diff --git a/.gitignore b/.gitignore index 1830fcc7ef..fac95318bd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,22 +1,4 @@ -*/target/ - -build/ -*/src/generated/ +# Gradle .gradle/ - -.project -.classpath -.settings/org.eclipse.m2e.core.prefs -*/.classpath -*.prefs - -*.iml -.idea -out/ - -.nb-gradle -.nb-gradle-properties - -/classes - -**/*.*~ +**/build/ +!**/src/**/build/ diff --git a/107/.gitignore b/107/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/107/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/107/README.adoc b/107/README.adoc deleted file mode 100644 index c9d4e422a3..0000000000 --- a/107/README.adoc +++ /dev/null @@ -1,86 +0,0 @@ -= The Ehcache 3.x JSR-107 Provider - -== Getting started with Ehcache3 & JSR-107 - -There isn't anything special for you to do to use Ehcache3 as the caching provider for your application: add the -`ehcache-3.0.0.jar` to your application's classpath (possibly removing the previous provider's jar) and you are ready to -go: - -[source,java] ----- -CachingProvider provider = Caching.getCachingProvider(); // <1> -CacheManager cacheManager = provider.getCacheManager(); // <2> ----- -<1> Retrieves the default `CachingProvider`, this should be `org.ehcache.EhCachingProvider`, - which you can also force, by using the `Caching.getCachingProvider(String)` static method instead; -<2> Retrieve the default `CacheManager` instance using the provider. - -=== Using a specific configuration file - -You can also add a XML file that preconfigure `Cache` instances upfront. See -link:../xml/README.adoc[the XML README file] for more details on configuring `Cache` in XML. - -In order to pre-configure the `CacheManager` at creation time using the XML file, simply: - -[source,java] ----- -CachingProvider provider = Caching.getCachingProvider(); -CacheManager cacheManager = provider.getCacheManager( // <1> - this.getClass().getResource("/ehcache.xml").toURI(), // <2> - Customer.class.getClassLoader()); // <3> ----- -<1> Invoking `javax.cache.spi.CachingProvider.getCacheManager(java.net.URI, java.lang.ClassLoader)` -<2> where the first argument is an `URI` pointing to our XML configuration file, e.g. `ehcache.xml`; -<3> the second argument being the `ClassLoader` to use to load user-types if needed; i.e. `Class` instances that are - stored in the `Cache` managed by our `CacheManager`. - -NOTE: You can also use the `CachingProvider.getCacheManager()` method that doesn't take any argument instead. - The `URI` and `ClassLoader` used to configure the `CacheManager` will then use the - vendor specific values returned by `CachingProvider.getDefaultURI` and `.getDefaultClassLoader` respectively. - Be aware that these aren¹t entirely spec¹ed for Ehcache3 and may change in future releases! - -=== Supplement JSR-107's configurations - -You can also create `cache-templates` as of Ehcache3, see -link:../xml/README.adoc#__code_cache_template_code_elements[Cache Templates] section of the XML README file for more details. The Ehcache3 -JSR-107 Caching Provider comes with an extension to the regular XML configuration, so you can: - - . Configure a default template all programmatically created `Cache` instances will inherit from, and - . Configure a given named `Cache` to inherit from a specific template. - -This feature is particularly useful to configure `Cache` beyond the JSR-107 specification, e.g. giving them a capacity -constraint. All is needed is adding a jsr107 service in your XML configuration file: - -[source,xml] ----- - - - - - - - - - - java.lang.String - java.lang.String - 2000 - - - - 20 - - ----- -<1> First, declare a namespace for the 107 extension, e.g. `jsr107` -<2> Within a `service` element at the top of you configuration, add a `jsr107:defaults` element -<3> The element takes an optional attribute `default-template`, which references the `cache-template` to use for all - `javax.cache.Cache` created by the application at runtime using `javax.cache.CacheManager.createCache`. In - this example, the default `cache-template` used will be `tinyCache`, meaning that atop of their particular config, - programmatically created `Cache` instances will have their capacity constrained to 20 entries. -<4> Nested within the `jsr107:defaults`, add specific `cache-templates` to use for given named `Cache`, e.g. when - creating the `Cache` named `foos` at runtime, Ehcache will enhance its config, giving it a capacity of 2000 entries, - as well as insuring both key and value types are `String`. diff --git a/107/build.gradle b/107/build.gradle deleted file mode 100644 index 469acad59e..0000000000 --- a/107/build.gradle +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: EhDeploy - -configurations { - tckTestClasses -} - -sourceSets { - tckTest { - java.srcDir 'src/tck/java' - resources.srcDir 'src/tck/resources' - compileClasspath += sourceSets.test.compileClasspath - runtimeClasspath += sourceSets.test.runtimeClasspath - } -} - -dependencies { - compile project(':impl'), project(':xml') - provided "javax.cache:cache-api:$parent.jcacheVersion" - tckTestRuntime 'javax.cache:cache-tests:1.0.1' - tckTestClasses('javax.cache:cache-tests:1.0.1:tests') { - transitive = false - } -} - -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} - -javadoc { - exclude '**/tck/**' - classpath = sourceSets.main.compileClasspath + sourceSets.main.runtimeClasspath + configurations.provided -} - -test { - if (testJava.javaVersion.isJava9Compatible()) { - jvmArgs += ['--add-modules', 'java.xml.bind'] - } -} - -task unpackTckTests(type: Copy) { - from { - configurations.tckTestClasses.collect {zipTree(it)} - } - into sourceSets.tckTest.java.outputDir -} - -task tckTest(type: Test, dependsOn: unpackTckTests) { - testClassesDirs = sourceSets.tckTest.output.classesDirs - classpath += sourceSets.tckTest.runtimeClasspath - - binResultsDir file("$buildDir/tck-tests-results/binary/$name") - reports.junitXml.destination = file("$buildDir/tck-tests-results") - reports.html.destination = file("$buildDir/reports/tck-tests") - - systemProperty 'java.net.preferIPv4Stack', 'true' - systemProperty 'javax.management.builder.initial', 'org.ehcache.jsr107.internal.tck.Eh107MBeanServerBuilder' - systemProperty 'org.jsr107.tck.management.agentId', 'Eh107MBeanServer' - systemProperty 'javax.cache.CacheManager', 'org.ehcache.CacheManager' - systemProperty 'javax.cache.Cache', 'org.ehcache.Cache' - systemProperty 'javax.cache.Cache.Entry', 'org.ehcache.Cache$Entry' - systemProperty 'javax.cache.annotation.CacheInvocationContext', 'javax.cache.annotation.impl.cdi.CdiCacheKeyInvocationContextImpl' -} - -test.dependsOn tckTest - - diff --git a/107/gradle.properties b/107/gradle.properties deleted file mode 100644 index 2be5b29d91..0000000000 --- a/107/gradle.properties +++ /dev/null @@ -1,4 +0,0 @@ -subPomName = Ehcache 3 JSR-107 module -subPomDesc = The JSR-107 compatibility module of Ehcache 3 -osgi = {"Export-Package" : ["!org.ehcache.jsr107.tck", "!org.ehcache.jsr107.internal*"],\ - "Import-Package" : ["javax.cache.*;resolution:=optional", "!sun.misc.*", "!sun.security.action.*", "!com.sun.jmx.mbeanserver.*"]} diff --git a/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java b/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java deleted file mode 100644 index 5267f646c9..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.jsr107; - -import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; -import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; -import org.ehcache.impl.serialization.PlainJavaSerializer; - -/** - * @author Ludovic Orban - */ -class DefaultJsr107SerializationProvider extends DefaultSerializationProvider { - - @SuppressWarnings("unchecked") - DefaultJsr107SerializationProvider() { - super(new DefaultSerializationProviderConfiguration() - .addSerializerFor(Object.class, (Class) PlainJavaSerializer.class)); - } -} diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java deleted file mode 100644 index 49091e6308..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.jsr107; - -import org.ehcache.core.spi.service.StatisticsService; -import org.ehcache.core.statistics.CacheStatistics; - -import java.net.URI; - -/** - * @author Ludovic Orban - */ -class Eh107CacheStatisticsMXBean extends Eh107MXBean implements javax.cache.management.CacheStatisticsMXBean { - - private final String cacheName; - private final StatisticsService statisticsService; - - Eh107CacheStatisticsMXBean(String cacheName, URI cacheManagerURI, StatisticsService statisticsService) { - super(cacheName, cacheManagerURI, "CacheStatistics"); - this.cacheName = cacheName; - this.statisticsService = statisticsService; - } - - @Override - public void clear() { - getCacheStatistics().clear(); - } - - @Override - public long getCacheHits() { - return getCacheStatistics().getCacheHits(); - } - - @Override - public float getCacheHitPercentage() { - return getCacheStatistics().getCacheHitPercentage(); - } - - @Override - public long getCacheMisses() { - return getCacheStatistics().getCacheMisses(); - } - - @Override - public float getCacheMissPercentage() { - return getCacheStatistics().getCacheMissPercentage(); - } - - @Override - public long getCacheGets() { - return getCacheStatistics().getCacheGets(); - } - - @Override - public long getCachePuts() { - return getCacheStatistics().getCachePuts(); - } - - @Override - public long getCacheRemovals() { - return getCacheStatistics().getCacheRemovals(); - } - - @Override - public long getCacheEvictions() { - return getCacheStatistics().getCacheEvictions(); - } - - @Override - public float getAverageGetTime() { - return getCacheStatistics().getCacheAverageGetTime(); - } - - @Override - public float getAveragePutTime() { - return getCacheStatistics().getCacheAveragePutTime(); - } - - @Override - public float getAverageRemoveTime() { - return getCacheStatistics().getCacheAverageRemoveTime(); - } - - private CacheStatistics getCacheStatistics() { - return statisticsService.getCacheStatistics(cacheName); - } -} diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107Expiry.java b/107/src/main/java/org/ehcache/jsr107/Eh107Expiry.java deleted file mode 100644 index 16acd241a2..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/Eh107Expiry.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.jsr107; - -import org.ehcache.expiry.Expiry; - -/** - * Eh107Expiry - */ -abstract class Eh107Expiry implements Expiry { - private final ThreadLocal shortCircuitAccess = new ThreadLocal<>(); - - void enableShortCircuitAccessCalls() { - shortCircuitAccess.set(this); - } - - void disableShortCircuitAccessCalls() { - shortCircuitAccess.remove(); - } - - boolean isShortCircuitAccessCalls() { - return shortCircuitAccess.get() != null; - } - -} diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107InternalCacheManager.java b/107/src/main/java/org/ehcache/jsr107/Eh107InternalCacheManager.java deleted file mode 100644 index f5464cc7ec..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/Eh107InternalCacheManager.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.jsr107; - -import org.ehcache.config.Configuration; -import org.ehcache.core.EhcacheManager; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceProvider; - -import java.util.Collection; - -/** - * Class which has for only purpose to allow to retrieve the {@code ServiceLocator}. - * - * @author Henri Tremblay - */ -class Eh107InternalCacheManager extends EhcacheManager { - - Eh107InternalCacheManager(Configuration config, Collection services, boolean useLoaderInAtomics) { - super(config, services, useLoaderInAtomics); - } - - ServiceProvider getServiceProvider() { - return serviceLocator; - } -} diff --git a/107/src/main/java/org/ehcache/jsr107/EhcacheExpiryWrapper.java b/107/src/main/java/org/ehcache/jsr107/EhcacheExpiryWrapper.java deleted file mode 100644 index 3b49e088b3..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/EhcacheExpiryWrapper.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.jsr107; - -import org.ehcache.ValueSupplier; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; - -/** - * EhcacheExpiryWrapper - */ -class EhcacheExpiryWrapper extends Eh107Expiry { - - private final Expiry wrappedExpiry; - - EhcacheExpiryWrapper(Expiry wrappedExpiry) { - this.wrappedExpiry = wrappedExpiry; - } - - @Override - public Duration getExpiryForCreation(K key, V value) { - return wrappedExpiry.getExpiryForCreation(key, value); - } - - @Override - public Duration getExpiryForAccess(K key, ValueSupplier value) { - return wrappedExpiry.getExpiryForAccess(key, value); - } - - @Override - public Duration getExpiryForUpdate(K key, ValueSupplier oldValue, V newValue) { - return wrappedExpiry.getExpiryForUpdate(key, oldValue, newValue); - } -} diff --git a/107/src/main/java/org/ehcache/jsr107/ExpiryPolicyToEhcacheExpiry.java b/107/src/main/java/org/ehcache/jsr107/ExpiryPolicyToEhcacheExpiry.java deleted file mode 100644 index 43c4ac13b8..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/ExpiryPolicyToEhcacheExpiry.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.jsr107; - -import org.ehcache.ValueSupplier; - -import java.io.Closeable; -import java.io.IOException; - -import javax.cache.expiry.Duration; -import javax.cache.expiry.ExpiryPolicy; - -class ExpiryPolicyToEhcacheExpiry extends Eh107Expiry implements Closeable { - - private final ExpiryPolicy expiryPolicy; - - ExpiryPolicyToEhcacheExpiry(ExpiryPolicy expiryPolicy) { - this.expiryPolicy = expiryPolicy; - } - - @Override - public org.ehcache.expiry.Duration getExpiryForCreation(K key, V value) { - try { - Duration duration = expiryPolicy.getExpiryForCreation(); - if (duration.isEternal()) { - return org.ehcache.expiry.Duration.INFINITE; - } - return new org.ehcache.expiry.Duration(duration.getDurationAmount(), duration.getTimeUnit()); - } catch (Throwable t) { - return org.ehcache.expiry.Duration.ZERO; - } - } - - @Override - public org.ehcache.expiry.Duration getExpiryForAccess(K key, ValueSupplier value) { - if (isShortCircuitAccessCalls()) { - return null; - } - - try { - Duration duration = expiryPolicy.getExpiryForAccess(); - if (duration == null) { - return null; - } - if (duration.isEternal()) { - return org.ehcache.expiry.Duration.INFINITE; - } - return new org.ehcache.expiry.Duration(duration.getDurationAmount(), duration.getTimeUnit()); - } catch (Throwable t) { - return org.ehcache.expiry.Duration.ZERO; - } - } - - @Override - public org.ehcache.expiry.Duration getExpiryForUpdate(K key, ValueSupplier oldValue, V newValue) { - try { - Duration duration = expiryPolicy.getExpiryForUpdate(); - if (duration == null) { - return null; - } - if (duration.isEternal()) { - return org.ehcache.expiry.Duration.INFINITE; - } - return new org.ehcache.expiry.Duration(duration.getDurationAmount(), duration.getTimeUnit()); - } catch (Throwable t) { - return org.ehcache.expiry.Duration.ZERO; - } - } - - @Override - public void close() throws IOException { - if (expiryPolicy instanceof Closeable) { - ((Closeable)expiryPolicy).close(); - } - } -} diff --git a/107/src/main/java/org/ehcache/jsr107/MultiCacheException.java b/107/src/main/java/org/ehcache/jsr107/MultiCacheException.java deleted file mode 100644 index c3543dd478..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/MultiCacheException.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.jsr107; - -import java.io.PrintStream; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.ListIterator; - -import javax.cache.CacheException; - -class MultiCacheException extends CacheException { - private static final long serialVersionUID = -6839700789356356261L; - - private final List throwables = new ArrayList<>(); - - MultiCacheException() { - super(); - } - - MultiCacheException(Throwable t) { - addThrowable(t); - } - - void addThrowable(Throwable t) { - if (t == null) { - throw new NullPointerException(); - } - - if (t == this) { - throw new IllegalArgumentException("cannot add to self"); - } - - if (t instanceof MultiCacheException) { - for (Throwable t2 : ((MultiCacheException)t).getThrowables()) { - throwables.add(t2); - } - } else { - throwables.add(t); - } - } - - private List getThrowables() { - return Collections.unmodifiableList(throwables); - } - - @Override - public String getMessage() { - List ts = getThrowables(); - if (ts.isEmpty()) { - return super.getMessage(); - } else { - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < ts.size(); i++) { - sb.append("[Exception ").append(i).append("] ").append(ts.get(i).getMessage()).append("\n"); - } - return sb.deleteCharAt(sb.length() - 1).toString(); - } - } - - MultiCacheException addFirstThrowable(Throwable t) { - if (t == null) { - throw new NullPointerException(); - } - - if (t == this) { - throw new IllegalArgumentException("cannot add to self"); - } - - if (t instanceof MultiCacheException) { - MultiCacheException mce = (MultiCacheException) t; - throwables.addAll(0, mce.getThrowables()); - } - throwables.add(0, t); - return this; - } - - @Override - public Throwable initCause(Throwable cause) { - throw new UnsupportedOperationException(); - } - - @Override - public Throwable getCause() { - return null; - } - - @Override - public void printStackTrace() { - super.printStackTrace(); - for (int i = 0; i < throwables.size(); i++) { - System.err.print(" [Exception " + i + "] "); - throwables.get(i).printStackTrace(); - } - } - - @Override - public void printStackTrace(PrintStream ps) { - super.printStackTrace(ps); - for (int i = 0; i < throwables.size(); i++) { - ps.print(" [Exception " + i + "] "); - throwables.get(i).printStackTrace(ps); - } - } - - @Override - public void printStackTrace(PrintWriter pw) { - super.printStackTrace(pw); - for (int i = 0; i < throwables.size(); i++) { - pw.print(" [Exception " + i + "] "); - throwables.get(i).printStackTrace(pw); - } - } - - void throwIfNotEmpty() { - if (!throwables.isEmpty()) { - - // if the only thing we contain is a single CacheException, then throw that - if (throwables.size() == 1) { - Throwable t = throwables.get(0); - if (t instanceof CacheException) { - throw (CacheException)t; - } - } - - throw this; - } - } -} diff --git a/107/src/main/java/org/ehcache/jsr107/NullCacheEntryEventFilter.java b/107/src/main/java/org/ehcache/jsr107/NullCacheEntryEventFilter.java deleted file mode 100644 index dca4e5a67a..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/NullCacheEntryEventFilter.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.jsr107; - -import javax.cache.event.CacheEntryEvent; -import javax.cache.event.CacheEntryEventFilter; -import javax.cache.event.CacheEntryListenerException; - -/** - * @author teck - */ -class NullCacheEntryEventFilter implements CacheEntryEventFilter { - - @SuppressWarnings("rawtypes") - public static final CacheEntryEventFilter INSTANCE = new NullCacheEntryEventFilter(); - - @Override - public boolean evaluate(CacheEntryEvent event) throws CacheEntryListenerException { - return true; - } - -} diff --git a/107/src/main/java/org/ehcache/jsr107/Unwrap.java b/107/src/main/java/org/ehcache/jsr107/Unwrap.java deleted file mode 100644 index b4b5a917b8..0000000000 --- a/107/src/main/java/org/ehcache/jsr107/Unwrap.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.jsr107; - -/** - * @author teck - */ -final class Unwrap { - - static T unwrap(Class clazz, Object... obj) { - if (clazz == null || obj == null) { - throw new NullPointerException(); - } - - for (Object o : obj) { - if (o != null && clazz.isAssignableFrom(o.getClass())) { - return clazz.cast(o); - } - } - - throw new IllegalArgumentException("Cannot unwrap to " + clazz); - } - - private Unwrap() { - // - } -} diff --git a/107/src/tck/resources/ExcludeList b/107/src/tck/resources/ExcludeList deleted file mode 100644 index 0cc2da0b52..0000000000 --- a/107/src/tck/resources/ExcludeList +++ /dev/null @@ -1,13 +0,0 @@ -#List tests to be excluded. -#Lines beginning with a '#' are comments -#Enter One method per line with syntax FULL_CLASS_NAME#METHOD_NAME as in the example below - -# This is a dummy test that fails if not in the exclude list. -org.jsr107.tck.CachingTest#dummyTest - -# see https://github.com/jsr107/jsr107tck/issues/63 -org.jsr107.tck.management.CacheMBStatisticsBeanTest#testPutIfAbsent - -# see https://github.com/jsr107/jsr107tck/issues/61 -org.jsr107.tck.spi.CachingProviderClassLoaderTest#getCacheManagerSingleton - diff --git a/107/src/test/resources/ehcache-107-mbeans-cache-config.xml b/107/src/test/resources/ehcache-107-mbeans-cache-config.xml deleted file mode 100644 index 1e39955b8f..0000000000 --- a/107/src/test/resources/ehcache-107-mbeans-cache-config.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - java.lang.String - java.lang.String - 2000 - - - - \ No newline at end of file diff --git a/107/src/test/resources/ehcache-107-serializer.xml b/107/src/test/resources/ehcache-107-serializer.xml deleted file mode 100644 index 5722c36df1..0000000000 --- a/107/src/test/resources/ehcache-107-serializer.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - org.ehcache.impl.serialization.CompactJavaSerializer - - - - java.lang.Long - java.lang.String - - 20 - 1 - - - - - java.lang.Long - java.lang.String - - 20 - 1 - - - \ No newline at end of file diff --git a/107/src/test/resources/ehcache-107-stats.xml b/107/src/test/resources/ehcache-107-stats.xml deleted file mode 100644 index 0c58cadef3..0000000000 --- a/107/src/test/resources/ehcache-107-stats.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - 10 - 10 - - - - - - 10 - 10 - - - - - java.lang.String - java.lang.String - 10 - - - \ No newline at end of file diff --git a/107/src/test/resources/ehcache-107.xml b/107/src/test/resources/ehcache-107.xml deleted file mode 100644 index f018320e1e..0000000000 --- a/107/src/test/resources/ehcache-107.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - - java.lang.String - java.lang.String - 2000 - - - - 20 - - - \ No newline at end of file diff --git a/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml b/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml deleted file mode 100644 index b52a370326..0000000000 --- a/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - java.lang.Long - com.pany.domain.Product - - com.pany.ehcache.integration.ProductCacheLoaderWriter - - 100 - - - \ No newline at end of file diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc index 8dab9e13a5..a8701cacac 100644 --- a/CONTRIBUTING.adoc +++ b/CONTRIBUTING.adoc @@ -54,7 +54,7 @@ An example of `Service` being the `org.ehcache.core.spi.store.Store.Provider`, i `Service` instances are created using Java's https://docs.oracle.com/javase/6/docs/api/java/util/ServiceLoader.html[`java.util.ServiceLoader` service-provider loading facility]. It is used to locate all `org.ehcache.core.spi.service.ServiceFactory` implementations on the classpath. -These are in turn used to create `Service` instances. Each `CacheManager` uses its own `org.ehcache.core.internal.service.ServiceLocator` facility to locate `Service` instances, which it then in turn life cycles. +These are in turn used to create `Service` instances. Each `CacheManager` uses its own `org.ehcache.core.spi.ServiceLocator` facility to locate `Service` instances, which it then in turn life cycles. `Service` instances are configured by their own respective `ServiceConfiguration` at `Service.start()` invocation time. `CacheManager` and its `Service` instances can then use these services. diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000000..ab6247ebda --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,18 @@ +/* + * All content copyright Terracotta, Inc., unless otherwise indicated. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + //See Jenkins wiki pages for info +checkmarxBuild checkmarx_project_name: 'Terracotta DB (TDB) Ehcache OSS' diff --git a/README.adoc b/README.adoc index 399bf6297e..5ece1c5e00 100644 --- a/README.adoc +++ b/README.adoc @@ -1,32 +1,30 @@ = The Ehcache 3.x line is currently the development line. -Status of the build: image:https://ehcache.ci.cloudbees.com/buildStatus/icon?job=ehcache3[Ehcache@Cloudbees, link="https://ehcache.ci.cloudbees.com/job/ehcache3/"] +Status of the build: +[link="https://dev.azure.com/TerracottaCI/ehcache/_build/latest?definitionId=14"] +image::https://dev.azure.com/TerracottaCI/ehcache/_apis/build/status/ehcache3[Build Status] For more information, you might want to go check the https://github.com/ehcache/ehcache3/wiki[wiki]. -image:https://www.cloudbees.com/sites/default/files/styles/large/public/Button-Powered-by-CB.png?itok=uMDWINfY[Cloudbees, link="http://www.cloudbees.com/resources/foss"] - == Getting started with the new API For samples, documentation, and usage information, please see http://ehcache.org. == Current release -We released 3.4.0 on August 29th 2017. +We released 3.9.0 on August 25th 2020. -The https://github.com/ehcache/ehcache3/releases/tag/v3.4.0[release notes] contain the links to the artifacts and the documentation to help you get started. +The https://github.com/ehcache/ehcache3/releases/tag/v3.9.0[release notes] contain the links to the artifacts and the documentation to help you get started. -You should consider upgrading to 3.4.x as it does all 3.0.x, 3.1.x, 3.2.x and 3.3.x do and more with a fully compatible API. -The only thing to note compared to 3.0.x is that transactional support has been moved to a separate jar. +You should consider upgrading to 3.9.x as it does all previous 3.x do and more with a fully compatible API. == Current development & next release -We are still working on the missing features of the clustering tier of Ehcache 3 which will be included in upcoming releases. +We released the missing features of the clustering tier of Ehcache 3 in 3.7.0. -We are also considering moving to Java 8 support only for the upcoming 3.5.0. +Starting with version 3.5, Ehcache only supports Java 8 and later. -We may still do 3.3.x release to include all fixes that have been made on it, but this is now less a priority. -There is no longer any plan for a 3.0.x, 3.1.x or 3.2.x release. +Version 3.5 is now in maintenance mode. We are no longer planning to perform release of earlier versions. See the https://github.com/ehcache/ehcache3/milestones[milestones on GitHub] for more details on the current status. diff --git a/api/.gitignore b/api/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/api/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/api/build.gradle b/api/build.gradle deleted file mode 100644 index 8119b013a6..0000000000 --- a/api/build.gradle +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: EhDeploy - -checkstyle { - configFile = file("$projectDir/config/checkstyle.xml") -} - -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} diff --git a/api/gradle.properties b/api/gradle.properties deleted file mode 100644 index 3736cff405..0000000000 --- a/api/gradle.properties +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 API module -subPomDesc = The API module of Ehcache 3 diff --git a/api/src/main/java/org/ehcache/config/CacheConfiguration.java b/api/src/main/java/org/ehcache/config/CacheConfiguration.java deleted file mode 100644 index d90c349792..0000000000 --- a/api/src/main/java/org/ehcache/config/CacheConfiguration.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.config; - -import org.ehcache.Cache; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.service.ServiceConfiguration; - -import java.util.Collection; - -/** - * Represents the minimal configuration for a {@link Cache}. - *

- * Implementations are expected to be read-only. - * - * @param the key type for the cache - * @param the value type for the cache - */ -public interface CacheConfiguration { - - /** - * The service configurations defined for the {@link Cache}. - *

- * Implementations must return an unmodifiable collection. - * - * @return service configurations - */ - Collection> getServiceConfigurations(); - - /** - * The key type for the {@link Cache}. - *

- * The key type must not be {@code null}. - * - * @return a non {@code null} class - */ - Class getKeyType(); - - /** - * The value type for the {@link Cache}. - *

- * The value type must not be {@code null}. - * - * @return a non {@code null} class - */ - Class getValueType(); - - /** - * The {@link EvictionAdvisor} predicate function. - *

- * Entries which pass this predicate may be ignored by the eviction process. - * This is only a hint. - * - * @return the eviction advisor predicate - */ - EvictionAdvisor getEvictionAdvisor(); - - /** - * The {@link ClassLoader} for the {@link Cache}. - *

- * This {@code ClassLoader} will be used to instantiate cache level services - * and for deserializing cache entries when required. - *

- * The {@code ClassLoader} must not be null. - * - * @return the cache {@code ClassLoader} - */ - ClassLoader getClassLoader(); - - /** - * The {@link Expiry} rules for the {@link Cache}. - *

- * The {@code Expiry} cannot be null. - * - * @return the {@code Expiry} - */ - Expiry getExpiry(); - - /** - * The {@link ResourcePools} for the {@link Cache}. - *

- * The {@code ResourcePools} cannot be null nor empty. - * - * @return the {@link ResourcePools} - */ - ResourcePools getResourcePools(); - -} diff --git a/api/src/main/java/org/ehcache/expiry/Expiry.java b/api/src/main/java/org/ehcache/expiry/Expiry.java deleted file mode 100644 index b9a38f5847..0000000000 --- a/api/src/main/java/org/ehcache/expiry/Expiry.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.expiry; - -import org.ehcache.ValueSupplier; - -/** - * A policy object that governs expiration for mappings in a {@link org.ehcache.Cache Cache}. - *

- * Previous values are not accessible directly but are rather available through a {@link ValueSupplier value supplier} - * to indicate that access can require computation (such as deserialization). - *

- * NOTE: Some cache configurations (eg. caches with eventual consistency) may use local (ie. non-consistent) state - * to decide whether to call {@link #getExpiryForUpdate(Object, ValueSupplier, Object)} vs. - * {@link #getExpiryForCreation(Object, Object)}. For these cache configurations it is advised to return the same - * value for both of these methods - *

- * See {@link Expirations} for helper methods to create common {@code Expiry} instances. - * - * @param the key type for the cache - * @param the value type for the cache - * - * @see Expirations - */ -public interface Expiry { - - /** - * Returns the lifetime of an entry when it is initially added to a {@link org.ehcache.Cache Cache}. - *

- * This method must not return {@code null}. - *

- * Exceptions thrown from this method will be swallowed and result in the expiry duration being - * {@link Duration#ZERO ZERO}. - * - * @param key the key of the newly added entry - * @param value the value of the newly added entry - * @return a non-null {@link Duration} - */ - Duration getExpiryForCreation(K key, V value); - - /** - * Returns the expiration {@link Duration} (relative to the current time) when an existing entry is accessed from a - * {@link org.ehcache.Cache Cache}. - *

- * Returning {@code null} indicates that the expiration time remains unchanged. - *

- * Exceptions thrown from this method will be swallowed and result in the expiry duration being - * {@link Duration#ZERO ZERO}. - * - * @param key the key of the accessed entry - * @param value a value supplier for the accessed entry - * @return an expiration {@code Duration}, {@code null} means unchanged - */ - Duration getExpiryForAccess(K key, ValueSupplier value); - - - /** - * Returns the expiration {@link Duration} (relative to the current time) when an existing entry is updated in a - * {@link org.ehcache.Cache Cache}. - *

- * Returning {@code null} indicates that the expiration time remains unchanged. - *

- * Exceptions thrown from this method will be swallowed and result in the expiry duration being - * {@link Duration#ZERO ZERO}. - * - * @param key the key of the updated entry - * @param oldValue a value supplier for the previous value of the entry - * @param newValue the new value of the entry - * @return an expiration {@code Duration}, {@code null} means unchanged - */ - Duration getExpiryForUpdate(K key, ValueSupplier oldValue, V newValue); - -} diff --git a/api/src/main/java/org/ehcache/expiry/package-info.java b/api/src/main/java/org/ehcache/expiry/package-info.java deleted file mode 100644 index 4ea3f314dd..0000000000 --- a/api/src/main/java/org/ehcache/expiry/package-info.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * {@link org.ehcache.expiry.Expiry Expiry} API of a {@link org.ehcache.Cache Cache}. - */ -package org.ehcache.expiry; \ No newline at end of file diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java b/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java deleted file mode 100644 index 38e07d9418..0000000000 --- a/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.spi.loaderwriter; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.spi.service.Service; - -/** - * A {@link Service} that creates {@link CacheLoaderWriter} instances. - *

- * A {@code CacheManager} will use the {@link #createCacheLoaderWriter(java.lang.String, org.ehcache.config.CacheConfiguration)} - * method to create {@code CacheLoaderWriter} instances for each {@code Cache} it - * manages. - *

- * For any non {@code null} value returned, the {@code Cache} will be configured to use the - * {@code CacheLoaderWriter} instance returned. - */ -public interface CacheLoaderWriterProvider extends Service { - - /** - * Creates a {@code CacheLoaderWriter} for use with the {@link org.ehcache.Cache Cache} - * of the given alias and configuration. - * - * @param alias the {@code Cache} alias in the {@code CacheManager} - * @param cacheConfiguration the configuration for the associated cache - * @param the loader-writer key type - * @param the loader-writer value type - * - * @return the {@code CacheLoaderWriter} to be used by the {@code Cache} or {@code null} if none - */ - CacheLoaderWriter createCacheLoaderWriter(String alias, CacheConfiguration cacheConfiguration); - - /** - * Releases a {@code CacheLoaderWriter} when the associated {@link org.ehcache.Cache Cache} - * is finished with it. - *

- * If the {@code CacheLoaderWriter} instance was user provided {@link java.io.Closeable#close() close} - * will not be invoked. - * - * @param cacheLoaderWriter the {@code CacheLoaderWriter} being released - * @throws Exception when the release fails - */ - void releaseCacheLoaderWriter(CacheLoaderWriter cacheLoaderWriter) throws Exception; - -} diff --git a/api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java b/api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java deleted file mode 100644 index b7766dc7cf..0000000000 --- a/api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.spi.service; - -/** - * A configuration type to be used when interacting with a {@link Service}. - * - * @param the service type this configuration works with - * - */ -public interface ServiceConfiguration { - - /** - * Indicates which service this configuration works with. - * - * @return the service type - */ - Class getServiceType(); -} diff --git a/api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java b/api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java deleted file mode 100644 index 4db06f3209..0000000000 --- a/api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.spi.service; - -/** - * A configuration type used when creating a {@link Service}. - * - * @param the service type this configuration works with - * - */ -public interface ServiceCreationConfiguration { - - /** - * Indicates which service consumes this configuration at creation. - * - * @return the service type - */ - Class getServiceType(); -} diff --git a/api/src/test/java/org/ehcache/expiry/ExpirationsTest.java b/api/src/test/java/org/ehcache/expiry/ExpirationsTest.java deleted file mode 100644 index 05d8d1988c..0000000000 --- a/api/src/test/java/org/ehcache/expiry/ExpirationsTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.expiry; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -import java.util.concurrent.TimeUnit; - -import org.ehcache.ValueSupplier; -import org.junit.Test; - -public class ExpirationsTest { - - @Test - public void testNoExpiration() { - Expiry expiry = Expirations.noExpiration(); - assertThat(expiry.getExpiryForCreation(this, this), equalTo(Duration.INFINITE)); - assertThat(expiry.getExpiryForAccess(this, holderOf(this)), nullValue()); - assertThat(expiry.getExpiryForUpdate(this, holderOf(this), this), nullValue()); - } - - @Test - public void testTTIExpiration() { - Duration duration = new Duration(1L, TimeUnit.SECONDS); - Expiry expiry = Expirations.timeToIdleExpiration(duration); - assertThat(expiry.getExpiryForCreation(this, this), equalTo(duration)); - assertThat(expiry.getExpiryForAccess(this, holderOf(this)), equalTo(duration)); - assertThat(expiry.getExpiryForUpdate(this, holderOf(this), this), equalTo(duration)); - } - - @Test - public void testTTLExpiration() { - Duration duration = new Duration(1L, TimeUnit.SECONDS); - Expiry expiry = Expirations.timeToLiveExpiration(duration); - assertThat(expiry.getExpiryForCreation(this, holderOf(this)), equalTo(duration)); - assertThat(expiry.getExpiryForAccess(this, holderOf(this)), nullValue()); - assertThat(expiry.getExpiryForUpdate(this, holderOf(this), this), equalTo(duration)); - } - - @Test - public void testExpiration() { - Duration creation = new Duration(1L, TimeUnit.SECONDS); - Duration access = new Duration(2L, TimeUnit.SECONDS); - Duration update = new Duration(3L, TimeUnit.SECONDS); - Expiry expiry = Expirations.builder().setCreate(creation).setAccess(access).setUpdate(update).build(); - assertThat(expiry.getExpiryForCreation(this, this), equalTo(creation)); - assertThat(expiry.getExpiryForAccess(this, holderOf(this)), equalTo(access)); - assertThat(expiry.getExpiryForUpdate(this, holderOf(this),this), equalTo(update)); - } - - private ValueSupplier holderOf(final Object obj) { - return () -> obj; - } -} diff --git a/azure-pipelines-static-analysis.yml b/azure-pipelines-static-analysis.yml new file mode 100644 index 0000000000..4c3f99b9a8 --- /dev/null +++ b/azure-pipelines-static-analysis.yml @@ -0,0 +1,29 @@ +# +# Copyright Terracotta, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# See shared code location for steps and parameters: +# https://dev.azure.com/TerracottaCI/_git/terracotta + +resources: + repositories: + - repository: templates + type: git + name: terracotta/terracotta + +jobs: +- template: build-templates/gradle-common.yml@templates + parameters: + gradleTasks: 'check' diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000000..590980b08b --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,49 @@ +# +# Copyright Terracotta, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# See shared code location for steps and parameters: +# https://dev.azure.com/TerracottaCI/_git/terracotta + +resources: + repositories: + - repository: templates + type: git + name: terracotta/terracotta + +jobs: +- template: build-templates/gradle-common.yml@templates + parameters: + jdkVersion: '1.8' + jobName: 'LinuxJava8' + +- template: build-templates/gradle-common.yml@templates + parameters: + jdkVersion: '1.8' + options: '-PtestVM=java11Home' + jobName: 'LinuxJava11' + +- template: build-templates/gradle-common.yml@templates + parameters: + vmImage: 'windows-latest' + jdkVersion: '1.8' + jobName: 'WindowsJava8' + +- template: build-templates/gradle-common.yml@templates + parameters: + vmImage: 'windows-latest' + jdkVersion: '1.8' + options: '-PtestVM=java11Home' + jobName: 'WindowsJava11' diff --git a/build-logic/build.gradle b/build-logic/build.gradle new file mode 100644 index 0000000000..78eed08c1a --- /dev/null +++ b/build-logic/build.gradle @@ -0,0 +1,67 @@ +plugins { + id 'java-gradle-plugin' +} + +repositories { + gradlePluginPortal() + mavenCentral() +} + +gradlePlugin { + plugins { + internalModule { + id = 'org.ehcache.build.internal-module' + implementationClass = 'org.ehcache.build.InternalEhcacheModule' + } + publicModule { + id = 'org.ehcache.build.public-module' + implementationClass = 'org.ehcache.build.PublicEhcacheModule' + } + clusteredModule { + id = 'org.ehcache.build.clustered-module' + implementationClass = 'org.ehcache.build.ClusteredEhcacheModule' + } + serverModule { + id = 'org.ehcache.build.clustered-server-module' + implementationClass = 'org.ehcache.build.ClusteredServerModule' + } + distribution { + id = 'org.ehcache.build.package' + implementationClass = 'org.ehcache.build.EhcachePackage' + } + + variant { + id = 'org.ehcache.build.plugins.variant' + implementationClass = 'org.ehcache.build.plugins.VariantPlugin' + } + + base { + id = 'org.ehcache.build.conventions.base' + implementationClass = 'org.ehcache.build.conventions.BaseConvention' + } + java { + id = 'org.ehcache.build.conventions.java' + implementationClass = 'org.ehcache.build.conventions.JavaConvention' + } + javaLibrary { + id = 'org.ehcache.build.conventions.java-library' + implementationClass = 'org.ehcache.build.conventions.JavaLibraryConvention' + } + war { + id = 'org.ehcache.build.conventions.war' + implementationClass = 'org.ehcache.build.conventions.WarConvention' + } + } +} + +dependencies { + api gradleApi() + api 'biz.aQute.bnd:biz.aQute.bnd.gradle:6.0.0' + api 'gradle.plugin.com.github.jengelman.gradle.plugins:shadow:7.0.0' + api 'org.unbroken-dome.gradle-plugins:gradle-xjc-plugin:2.0.0' + api 'com.github.spotbugs.snom:spotbugs-gradle-plugin:4.7.9' + implementation 'biz.aQute.bnd:biz.aQute.bndlib:6.0.0' + implementation 'org.osgi:org.osgi.service.component.annotations:1.5.0' + implementation 'org.apache.felix:org.apache.felix.scr.generator:1.18.4' + implementation 'org.apache.felix:org.apache.felix.scr.ds-annotations:1.2.10' +} diff --git a/build-logic/src/main/java/org/ehcache/build/ClusteredEhcacheModule.java b/build-logic/src/main/java/org/ehcache/build/ClusteredEhcacheModule.java new file mode 100644 index 0000000000..5ca1afbab9 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/ClusteredEhcacheModule.java @@ -0,0 +1,12 @@ +package org.ehcache.build; + +import org.gradle.api.Project; + +public class ClusteredEhcacheModule extends EhcacheModule { + + @Override + public void apply(Project project) { + project.setGroup("org.ehcache.modules.clustered"); + super.apply(project); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/ClusteredServerModule.java b/build-logic/src/main/java/org/ehcache/build/ClusteredServerModule.java new file mode 100644 index 0000000000..64be60977d --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/ClusteredServerModule.java @@ -0,0 +1,17 @@ +package org.ehcache.build; + +import org.ehcache.build.conventions.DeployConvention; +import org.ehcache.build.plugins.VoltronPlugin; +import org.gradle.api.Plugin; +import org.gradle.api.Project; + +public class ClusteredServerModule implements Plugin { + + @Override + public void apply(Project project) { + project.setGroup("org.ehcache.modules.clustered"); + + project.getPlugins().apply(DeployConvention.class); + project.getPlugins().apply(VoltronPlugin.class); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/EhcacheModule.java b/build-logic/src/main/java/org/ehcache/build/EhcacheModule.java new file mode 100644 index 0000000000..dd3aa59140 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/EhcacheModule.java @@ -0,0 +1,17 @@ +package org.ehcache.build; + +import org.ehcache.build.conventions.BndConvention; +import org.ehcache.build.conventions.JavaLibraryConvention; +import org.ehcache.build.conventions.DeployConvention; +import org.gradle.api.Plugin; +import org.gradle.api.Project; + +public abstract class EhcacheModule implements Plugin { + + @Override + public void apply(Project project) { + project.getPlugins().apply(JavaLibraryConvention.class); + project.getPlugins().apply(DeployConvention.class); + project.getPlugins().apply(BndConvention.class); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/EhcachePackage.java b/build-logic/src/main/java/org/ehcache/build/EhcachePackage.java new file mode 100644 index 0000000000..112fd3d0d2 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/EhcachePackage.java @@ -0,0 +1,16 @@ +package org.ehcache.build; + +import org.ehcache.build.conventions.DeployConvention; +import org.ehcache.build.plugins.PackagePlugin; +import org.gradle.api.Plugin; +import org.gradle.api.Project; + +public class EhcachePackage implements Plugin { + + @Override + public void apply(Project project) { + project.setGroup("org.ehcache"); + project.getPlugins().apply(PackagePlugin.class); + project.getPlugins().apply(DeployConvention.class); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/InternalEhcacheModule.java b/build-logic/src/main/java/org/ehcache/build/InternalEhcacheModule.java new file mode 100644 index 0000000000..1ba6b69b8a --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/InternalEhcacheModule.java @@ -0,0 +1,13 @@ +package org.ehcache.build; + +import org.gradle.api.Project; + +public class InternalEhcacheModule extends EhcacheModule { + + @Override + public void apply(Project project) { + project.setGroup("org.ehcache.modules"); + super.apply(project); + } +} + diff --git a/build-logic/src/main/java/org/ehcache/build/PublicEhcacheModule.java b/build-logic/src/main/java/org/ehcache/build/PublicEhcacheModule.java new file mode 100644 index 0000000000..477c54b3e7 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/PublicEhcacheModule.java @@ -0,0 +1,11 @@ +package org.ehcache.build; + +import org.gradle.api.Project; + +public class PublicEhcacheModule extends EhcacheModule { + @Override + public void apply(Project project) { + project.setGroup("org.ehcache"); + super.apply(project); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/BaseConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/BaseConvention.java new file mode 100644 index 0000000000..edf95548a1 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/BaseConvention.java @@ -0,0 +1,23 @@ +package org.ehcache.build.conventions; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.ResolutionStrategy; +import org.gradle.api.plugins.BasePlugin; + +import java.net.URI; + +public class BaseConvention implements Plugin { + + @Override + public void apply(Project project) { + project.getPlugins().apply(BasePlugin.class); + + project.getRepositories().mavenCentral(); + project.getRepositories().maven(repo -> repo.setUrl(URI.create("https://repo.terracotta.org/maven2"))); + + project.getConfigurations().configureEach( + config -> config.resolutionStrategy(ResolutionStrategy::failOnVersionConflict) + ); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/BndConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/BndConvention.java new file mode 100644 index 0000000000..bbb915168e --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/BndConvention.java @@ -0,0 +1,89 @@ +package org.ehcache.build.conventions; + +import aQute.bnd.gradle.BndBuilderPlugin; +import aQute.bnd.gradle.BundleTaskExtension; +import aQute.bnd.osgi.Constants; +import org.ehcache.build.plugins.osgids.OsgiDsPlugin; +import org.gradle.api.Action; +import org.gradle.api.GradleException; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.Dependency; +import org.gradle.api.artifacts.ExternalDependency; +import org.gradle.api.artifacts.ProjectDependency; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.provider.MapProperty; +import org.gradle.api.publish.PublishingExtension; +import org.gradle.api.publish.maven.MavenPublication; +import org.gradle.api.publish.plugins.PublishingPlugin; +import org.gradle.api.tasks.bundling.Jar; + +import static java.lang.System.lineSeparator; +import static java.util.stream.Collectors.joining; + +public class BndConvention implements Plugin { + + @Override + public void apply(Project project) { + project.getPlugins().apply(BndBuilderPlugin.class); + project.getPlugins().apply(OsgiDsPlugin.class); + project.getPlugins().apply(DeployConvention.class); + + project.getTasks().named(JavaPlugin.JAR_TASK_NAME, Jar.class, jar -> { + jar.getExtensions().configure(BundleTaskExtension.class, bundle -> configureBundleDefaults(project, bundle)); + }); + + project.getConfigurations().named("baseline", config -> { + config.getResolutionStrategy().getComponentSelection().all(selection -> { + if (!selection.getCandidate().getVersion().matches("^\\d+(?:\\.\\d+)*$")) { + selection.reject("Only full releases can be used as OSGi baselines"); + } + }); + }); + + String dependencyNotation = project.getGroup() + ":" + project.getName() + ":(," + project.getVersion() + "["; + Dependency baseline = project.getDependencies().add("baseline", dependencyNotation); + if (baseline instanceof ProjectDependency) { + throw new GradleException("Baseline should not be a project dependency"); + } else if (baseline instanceof ExternalDependency) { + ((ExternalDependency) baseline).setForce(true); + ((ExternalDependency) baseline).setTransitive(false); + } else { + throw new IllegalArgumentException("Unexpected dependency type: " + baseline); + } + } + + public static void configureBundleDefaults(Project project, BundleTaskExtension bundle) { + MapProperty defaultInstructions = project.getObjects().mapProperty(String.class, String.class); + bundleDefaults(project).execute(defaultInstructions); + bundle.bnd(defaultInstructions.map(kv -> kv.entrySet().stream().map(e -> e.getKey() + "=" + e.getValue()).collect(joining(lineSeparator())))); + } + + public static Action> bundleDefaults(Project project) { + return properties -> { + project.getPlugins().withType(PublishingPlugin.class).configureEach(publishingPlugin -> { + project.getExtensions().getByType(PublishingExtension.class).getPublications().withType(MavenPublication.class).stream().findAny().ifPresent(publication -> { + properties.put(Constants.BUNDLE_NAME, publication.getPom().getName()); + properties.put(Constants.BUNDLE_DESCRIPTION, publication.getPom().getDescription()); + }); + }); + properties.put(Constants.BUNDLE_SYMBOLICNAME, project.getGroup() + "." + project.getName()); + properties.put(Constants.BUNDLE_DOCURL, "http://ehcache.org"); + properties.put(Constants.BUNDLE_LICENSE, "LICENSE"); + properties.put(Constants.BUNDLE_VENDOR, "Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc."); + properties.put(Constants.BUNDLE_VERSION, osgiFixedVersion(project.getVersion().toString())); + properties.put(Constants.SERVICE_COMPONENT, "OSGI-INF/*.xml"); + }; + } + + public static String osgiFixedVersion(String version) { + /* + * The bnd gradle plugin does not handle our 2-digit snapshot versioning scheme very well. It maps `x.y-SNAPSHOT` + * to `x.y.0.SNAPSHOT`. This is bad since `x.y.0.SNAPSHOT` is considered to be less than *all* `x.y.z`. This means + * the baseline version range expression `(,x.y.0.SNAPSHOT[` will always pick the last release from the previous + * minor line. To fix this we manually map to a 3-digit snapshot version where the 3rd digit is a number chosen + * to be higher than we would ever release ('All the worlds a VAX'). + */ + return version.replaceAll("^(\\d+.\\d+)-SNAPSHOT$", "$1.999-SNAPSHOT"); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/CheckstyleConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/CheckstyleConvention.java new file mode 100644 index 0000000000..0973fc1237 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/CheckstyleConvention.java @@ -0,0 +1,22 @@ +package org.ehcache.build.conventions; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.plugins.quality.CheckstyleExtension; +import org.gradle.api.plugins.quality.CheckstylePlugin; + +import java.util.Map; + +public class CheckstyleConvention implements Plugin { + @Override + public void apply(Project project) { + project.getPlugins().apply(CheckstylePlugin.class); + + project.getExtensions().configure(CheckstyleExtension.class, checkstyle -> { + checkstyle.setConfigFile(project.getRootProject().file("config/checkstyle.xml")); + Map properties = checkstyle.getConfigProperties(); + properties.put("projectDir", project.getProjectDir()); + properties.put("rootDir", project.getRootDir()); + }); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/DeployConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/DeployConvention.java new file mode 100644 index 0000000000..bd7376ef8b --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/DeployConvention.java @@ -0,0 +1,167 @@ +package org.ehcache.build.conventions; + +import org.gradle.api.Action; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.artifacts.ProjectDependency; +import org.gradle.api.internal.artifacts.ivyservice.projectmodule.ProjectComponentPublication; +import org.gradle.api.internal.component.SoftwareComponentInternal; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.publish.PublishingExtension; +import org.gradle.api.publish.maven.MavenPublication; +import org.gradle.api.publish.maven.internal.publication.MavenPomInternal; +import org.gradle.api.publish.maven.internal.publisher.MavenProjectIdentity; +import org.gradle.api.publish.maven.plugins.MavenPublishPlugin; +import org.gradle.api.publish.maven.tasks.AbstractPublishToMaven; +import org.gradle.api.publish.maven.tasks.GenerateMavenPom; +import org.gradle.api.publish.maven.tasks.PublishToMavenRepository; +import org.gradle.api.tasks.TaskProvider; +import org.gradle.api.tasks.WriteProperties; +import org.gradle.jvm.tasks.Jar; +import org.gradle.plugins.signing.SigningExtension; +import org.gradle.plugins.signing.SigningPlugin; + +import java.io.File; +import java.util.Collection; +import java.util.concurrent.Callable; +import java.util.function.Predicate; + +import static java.util.stream.Collectors.toList; +import static org.gradle.api.publish.plugins.PublishingPlugin.PUBLISH_TASK_GROUP; + +/** + * Deploy plugin for published artifacts. This is an abstraction over the {@code maven-publish} plugin. + * + * Defaults: + *
    + *
  • POM: population of general content: organization, issue-management, scm, etc.
  • + *
  • POM copied to {@code META-INF/maven/groupId/artifactId/pom.xml}
  • + *
  • POM properties file copied to {@code META-INF/maven/groupId/artifactId/pom.properties}
  • + *
  • Javadoc and Source JAR Publishing
  • + *
  • {@code install} as alias of {@code publishToMavenLocal}
  • + *
+ */ +public class DeployConvention implements Plugin { + + private static final Predicate IS_RELEASE = p -> !p.getVersion().toString().endsWith("-SNAPSHOT"); + + @Override + public void apply(Project project) { + project.getPlugins().apply(SigningPlugin.class); + project.getPlugins().apply(MavenPublishPlugin.class); + + project.getExtensions().configure(PublishingExtension.class, publishing -> { + publishing.getPublications().withType(MavenPublication.class).configureEach(mavenPublication -> mavenPublication.pom(pom -> { + pom.getUrl().set("http://ehcache.org"); + pom.organization(org -> { + org.getName().set("Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc."); + org.getUrl().set("http://terracotta.org"); + }); + pom.issueManagement(issue -> { + issue.getSystem().set("Github"); + issue.getUrl().set("https://github.com/ehcache/ehcache3/issues"); + }); + pom.scm(scm -> { + scm.getUrl().set("https://github.com/ehcache/ehcache3"); + scm.getConnection().set("scm:git:https://github.com/ehcache/ehcache3.git"); + scm.getDeveloperConnection().set("scm:git:git@github.com:ehcache/ehcache3.git"); + }); + pom.licenses(licenses -> licenses.license(license -> { + license.getName().set("The Apache Software License, Version 2.0"); + license.getUrl().set("http://www.apache.org/licenses/LICENSE-2.0.txt"); + license.getDistribution().set("repo"); + })); + pom.developers(devs -> devs.developer(dev -> { + dev.getName().set("Terracotta Engineers"); + dev.getEmail().set("tc-oss@softwareag.com"); + dev.getOrganization().set("Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc."); + dev.getOrganizationUrl().set("http://ehcache.org"); + })); + })); + publishing.repositories(repositories -> repositories.maven(maven -> { + if (IS_RELEASE.test(project)) { + maven.setUrl(project.property("deployUrl")); + maven.credentials(creds -> { + creds.setUsername(project.property("deployUser").toString()); + creds.setPassword(project.property("deployPwd").toString()); + }); + } else { + maven.setName("sonatype-nexus-snapshot"); + maven.setUrl("https://oss.sonatype.org/content/repositories/snapshots"); + maven.credentials(creds -> { + creds.setUsername(project.property("sonatypeUser").toString()); + creds.setPassword(project.property("sonatypePwd").toString()); + }); + } + })); + }); + + project.getExtensions().configure(SigningExtension.class, signing -> { + signing.setRequired((Callable) () -> IS_RELEASE.test(project) && project.getGradle().getTaskGraph().getAllTasks().stream().anyMatch(t -> t instanceof PublishToMavenRepository)); + signing.sign(project.getExtensions().getByType(PublishingExtension.class).getPublications()); + }); + + /* + * Do **not** convert the anonymous Action here to a lambda expression - it will break Gradle's up-to-date tracking + * and cause tasks to be needlessly rerun. + */ + //noinspection Convert2Lambda + project.getTasks().withType(AbstractPublishToMaven.class).configureEach(publishTask -> publishTask.doFirst(new Action() { + @Override + public void execute(Task task) { + MavenPublication publication = publishTask.getPublication(); + if (publication instanceof ProjectComponentPublication) { + SoftwareComponentInternal component = ((ProjectComponentPublication) publication).getComponent(); + if (component != null) { //The shadow plugin doesn"t associate a component with the publication + Collection unpublishedDeps = component.getUsages().stream().flatMap(usage -> + usage.getDependencies().stream().filter(ProjectDependency.class::isInstance).map(ProjectDependency.class::cast).filter(moduleDependency -> + !moduleDependency.getDependencyProject().getPlugins().hasPlugin(DeployConvention.class))).collect(toList()); + if (!unpublishedDeps.isEmpty()) { + project.getLogger().warn("{} has applied the deploy plugin but has unpublished project dependencies: {}", project, unpublishedDeps); + } + } + } + } + })); + + project.getTasks().register("install", task -> + task.dependsOn(project.getTasks().named(MavenPublishPlugin.PUBLISH_LOCAL_LIFECYCLE_TASK_NAME)) + ); + + project.getPlugins().withType(JavaPlugin.class).configureEach(plugin -> { + project.getExtensions().configure(JavaPluginExtension.class, java -> { + java.withJavadocJar(); + java.withSourcesJar(); + }); + + project.afterEvaluate(p -> { + p.getExtensions().configure(PublishingExtension.class, publishing -> { + if (publishing.getPublications().isEmpty()) { + publishing.publications(publications -> publications.register("mavenJava", MavenPublication.class, mavenJava -> mavenJava.from(p.getComponents().getByName("java")))); + } + }); + + p.getTasks().withType(GenerateMavenPom.class).all(pomTask -> { + MavenProjectIdentity identity = ((MavenPomInternal) pomTask.getPom()).getProjectIdentity(); + TaskProvider pomPropertiesTask = project.getTasks().register(pomTask.getName().replace("PomFile", "PomProperties"), WriteProperties.class, task -> { + task.dependsOn(pomTask); + task.setGroup(PUBLISH_TASK_GROUP); + task.setOutputFile(new File(pomTask.getDestination().getParentFile(), "pom.properties")); + task.property("groupId", identity.getGroupId()); + task.property("artifactId", identity.getArtifactId()); + task.property("version", identity.getVersion()); + }); + + project.getTasks().withType(Jar.class).configureEach(jar -> { + jar.into("META-INF/maven/" + identity.getGroupId().get() + "/" + identity.getArtifactId().get(), spec -> { + spec.from(pomTask, pom -> pom.rename(".*", "pom.xml")); + spec.from(pomPropertiesTask); + }); + }); + }); + }); + }); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/JacocoConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/JacocoConvention.java new file mode 100644 index 0000000000..66f96ef814 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/JacocoConvention.java @@ -0,0 +1,28 @@ +package org.ehcache.build.conventions; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.tasks.testing.Test; +import org.gradle.testing.jacoco.plugins.JacocoPlugin; +import org.gradle.testing.jacoco.plugins.JacocoTaskExtension; +import org.gradle.testing.jacoco.tasks.JacocoReport; + +public class JacocoConvention implements Plugin { + + @Override + public void apply(Project project) { + project.getPlugins().apply(JacocoPlugin.class); + + project.getTasks().withType(JacocoReport.class).configureEach(jacocoReport -> { + jacocoReport.getReports().configureEach(report -> { + report.getRequired().set(false); + }); + }); + + project.getTasks().withType(Test.class).configureEach(test -> { + test.getExtensions().configure(JacocoTaskExtension.class, jacoco -> { + jacoco.getExcludes().add("org.terracotta.tripwire.*"); + }); + }); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/JavaBaseConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/JavaBaseConvention.java new file mode 100644 index 0000000000..83ce040a2c --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/JavaBaseConvention.java @@ -0,0 +1,131 @@ +package org.ehcache.build.conventions; + +import org.gradle.api.JavaVersion; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.java.archives.Attributes; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.tasks.bundling.Jar; +import org.gradle.api.tasks.compile.JavaCompile; +import org.gradle.api.tasks.javadoc.Javadoc; +import org.gradle.api.tasks.testing.Test; +import org.gradle.external.javadoc.CoreJavadocOptions; +import org.gradle.internal.jvm.JavaInfo; +import org.gradle.internal.jvm.Jvm; +import org.gradle.process.internal.ExecException; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.OutputStream; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static java.util.Arrays.asList; + +public class JavaBaseConvention implements Plugin { + @Override + public void apply(Project project) { + project.getPlugins().apply(JavaBasePlugin.class); + project.getPlugins().apply(BaseConvention.class); + + JavaInfo testJava = fetchTestJava(project); + project.getExtensions().getExtraProperties().set("testJava", testJava); + + project.getExtensions().configure(JavaPluginExtension.class, java -> { + java.setSourceCompatibility(JavaVersion.VERSION_1_8); + java.setTargetCompatibility(JavaVersion.VERSION_1_8); + }); + + project.getTasks().withType(Jar.class).configureEach(jar -> { + jar.manifest(manifest -> { + Attributes attributes = manifest.getAttributes(); + attributes.put("Implementation-Title", project.getName()); + attributes.put("Implementation-Vendor-Id", project.getGroup()); + attributes.put("Implementation-Version", project.getVersion()); + attributes.put("Implementation-Revision", getRevision(project)); + attributes.put("Built-By", System.getProperty("user.name")); + attributes.put("Built-JDK", System.getProperty("java.version")); + }); + jar.from(project.getRootProject().file("LICENSE")); + }); + + project.getTasks().withType(Test.class).configureEach(test -> { + test.setExecutable(testJava.getJavaExecutable()); + test.setMaxHeapSize("256m"); + test.setMaxParallelForks(16); + test.systemProperty("java.awt.headless", "true"); + }); + + project.getTasks().withType(JavaCompile.class).configureEach(compile -> { + compile.getOptions().setEncoding("UTF-8"); + compile.getOptions().setCompilerArgs(asList("-Werror", "-Xlint:all")); + }); + + project.getTasks().withType(Javadoc.class).configureEach(javadoc -> { + javadoc.setTitle(project.getName() + " " + project.getVersion() + " API"); + javadoc.exclude("**/internal/**"); + javadoc.getOptions().setEncoding("UTF-8"); + ((CoreJavadocOptions) javadoc.getOptions()).addStringOption("Xdoclint:none", "-quiet"); + }); + } + + private static JavaInfo fetchTestJava(Project project) { + Object testVM = project.findProperty("testVM"); + if (testVM == null) { + return Jvm.current(); + } else { + File jvmHome = project.file(testVM); + if (!jvmHome.exists() && project.hasProperty(testVM.toString())) { + testVM = project.property(testVM.toString()); + jvmHome = project.file(testVM); + } + + return jvmForHome(project, jvmHome); + } + } + + private static final Pattern VERSION_OUTPUT = Pattern.compile("\\w+ version \"(?.+)\""); + private static Jvm jvmForHome(Project project, File home) { + File java = Jvm.forHome(home).getJavaExecutable(); + + OutputStream stdout = new ByteArrayOutputStream(); + OutputStream stderr = new ByteArrayOutputStream(); + project.exec(spec -> { + spec.executable(java); + spec.args("-version"); + spec.setStandardOutput(stdout); + spec.setErrorOutput(stderr); + }); + String versionOutput = stderr.toString(); + Matcher matcher = VERSION_OUTPUT.matcher(versionOutput); + if (matcher.find()) { + return Jvm.discovered(home, null, JavaVersion.toVersion(matcher.group("version"))); + } else { + throw new IllegalArgumentException("Could not parse version of " + java + " from output:\n" + versionOutput); + } + } + + + private static Object getRevision(Project project) { + String envCommit = System.getenv("GIT_COMMIT"); + if(envCommit != null) { + return envCommit; + } else { + try { + ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + ByteArrayOutputStream stderr = new ByteArrayOutputStream(); + project.exec(spec -> { + spec.executable("git"); + spec.args("rev-parse", "HEAD"); + spec.setStandardOutput(stdout); + spec.setErrorOutput(stderr); + }).assertNormalExitValue(); + + return stdout.toString().trim(); + } catch (ExecException e) { + return "Unknown"; + } + } + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/JavaConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/JavaConvention.java new file mode 100644 index 0000000000..5e50b677ae --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/JavaConvention.java @@ -0,0 +1,27 @@ +package org.ehcache.build.conventions; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.dsl.DependencyHandler; +import org.gradle.api.plugins.JavaPlugin; + +public class JavaConvention implements Plugin { + @Override + public void apply(Project project) { + project.getPlugins().apply(JavaBaseConvention.class); + project.getPlugins().apply(JavaPlugin.class); + project.getPlugins().apply(CheckstyleConvention.class); + project.getPlugins().apply(JacocoConvention.class); + project.getPlugins().apply(SpotbugsConvention.class); + + DependencyHandler dependencies = project.getDependencies(); + dependencies.add(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME, "org.slf4j:slf4j-api:" + project.property("slf4jVersion")); + dependencies.add(JavaPlugin.TEST_RUNTIME_ONLY_CONFIGURATION_NAME, "org.slf4j:slf4j-simple:" + project.property("slf4jVersion")); + + dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "junit:junit:" + project.property("junitVersion")); + dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "org.assertj:assertj-core:" + project.property("assertjVersion")); + dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "org.hamcrest:hamcrest-library:" + project.property("hamcrestVersion")); + dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "org.mockito:mockito-core:" + project.property("mockitoVersion")); + dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "org.terracotta:terracotta-utilities-test-tools:" + project.property("terracottaUtilitiesVersion")); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/JavaLibraryConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/JavaLibraryConvention.java new file mode 100644 index 0000000000..77b15e4b37 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/JavaLibraryConvention.java @@ -0,0 +1,14 @@ +package org.ehcache.build.conventions; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.plugins.JavaLibraryPlugin; + +public class JavaLibraryConvention implements Plugin { + + @Override + public void apply(Project project) { + project.getPlugins().apply(JavaConvention.class); + project.getPlugins().apply(JavaLibraryPlugin.class); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/SpotbugsConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/SpotbugsConvention.java new file mode 100644 index 0000000000..9815385453 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/SpotbugsConvention.java @@ -0,0 +1,53 @@ +package org.ehcache.build.conventions; + +import com.github.spotbugs.snom.SpotBugsExtension; +import com.github.spotbugs.snom.SpotBugsPlugin; +import com.github.spotbugs.snom.SpotBugsTask; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.JavaPluginExtension; + +public class SpotbugsConvention implements Plugin { + + @Override + public void apply(Project project) { + project.getPlugins().apply(SpotBugsPlugin.class); + + SpotBugsExtension spotbugs = project.getExtensions().getByType(SpotBugsExtension.class); + + spotbugs.getIgnoreFailures().set(false); + // Later versions of Spotbugs have stupid heuristics for EI_EXPOSE_REP* + spotbugs.getToolVersion().set("4.2.3"); + + project.getPlugins().withType(JavaBasePlugin.class).configureEach(plugin -> { + + project.getExtensions().configure(JavaPluginExtension.class, java -> { + java.getSourceSets().configureEach(sourceSet -> { + project.getDependencies().add(sourceSet.getCompileOnlyConfigurationName(), + "com.github.spotbugs:spotbugs-annotations:" + spotbugs.getToolVersion().get()); + }); + + project.getTasks().withType(SpotBugsTask.class).configureEach(task -> { + if (task.getName().contains("Test")) { + task.setEnabled(false); + } else { + task.getReports().register("xml", report -> report.setEnabled(true)); + task.getReports().register("html", report -> report.setEnabled(false)); + } + }); + }); + + }); + + + project.getConfigurations().named("spotbugs", config -> { + config.getResolutionStrategy().dependencySubstitution(subs -> { + subs.substitute(subs.module("org.apache.commons:commons-lang3:3.11")) + .using(subs.module("org.apache.commons:commons-lang3:3.12.0")) + .because("Spotbugs has dependency divergences"); + }); + }); + + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/WarConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/WarConvention.java new file mode 100644 index 0000000000..ec469eda6a --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/conventions/WarConvention.java @@ -0,0 +1,13 @@ +package org.ehcache.build.conventions; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.plugins.WarPlugin; + +public class WarConvention implements Plugin { + @Override + public void apply(Project project) { + project.getPlugins().apply(WarPlugin.class); + project.getPlugins().apply(JavaConvention.class); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/PackagePlugin.java b/build-logic/src/main/java/org/ehcache/build/plugins/PackagePlugin.java new file mode 100644 index 0000000000..40070ea348 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/plugins/PackagePlugin.java @@ -0,0 +1,285 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.build.plugins; + +import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar; +import org.ehcache.build.conventions.BaseConvention; +import org.ehcache.build.conventions.BndConvention; +import org.ehcache.build.conventions.JavaBaseConvention; +import org.ehcache.build.util.OsgiManifestJarExtension; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.capabilities.Capability; +import org.gradle.api.component.AdhocComponentWithVariants; +import org.gradle.api.component.SoftwareComponentFactory; +import org.gradle.api.file.DuplicatesStrategy; +import org.gradle.api.file.FileCollection; +import org.gradle.api.file.FileTree; +import org.gradle.api.internal.project.ProjectInternal; +import org.gradle.api.plugins.BasePlugin; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.jvm.internal.JvmPluginServices; +import org.gradle.api.provider.Provider; +import org.gradle.api.publish.PublishingExtension; +import org.gradle.api.publish.maven.MavenPublication; +import org.gradle.api.publish.maven.plugins.MavenPublishPlugin; +import org.gradle.api.tasks.Sync; +import org.gradle.api.tasks.TaskProvider; +import org.gradle.api.tasks.bundling.Jar; +import org.gradle.api.tasks.bundling.Zip; +import org.gradle.api.tasks.javadoc.Javadoc; +import org.gradle.internal.jvm.Jvm; +import org.gradle.internal.resolve.ArtifactResolveException; +import org.gradle.internal.service.ServiceRegistry; +import org.gradle.language.base.plugins.LifecycleBasePlugin; + +import java.io.File; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import static java.lang.Integer.parseInt; +import static java.util.Collections.emptyList; +import static org.ehcache.build.plugins.VariantPlugin.COMMON_SOURCE_SET_NAME; +import static org.ehcache.build.util.PluginUtils.bucket; +import static org.ehcache.build.util.PluginUtils.createBucket; +import static org.ehcache.build.util.PluginUtils.capitalize; +import static org.gradle.api.attributes.DocsType.JAVADOC; +import static org.gradle.api.attributes.DocsType.SOURCES; +import static org.gradle.api.attributes.DocsType.USER_MANUAL; +import static org.gradle.api.internal.artifacts.JavaEcosystemSupport.configureDefaultTargetPlatform; +import static org.gradle.api.plugins.JavaPlugin.API_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.API_ELEMENTS_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.COMPILE_ONLY_API_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.JAVADOC_ELEMENTS_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.RUNTIME_ELEMENTS_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.RUNTIME_ONLY_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.SOURCES_ELEMENTS_CONFIGURATION_NAME; + +/** + * EhDistribute + */ +public class PackagePlugin implements Plugin { + + private static final String CONTENTS_CONFIGURATION_NAME = "contents"; + + @Override + public void apply(Project project) { + project.getPlugins().apply(BaseConvention.class); + project.getPlugins().apply(JavaBaseConvention.class); + + ServiceRegistry projectServices = ((ProjectInternal) project).getServices(); + JvmPluginServices jvmPluginServices = projectServices.get(JvmPluginServices.class); + SoftwareComponentFactory softwareComponentFactory = projectServices.get(SoftwareComponentFactory.class); + AdhocComponentWithVariants javaComponent = softwareComponentFactory.adhoc("java"); + project.getComponents().add(javaComponent); + + TaskProvider asciidocZip = project.getTasks().register("asciidocZip", Zip.class, zip -> { + zip.getArchiveClassifier().set("docs"); + zip.from(project.getTasks().getByPath(":docs:userDoc")); + }); + Configuration userdocElements = jvmPluginServices.createOutgoingElements("userdocElements", builder -> + builder.published().artifact(asciidocZip).providesAttributes(attributes -> attributes.documentation(USER_MANUAL))); + javaComponent.addVariantsFromConfiguration(userdocElements, variantDetails -> {}); + + createDefaultPackage(project); + + project.getPlugins().withType(VariantPlugin.class).configureEach(plugin -> { + Configuration commonContents = createBucket(project, CONTENTS_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME); + Configuration commonApi = createBucket(project, API_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME); + Configuration commonImplementation = createBucket(project, IMPLEMENTATION_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME).extendsFrom(commonApi); + Configuration commonCompileOnlyApi = createBucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME); + Configuration commonRuntimeOnly = createBucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME); + + project.getConfigurations().named(CONTENTS_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonContents)); + project.getConfigurations().named(API_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonApi)); + project.getConfigurations().named(IMPLEMENTATION_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonImplementation)); + project.getConfigurations().named(COMPILE_ONLY_API_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonCompileOnlyApi)); + project.getConfigurations().named(RUNTIME_ONLY_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonRuntimeOnly)); + + project.getExtensions().configure(VariantPlugin.VariantExtension.class, variants -> { + variants.getVariants().configureEach(variant -> { + createPackage(project, variant.getName(), variant.getCapabilities().get()); + + bucket(project, CONTENTS_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonContents); + bucket(project, API_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonApi); + bucket(project, IMPLEMENTATION_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonImplementation); + bucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonCompileOnlyApi); + bucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonRuntimeOnly); + }); + }); + }); + + project.getPlugins().withType(MavenPublishPlugin.class).configureEach(plugin -> { + project.getExtensions().configure(PublishingExtension.class, publishing -> { + publishing.getPublications().register("mavenJava", MavenPublication.class, mavenPublication -> { + mavenPublication.from(javaComponent); + }); + }); + }); + } + + private void createDefaultPackage(Project project) { + createPackage(project, null, emptyList()); + } + + private void createPackage(Project project, String variant, List capabilities) { + ServiceRegistry projectServices = ((ProjectInternal) project).getServices(); + JvmPluginServices jvmPluginServices = projectServices.get(JvmPluginServices.class); + + Configuration contents = createBucket(project, CONTENTS_CONFIGURATION_NAME, variant); + + Configuration contentsRuntimeElements = jvmPluginServices.createResolvableConfiguration(camelPrefix(variant, "contentsRuntimeElements"), builder -> + builder.extendsFrom(contents).requiresJavaLibrariesRuntime()); + + Configuration contentSourcesElements = jvmPluginServices.createResolvableConfiguration(camelPrefix(variant, "contentsSourcesElements"), builder -> + builder.extendsFrom(contents).requiresAttributes(refiner -> refiner.documentation(SOURCES))); + + TaskProvider shadowJar = project.getTasks().register(camelPrefix(variant, "jar"), ShadowJar.class, shadow -> { + shadow.setGroup(BasePlugin.BUILD_GROUP); + shadow.getArchiveClassifier().set(variant); + + shadow.setConfigurations(Collections.singletonList(contentsRuntimeElements)); + shadow.relocate("org.terracotta.statistics.", "org.ehcache.shadow.org.terracotta.statistics."); + shadow.relocate("org.terracotta.offheapstore.", "org.ehcache.shadow.org.terracotta.offheapstore."); + shadow.relocate("org.terracotta.context.", "org.ehcache.shadow.org.terracotta.context."); + shadow.relocate("org.terracotta.utilities.", "org.ehcache.shadow.org.terracotta.utilities."); + + shadow.mergeServiceFiles(); + + shadow.exclude("META-INF/MANIFEST.MF", "LICENSE", "NOTICE"); + + // LICENSE is included in root gradle build + shadow.from(new File(project.getRootDir(), "NOTICE")); + shadow.setDuplicatesStrategy(DuplicatesStrategy.EXCLUDE); + }); + + Provider sourcesTree = project.provider(() -> contentSourcesElements.getResolvedConfiguration().getLenientConfiguration().getAllModuleDependencies().stream().flatMap(d -> d.getModuleArtifacts().stream()) + .map(artifact -> { + try { + return Optional.of(artifact.getFile()); + } catch (ArtifactResolveException e) { + return Optional.empty(); + } + }).filter(Optional::isPresent).map(Optional::get).distinct().map(file -> { + if (file.isFile()) { + return project.zipTree(file); + } else { + return project.fileTree(file); + } + }).reduce(FileTree::plus).orElse(project.files().getAsFileTree())); + + TaskProvider sources = project.getTasks().register(camelPrefix(variant, "sources"), Sync.class, sync -> { + sync.dependsOn(contentSourcesElements); + sync.from(sourcesTree, spec -> spec.exclude("META-INF/**", "LICENSE", "NOTICE")); + sync.into(project.getLayout().getBuildDirectory().dir(camelPrefix(variant,"sources"))); + }); + + TaskProvider sourcesJar = project.getTasks().register(camelPrefix(variant, "sourcesJar"), Jar.class, jar -> { + jar.setGroup(BasePlugin.BUILD_GROUP); + jar.from(sources); + jar.from(shadowJar, spec -> spec.include("META-INF/**", "LICENSE", "NOTICE")); + jar.getArchiveClassifier().set(kebabPrefix(variant, "sources")); + }); + + TaskProvider javadoc = project.getTasks().register(camelPrefix(variant, "javadoc"), Javadoc.class, task -> { + task.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); + task.setTitle(project.getName() + " " + project.getVersion() + " API"); + task.source(sources); + task.include("*.java"); + task.setClasspath(contentsRuntimeElements); + task.setDestinationDir(new File(project.getBuildDir(), "docs/" + camelPrefix(variant, "javadoc"))); + }); + TaskProvider javadocJar = project.getTasks().register(camelPrefix(variant, "javadocJar"), Jar.class, jar -> { + jar.setGroup(BasePlugin.BUILD_GROUP); + jar.from(javadoc); + jar.getArchiveClassifier().set(kebabPrefix(variant, "javadoc")); + }); + + Configuration api = createBucket(project, API_CONFIGURATION_NAME, variant); + Configuration implementation = createBucket(project, IMPLEMENTATION_CONFIGURATION_NAME, variant).extendsFrom(api); + Configuration compileOnlyApi = createBucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, variant); + Configuration runtimeOnly = createBucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, variant); + + Configuration apiElements = jvmPluginServices.createOutgoingElements(camelPrefix(variant, API_ELEMENTS_CONFIGURATION_NAME), builder -> + builder.extendsFrom(api, compileOnlyApi).published().providesApi().withCapabilities(capabilities).artifact(shadowJar)); + configureDefaultTargetPlatform(apiElements, parseInt(Jvm.current().getJavaVersion().getMajorVersion())); + Configuration compileClasspath = jvmPluginServices.createResolvableConfiguration(camelPrefix(variant, COMPILE_CLASSPATH_CONFIGURATION_NAME), builder -> + builder.extendsFrom(apiElements).requiresJavaLibrariesRuntime()); + Configuration runtimeElements = jvmPluginServices.createOutgoingElements(camelPrefix(variant, RUNTIME_ELEMENTS_CONFIGURATION_NAME), builder -> + builder.extendsFrom(implementation, runtimeOnly).published().providesRuntime().withCapabilities(capabilities).artifact(shadowJar)); + configureDefaultTargetPlatform(runtimeElements, parseInt(Jvm.current().getJavaVersion().getMajorVersion())); + Configuration runtimeClasspath = jvmPluginServices.createResolvableConfiguration(camelPrefix(variant, RUNTIME_CLASSPATH_CONFIGURATION_NAME), builder -> + builder.extendsFrom(runtimeElements).requiresJavaLibrariesRuntime()); + + Configuration sourcesElements = jvmPluginServices.createOutgoingElements(camelPrefix(variant, SOURCES_ELEMENTS_CONFIGURATION_NAME), builder -> + builder.published().artifact(sourcesJar).withCapabilities(capabilities).providesAttributes(attributes -> attributes.documentation(SOURCES).asJar())); + Configuration javadocElements = jvmPluginServices.createOutgoingElements(camelPrefix(variant, JAVADOC_ELEMENTS_CONFIGURATION_NAME), builder -> + builder.published().artifact(javadocJar).withCapabilities(capabilities).providesAttributes(attributes -> attributes.documentation(JAVADOC).asJar())); + + shadowJar.configure(shadow -> { + OsgiManifestJarExtension osgiExtension = new OsgiManifestJarExtension(shadow); + osgiExtension.getClasspath().from(runtimeClasspath); + osgiExtension.getSources().from(sources); + BndConvention.bundleDefaults(project).execute(osgiExtension.getInstructions()); + }); + + project.getComponents().named("java", AdhocComponentWithVariants.class, java -> { + java.addVariantsFromConfiguration(apiElements, variantDetails -> { + variantDetails.mapToMavenScope("compile"); + if (variant != null) { + variantDetails.mapToOptional(); + } + }); + java.addVariantsFromConfiguration(runtimeElements, variantDetails -> { + variantDetails.mapToMavenScope("runtime"); + if (variant != null) { + variantDetails.mapToOptional(); + } + }); + java.addVariantsFromConfiguration(sourcesElements, variantDetails -> {}); + java.addVariantsFromConfiguration(javadocElements, variantDetails -> {}); + }); + + + project.getTasks().named(LifecycleBasePlugin.ASSEMBLE_TASK_NAME).configure(task -> { + task.dependsOn(shadowJar); + task.dependsOn(javadocJar); + task.dependsOn(sourcesJar); + }); + } + + private static String camelPrefix(String variant, String thing) { + if (variant == null) { + return thing; + } else { + return variant + capitalize(thing); + } + } + + private static String kebabPrefix(String variant, String thing) { + if (variant == null) { + return thing; + } else { + return variant + "-" + thing; + } + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/VariantPlugin.java b/build-logic/src/main/java/org/ehcache/build/plugins/VariantPlugin.java new file mode 100644 index 0000000000..8cc14e802b --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/plugins/VariantPlugin.java @@ -0,0 +1,228 @@ +package org.ehcache.build.plugins; + +import aQute.bnd.gradle.BndBuilderPlugin; +import aQute.bnd.gradle.BundleTaskExtension; +import org.ehcache.build.conventions.BndConvention; +import org.ehcache.build.util.PluginUtils; +import org.gradle.api.Action; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.capabilities.Capability; +import org.gradle.api.file.Directory; +import org.gradle.api.file.SourceDirectorySet; +import org.gradle.api.internal.HasConvention; +import org.gradle.api.internal.artifacts.dsl.CapabilityNotationParserFactory; +import org.gradle.api.internal.project.ProjectInternal; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.plugins.jvm.internal.JvmPluginServices; +import org.gradle.api.provider.ListProperty; +import org.gradle.api.provider.Property; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.Sync; +import org.gradle.api.tasks.TaskProvider; +import org.gradle.api.tasks.bundling.Jar; +import org.gradle.internal.typeconversion.NotationParser; +import org.gradle.language.base.plugins.LifecycleBasePlugin; +import org.unbrokendome.gradle.plugins.xjc.XjcPlugin; +import org.unbrokendome.gradle.plugins.xjc.XjcSourceSetConvention; + +import java.util.function.Function; + +import static java.util.Objects.requireNonNull; +import static org.ehcache.build.util.PluginUtils.capitalize; +import static org.gradle.api.attributes.DocsType.SOURCES; +import static org.gradle.api.plugins.JavaPlugin.API_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.API_ELEMENTS_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.COMPILE_ONLY_API_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.RUNTIME_ELEMENTS_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.RUNTIME_ONLY_CONFIGURATION_NAME; +import static org.gradle.api.plugins.JavaPlugin.SOURCES_ELEMENTS_CONFIGURATION_NAME; +import static org.gradle.api.tasks.SourceSet.MAIN_SOURCE_SET_NAME; + +public class VariantPlugin implements Plugin { + + protected static final String COMMON_SOURCE_SET_NAME = "common"; + + @Override + public void apply(Project project) { + VariantExtension variants = project.getExtensions().create("variants", VariantExtension.class, project); + configureJavaPluginBehavior(project, variants); + } + + private void configureJavaPluginBehavior(Project project, VariantExtension variants) { + project.getPlugins().withType(JavaPlugin.class).configureEach(javaPlugin -> { + JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class); + + variants.getVariants().configureEach(variant -> { + if (variant.hasSources().get()) { + SourceSet commonSources = java.getSourceSets().findByName(COMMON_SOURCE_SET_NAME); + if (commonSources == null) { + commonSources = java.getSourceSets().create(COMMON_SOURCE_SET_NAME, common -> { + project.getTasks().named(common.getCompileJavaTaskName(), task -> task.setEnabled(false)); + project.getTasks().named(common.getClassesTaskName(), task -> task.setEnabled(false)); + linkToCommonSource(project, common, java.getSourceSets().getByName(MAIN_SOURCE_SET_NAME)); + }); + } + SourceSet variantSources = java.getSourceSets().create(variant.getName()); + + linkToCommonSource(project, commonSources, variantSources); + + java.registerFeature(variant.getName(), feature -> { + feature.usingSourceSet(variantSources); + feature.withSourcesJar(); + variant.getCapabilities().get().forEach(capability -> { + feature.capability(capability.getGroup(), capability.getName(), requireNonNull(capability.getVersion())); + }); + }); + + project.getPlugins().withType(BndBuilderPlugin.class).configureEach(bnd -> { + project.getTasks().named(variantSources.getJarTaskName(), Jar.class, jar -> { + jar.setDescription("Assembles a bundle containing the " + variant + " variant classes."); + BundleTaskExtension extension = jar.getExtensions().create(BundleTaskExtension.NAME, BundleTaskExtension.class, jar); + BndConvention.configureBundleDefaults(project, extension); + jar.doLast("buildBundle", extension.buildAction()); + }); + }); + + project.getTasks().named(LifecycleBasePlugin.ASSEMBLE_TASK_NAME).configure(task -> { + task.dependsOn(variantSources.getJarTaskName()); + task.dependsOn(variantSources.getSourcesJarTaskName()); + }); + } else { + SourceSet mainSource = java.getSourceSets().getByName(MAIN_SOURCE_SET_NAME); + + JvmPluginServices jvmPluginServices = ((ProjectInternal) project).getServices().get(JvmPluginServices.class); + + Configuration commonApi = PluginUtils.createBucket(project, API_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME); + project.getConfigurations().named(mainSource.getApiConfigurationName()).configure(config -> config.extendsFrom(commonApi)); + Configuration commonCompileOnlyApi = PluginUtils.createBucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME); + project.getConfigurations().named(mainSource.getCompileOnlyApiConfigurationName()).configure(config -> config.extendsFrom(commonCompileOnlyApi)); + Configuration commonImplementation = PluginUtils.createBucket(project, IMPLEMENTATION_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME); + project.getConfigurations().named(mainSource.getImplementationConfigurationName()).configure(config -> config.extendsFrom(commonImplementation)); + Configuration commonRuntimeOnly = PluginUtils.createBucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME); + project.getConfigurations().named(mainSource.getRuntimeOnlyConfigurationName()).configure(config -> config.extendsFrom(commonRuntimeOnly)); + + Configuration api = PluginUtils.createBucket(project, API_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonApi); + Configuration implementation = PluginUtils.createBucket(project, IMPLEMENTATION_CONFIGURATION_NAME, variant.getName()).extendsFrom(api, commonImplementation); + Configuration compileOnlyApi = PluginUtils.createBucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonCompileOnlyApi); + Configuration runtimeOnly = PluginUtils.createBucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonRuntimeOnly); + + Configuration apiElements = jvmPluginServices.createOutgoingElements(variant.getName() + capitalize(API_ELEMENTS_CONFIGURATION_NAME), builder -> + builder.fromSourceSet(mainSource).withCapabilities(variant.getCapabilities().get()) + .extendsFrom(api, compileOnlyApi).withClassDirectoryVariant().providesApi()); + project.getConfigurations().named(mainSource.getApiElementsConfigurationName(), + config -> config.getOutgoing().getArtifacts().configureEach(artifact -> apiElements.getOutgoing().getArtifacts().add(artifact))); + + Configuration runtimeElements = jvmPluginServices.createOutgoingElements(variant.getName() + capitalize(RUNTIME_ELEMENTS_CONFIGURATION_NAME), builder -> + builder.fromSourceSet(mainSource).withCapabilities(variant.getCapabilities().get()).published() + .extendsFrom(implementation, runtimeOnly).providesRuntime()); + project.getConfigurations().named(mainSource.getRuntimeElementsConfigurationName(), + config -> config.getOutgoing().getArtifacts().configureEach(artifact -> runtimeElements.getOutgoing().getArtifacts().add(artifact))); + + Configuration sourcesElements = jvmPluginServices.createOutgoingElements(variant.getName() + capitalize(SOURCES_ELEMENTS_CONFIGURATION_NAME), builder -> + builder.fromSourceSet(mainSource).withCapabilities(variant.getCapabilities().get()).published() + .providesAttributes(attributes -> attributes.documentation(SOURCES).asJar())); + project.getConfigurations().named(mainSource.getSourcesElementsConfigurationName(), + config -> config.getOutgoing().getArtifacts().configureEach(artifact -> sourcesElements.getOutgoing().getArtifacts().add(artifact))); + } + }); + }); + } + + private static void linkToCommonSource(Project project, SourceSet commonSources, SourceSet derivedSources) { + registerCommonCopyTask(project, commonSources, derivedSources, SourceSet::getJava); + registerCommonCopyTask(project, commonSources, derivedSources, SourceSet::getResources); + + Configuration commonApi = project.getConfigurations().maybeCreate(commonSources.getApiConfigurationName()); + project.getConfigurations().maybeCreate(derivedSources.getApiConfigurationName()).extendsFrom(commonApi); + Configuration commonImplementation = project.getConfigurations().maybeCreate(commonSources.getImplementationConfigurationName()); + project.getConfigurations().maybeCreate(derivedSources.getImplementationConfigurationName()).extendsFrom(commonImplementation); + + project.getPlugins().withType(XjcPlugin.class).configureEach(plugin -> { + Function xjc = sourceSet -> ((HasConvention) sourceSet).getConvention().getPlugin(XjcSourceSetConvention.class); + + XjcSourceSetConvention commonXjc = xjc.apply(commonSources); + project.getTasks().named(commonXjc.getXjcGenerateTaskName(), task -> task.setEnabled(false)); + + registerCommonCopyTask(project, commonSources, derivedSources, xjc.andThen(XjcSourceSetConvention::getXjcSchema)); + registerCommonCopyTask(project, commonSources, derivedSources, xjc.andThen(XjcSourceSetConvention::getXjcCatalog)); + registerCommonCopyTask(project, commonSources, derivedSources, xjc.andThen(XjcSourceSetConvention::getXjcBinding)); + registerCommonCopyTask(project, commonSources, derivedSources, xjc.andThen(XjcSourceSetConvention::getXjcUrl)); + }); + } + + private static void registerCommonCopyTask(Project project, SourceSet common, SourceSet variant, Function type) { + SourceDirectorySet commonSource = type.apply(common); + Provider variantLocation = project.getLayout().getBuildDirectory().dir("generated/sources/common/" + variant.getName() + "/" + commonSource.getName()); + TaskProvider variantTask = project.getTasks().register(variant.getTaskName("copyCommon", commonSource.getName()), Sync.class, sync -> { + sync.from(commonSource); + sync.into(variantLocation); + }); + type.apply(variant).srcDir(variantTask); + } + + public static class Variant { + + private static final NotationParser CAPABILITY_NOTATION_PARSER = new CapabilityNotationParserFactory(true).create(); + + private final String name; + private final Property hasSources; + private final ListProperty capabilities; + + public Variant(String name, ObjectFactory objectFactory) { + this.name = name; + this.hasSources = objectFactory.property(Boolean.class).convention(false); + this.capabilities = objectFactory.listProperty(Capability.class); + + this.hasSources.finalizeValueOnRead(); + this.capabilities.finalizeValueOnRead(); + } + + public String getName() { + return name; + } + + public Property hasSources() { + return hasSources; + } + + public ListProperty getCapabilities() { + return capabilities; + } + + public void withSeparateSource() { + this.hasSources.set(true); + } + + public void capability(Object notation) { + this.capabilities.add(CAPABILITY_NOTATION_PARSER.parseNotation(notation)); + } + } + + public static class VariantExtension { + + private final ObjectFactory objectFactory; + private final NamedDomainObjectContainer variants; + + public VariantExtension(Project project) { + this.objectFactory = project.getObjects(); + this.variants = project.container(Variant.class); + } + + public void variant(String variant, Action action) { + Variant v = new Variant(variant, objectFactory); + action.execute(v); + variants.add(v); + } + + public NamedDomainObjectContainer getVariants() { + return variants; + } + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/VoltronPlugin.java b/build-logic/src/main/java/org/ehcache/build/plugins/VoltronPlugin.java new file mode 100644 index 0000000000..db5299198f --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/plugins/VoltronPlugin.java @@ -0,0 +1,66 @@ +package org.ehcache.build.plugins; + +import org.ehcache.build.conventions.JavaLibraryConvention; +import org.gradle.api.Action; +import org.gradle.api.NamedDomainObjectProvider; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.dsl.DependencyHandler; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.tasks.bundling.Jar; + +import java.io.File; +import java.util.jar.Attributes; +import java.util.stream.Collectors; + +import static java.util.Collections.singletonMap; + +public class VoltronPlugin implements Plugin { + + private static final String VOLTRON_CONFIGURATION_NAME = "voltron"; + private static final String SERVICE_CONFIGURATION_NAME = "service"; + + @Override + public void apply(Project project) { + project.getPlugins().apply(JavaLibraryConvention.class); + + NamedDomainObjectProvider voltron = project.getConfigurations().register(VOLTRON_CONFIGURATION_NAME, config -> { + config.setDescription("Dependencies provided by Voltron from server/lib"); + config.setCanBeConsumed(true); + config.setCanBeResolved(true); + + DependencyHandler dependencyHandler = project.getDependencies(); + String terracottaApisVersion = project.property("terracottaApisVersion").toString(); + String slf4jVersion = project.property("slf4jVersion").toString(); + config.getDependencies().add(dependencyHandler.create("org.terracotta:entity-server-api:" + terracottaApisVersion)); + config.getDependencies().add(dependencyHandler.create("org.terracotta:standard-cluster-services:" + terracottaApisVersion)); + config.getDependencies().add(dependencyHandler.create("org.terracotta:packaging-support:" + terracottaApisVersion)); + config.getDependencies().add(dependencyHandler.create("org.slf4j:slf4j-api:" + slf4jVersion)); + }); + + NamedDomainObjectProvider service = project.getConfigurations().register(SERVICE_CONFIGURATION_NAME, config -> { + config.setDescription("Services consumed by this plugin"); + config.setCanBeResolved(true); + config.setCanBeConsumed(true); + }); + + project.getConfigurations().named(JavaPlugin.API_CONFIGURATION_NAME, config -> { + config.extendsFrom(voltron.get()); + config.extendsFrom(service.get()); + }); + + project.getTasks().named(JavaPlugin.JAR_TASK_NAME, Jar.class, jar -> { + //noinspection Convert2Lambda + jar.doFirst(new Action() { + @Override + public void execute(Task task) { + jar.manifest(manifest -> manifest.attributes(singletonMap(Attributes.Name.CLASS_PATH.toString(), + (project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME).minus(voltron.get()).minus(service.get())) + .getFiles().stream().map(File::getName).collect(Collectors.joining(" "))))); + } + }); + }); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/osgids/GenerateDeclarativeServicesDescriptors.java b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/GenerateDeclarativeServicesDescriptors.java new file mode 100644 index 0000000000..c285f97550 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/GenerateDeclarativeServicesDescriptors.java @@ -0,0 +1,113 @@ +package org.ehcache.build.plugins.osgids; + +import org.apache.felix.scrplugin.Options; +import org.apache.felix.scrplugin.Project; +import org.apache.felix.scrplugin.SCRDescriptorException; +import org.apache.felix.scrplugin.SCRDescriptorFailureException; +import org.apache.felix.scrplugin.SCRDescriptorGenerator; +import org.apache.felix.scrplugin.Source; +import org.gradle.api.DefaultTask; +import org.gradle.api.GradleException; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.DirectoryProperty; +import org.gradle.api.file.EmptyFileVisitor; +import org.gradle.api.file.FileCollection; +import org.gradle.api.file.FileVisitDetails; +import org.gradle.api.tasks.Classpath; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.OutputDirectory; +import org.gradle.api.tasks.TaskAction; + +import java.io.Closeable; +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Set; + +public abstract class GenerateDeclarativeServicesDescriptors extends DefaultTask { + + @InputFiles + public abstract ConfigurableFileCollection getInputFiles(); + + @Classpath + public abstract ConfigurableFileCollection getClasspath(); + + @OutputDirectory + public abstract DirectoryProperty getOutputDirectory(); + + @TaskAction + public void generateDeclarativeServicesDescriptors() throws SCRDescriptorException, SCRDescriptorFailureException, IOException { + final Options scrOptions = createOptions(); + + try (GradleScrProject scrProject = new GradleScrProject(getInputFiles(), getClasspath())) { + final SCRDescriptorGenerator scrGenerator = new SCRDescriptorGenerator(new ScrLoggerAdapter(getLogger())); + scrGenerator.setOptions(scrOptions); + scrGenerator.setProject(scrProject); + + scrGenerator.execute(); + } + } + + private Options createOptions() { + final Options scrOptions = new Options(); + scrOptions.setOutputDirectory(getOutputDirectory().get().getAsFile()); + scrOptions.setStrictMode(false); + scrOptions.setSpecVersion(null); + + return scrOptions; + } + + static class GradleScrProject extends Project implements Closeable { + + private final URLClassLoader urlClassLoader; + + GradleScrProject(FileCollection input, FileCollection classpath) { + Set classpathFiles = classpath.getFiles(); + URL[] classpathUrls = classpathFiles.stream().map(f -> { + try { + return f.toURI().toURL(); + } catch (MalformedURLException e) { + throw new GradleException("Malformed URL in classpath", e); + } + }).toArray(URL[]::new); + this.urlClassLoader = URLClassLoader.newInstance(classpathUrls, getClass().getClassLoader()); + setClassLoader(urlClassLoader); + setDependencies(classpathFiles); + setSources(createScrSources(input)); + } + + @Override + public void close() throws IOException { + urlClassLoader.close(); + } + + private static Collection createScrSources(FileCollection input) { + Collection sources = new ArrayList<>(); + + input.getAsFileTree().matching(f -> f.include("**/*.class")).visit(new EmptyFileVisitor() { + @Override + public void visitFile(FileVisitDetails fileVisitDetails) { + String dotSeparated = String.join(".", fileVisitDetails.getRelativePath().getSegments()); + String className = dotSeparated.substring(0, dotSeparated.length() - ".class".length()); + File file = fileVisitDetails.getFile(); + sources.add(new Source() { + @Override + public String getClassName() { + return className; + } + + @Override + public File getFile() { + return file; + } + }); + } + }); + return sources; + } + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/osgids/OsgiDsPlugin.java b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/OsgiDsPlugin.java new file mode 100644 index 0000000000..a844ab0af9 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/OsgiDsPlugin.java @@ -0,0 +1,22 @@ +package org.ehcache.build.plugins.osgids; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.TaskProvider; + +public class OsgiDsPlugin implements Plugin { + @Override + public void apply(Project project) { + project.getExtensions().configure(SourceSetContainer.class, sourceSets -> sourceSets.configureEach(sourceSet -> { + String taskName = sourceSet.getTaskName("generate", "DeclarativeServicesDescriptors"); + TaskProvider generateTask = project.getTasks().register(taskName, GenerateDeclarativeServicesDescriptors.class, task -> { + task.setDescription("Generate OSGi Declarative Services XML descriptors for " + sourceSet.getName() + " classes"); + task.getInputFiles().from(sourceSet.getOutput().getClassesDirs()); + task.getClasspath().from(sourceSet.getRuntimeClasspath()); + task.getOutputDirectory().set(project.getLayout().getBuildDirectory().dir("generated/resources/osgi-ds/" + sourceSet.getName())); + }); + sourceSet.getOutput().getGeneratedSourcesDirs().plus(project.fileTree(generateTask)); + })); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/osgids/ScrLoggerAdapter.java b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/ScrLoggerAdapter.java new file mode 100644 index 0000000000..279b26714f --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/ScrLoggerAdapter.java @@ -0,0 +1,149 @@ +/** + * Copyright (C) 2016 Elmar Schug + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.build.plugins.osgids; + +import org.apache.felix.scrplugin.Log; +import org.gradle.api.logging.Logger; + + +final class ScrLoggerAdapter implements Log +{ + private final Logger logger; + + ScrLoggerAdapter(Logger logger) { + this.logger = logger; + } + + @Override + public boolean isDebugEnabled() + { + return logger.isDebugEnabled(); + } + + @Override + public void debug(String content) + { + logger.debug(content); + } + + @Override + public void debug(String content, Throwable error) + { + logger.debug(content, error); + } + + @Override + public void debug(Throwable error) + { + logger.debug(error.toString()); + } + + @Override + public boolean isInfoEnabled() + { + return logger.isInfoEnabled(); + } + + @Override + public void info(String content) + { + logger.info(content); + } + + @Override + public void info(String content, Throwable error) + { + logger.info(content, error); + } + + @Override + public void info(Throwable error) + { + logger.info(error.toString()); + } + + @Override + public boolean isWarnEnabled() + { + return logger.isWarnEnabled(); + } + + @Override + public void warn(String content) + { + logger.warn(content); + } + + @Override + public void warn(String content, String location, int lineNumber) + { + logger.warn("{} [{},{}]", content, location, lineNumber); + } + + @Override + public void warn(String content, String location, int lineNumber, int columNumber) + { + logger.warn("{} [{},{}:{}]", content, location, lineNumber, columNumber); + } + + @Override + public void warn(String content, Throwable error) + { + logger.warn(content, error); + } + + @Override + public void warn(Throwable error) + { + logger.warn(error.toString()); + } + + @Override + public boolean isErrorEnabled() + { + return logger.isErrorEnabled(); + } + + @Override + public void error(String content) + { + logger.error(content); + } + + @Override + public void error(String content, String location, int lineNumber) + { + logger.error("{} [{},}{}]", content, location, lineNumber); + } + + @Override + public void error(String content, String location, int lineNumber, int columnNumber) + { + logger.error("{} [{},{}:{}]", content, location, lineNumber, columnNumber); + } + + @Override + public void error(String content, Throwable error) + { + logger.error(content, error); + } + + @Override + public void error(Throwable error) + { + logger.error(error.toString()); + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/util/OsgiManifestJarExtension.java b/build-logic/src/main/java/org/ehcache/build/util/OsgiManifestJarExtension.java new file mode 100644 index 0000000000..d2e8556173 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/util/OsgiManifestJarExtension.java @@ -0,0 +1,103 @@ +package org.ehcache.build.util; + +import aQute.bnd.osgi.Builder; +import aQute.bnd.osgi.Jar; +import aQute.service.reporter.Report; +import org.gradle.api.Action; +import org.gradle.api.GradleException; +import org.gradle.api.Task; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.provider.MapProperty; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.Classpath; +import org.gradle.api.tasks.ClasspathNormalizer; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputFiles; + +import java.io.File; +import java.util.Map; +import java.util.concurrent.Callable; + +public class OsgiManifestJarExtension { + + private final org.gradle.api.tasks.bundling.Jar jarTask; + private final MapProperty instructions; + private final ConfigurableFileCollection classpath; + private final ConfigurableFileCollection sources; + + public OsgiManifestJarExtension(org.gradle.api.tasks.bundling.Jar jarTask) { + this.jarTask = jarTask; + this.instructions = jarTask.getProject().getObjects().mapProperty(String.class, String.class); + this.classpath = jarTask.getProject().getObjects().fileCollection(); + this.sources = jarTask.getProject().getObjects().fileCollection(); + + jarTask.getInputs().files(classpath).withNormalizer(ClasspathNormalizer.class).withPropertyName("osgi.classpath"); + jarTask.getInputs().files(sources).withPropertyName("osgi.sources"); + jarTask.getInputs().property("osgi.instructions", (Callable>) instructions::get); + + jarTask.getExtensions().add("osgi", this); + jarTask.doLast("buildManifest", new BuildAction()); + } + + public void instruction(String key, String value) { + instructions.put(key, value); + } + + public void instruction(String key, Provider value) { + instructions.put(key, value); + } + + @Input @Classpath + public ConfigurableFileCollection getClasspath() { + return classpath; + } + + @InputFiles + public ConfigurableFileCollection getSources() { + return sources; + } + + @Input + public MapProperty getInstructions() { + return instructions; + } + + + private class BuildAction implements Action { + @Override + public void execute(Task t) { + try (Builder builder = new Builder()) { + File archiveFile = jarTask.getArchiveFile().get().getAsFile(); + + jarTask.getProject().sync(sync -> sync.from(archiveFile).into(jarTask.getTemporaryDir())); + File archiveCopyFile = new File(jarTask.getTemporaryDir(), archiveFile.getName()); + + Jar bundleJar = new Jar(archiveCopyFile); + + builder.setJar(bundleJar); + builder.setClasspath(getClasspath().getFiles()); + builder.setSourcepath(getSources().getFiles().toArray(new File[0])); + builder.addProperties(getInstructions().get()); + + try (Jar builtJar = builder.build()) { + builtJar.write(archiveFile); + } + + if (!builder.isOk()) { + jarTask.getProject().delete(archiveFile); + builder.getErrors().forEach((String msg) -> { + Report.Location location = builder.getLocation(msg); + if ((location != null) && (location.file != null)) { + jarTask.getLogger().error("{}:{}: error: {}", location.file, location.line, msg); + } else { + jarTask.getLogger().error("error : {}", msg); + } + }); + throw new GradleException("Bundle " + archiveFile.getName() + " has errors"); + } + } catch (Exception e) { + throw new GradleException("Error building bundle", e); + } + } + } +} diff --git a/build-logic/src/main/java/org/ehcache/build/util/PluginUtils.java b/build-logic/src/main/java/org/ehcache/build/util/PluginUtils.java new file mode 100644 index 0000000000..619b6413f3 --- /dev/null +++ b/build-logic/src/main/java/org/ehcache/build/util/PluginUtils.java @@ -0,0 +1,48 @@ +package org.ehcache.build.util; + +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; + +import java.util.Locale; + +public class PluginUtils { + + public static Configuration createBucket(Project project, String kind, String variant) { + if (variant == null) { + return createBucket(project, kind); + } else { + Configuration configuration = project.getConfigurations().maybeCreate(variant + capitalize(kind)); + configuration.setDescription(capitalize(kind) + " dependencies for " + variant); + configuration.setVisible(false); + configuration.setCanBeResolved(false); + configuration.setCanBeConsumed(false); + return configuration; + } + } + + public static Configuration createBucket(Project project, String kind) { + Configuration configuration = project.getConfigurations().maybeCreate(kind); + configuration.setDescription(capitalize(kind) + " dependencies"); + configuration.setVisible(false); + configuration.setCanBeResolved(false); + configuration.setCanBeConsumed(false); + return configuration; + } + + public static Configuration bucket(Project project, String kind, String variant) { + if (variant == null) { + return bucket(project, kind); + } else { + return project.getConfigurations().getByName(variant + capitalize(kind)); + } + } + + public static Configuration bucket(Project project, String kind) { + return project.getConfigurations().getByName(kind); + } + + public static String capitalize(String word) { + return word.substring(0, 1).toUpperCase(Locale.ROOT) + word.substring(1); + } + +} diff --git a/build.gradle b/build.gradle index b094bd8526..b107ffb59d 100644 --- a/build.gradle +++ b/build.gradle @@ -13,16 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import scripts.* -import org.gradle.internal.jvm.Jvm plugins { // This adds tasks to auto close or release nexus staging repos // see https://github.com/Codearte/gradle-nexus-staging-plugin/ - id 'io.codearte.nexus-staging' version '0.9.0' - // This adds the ability to print a taskTree - // ./gradlew ... taskTree - id "com.dorongold.task-tree" version "1.3" + id 'io.codearte.nexus-staging' + //OWASP Security Vulnerability Detection + id 'org.owasp.dependencycheck' +} + +wrapper { + distributionType = Wrapper.DistributionType.ALL +} + +allprojects { + version = findProperty('overrideVersion') ?: ehcacheVersion } if (deployUrl.contains('nexus')) { @@ -45,178 +50,25 @@ if (deployUrl.contains('nexus')) { } } -project.nexusStaging { +nexusStaging { username = project.ext.deployUser password = project.ext.deployPwd logger.debug("Nexus Staging: Using login ${username} and url ${serverUrl}") } -// Disable automatic promotion for added safety -closeAndReleaseRepository.enabled = false - - -ext { - - baseVersion = findProperty('overrideVersion') ?: '3.5.0-SNAPSHOT' - - utils = new Utils(baseVersion, logger) - isReleaseVersion = !baseVersion.endsWith('SNAPSHOT') - isCloudbees = System.getenv('JENKINS_URL')?.contains('cloudbees') +tasks.named('closeAndReleaseRepository') { + // Disable automatic promotion for added safety + enabled = false; } - assert (JavaVersion.current().isJava8Compatible()) : 'The Ehcache 3 build requires Java 8+ to run' -ext { - testJava = Jvm.current() -} - -if (hasProperty('testVM')) { - testJava = Utils.jvmForHome(new File(testVM)) - println "Using Test JVM $testJava [Version: $testJava.javaVersion.majorVersion]" -} - -subprojects { - apply plugin: 'java' - apply plugin: 'eclipse' - apply plugin: 'checkstyle' - apply plugin: 'findbugs' - apply plugin: 'jacoco' - - group = 'org.ehcache.modules' - version = baseVersion - - archivesBaseName = "ehcache-${project.name}" - - sourceCompatibility = 1.8 - targetCompatibility = 1.8 - - repositories { - if (project.hasProperty('mvnlocal')) { - mavenLocal() - } - mavenCentral() - maven { url "http://repo.terracotta.org/maven2" } - } - - sourceSets { - slowTest { - java.srcDir 'src/slow-test/java' - resources.srcDir 'src/slow-test/resources' - compileClasspath += sourceSets.test.compileClasspath - runtimeClasspath += sourceSets.test.runtimeClasspath - } - } - - dependencies { - compileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion" - testCompileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion" - testCompile "junit:junit:$junitVersion", "org.assertj:assertj-core:$assertjVersion", "org.hamcrest:hamcrest-library:$hamcrestVersion" - testCompile("org.mockito:mockito-core:$mockitoVersion") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testRuntime "org.slf4j:slf4j-simple:$parent.slf4jVersion" - } - - jar { - utils.fillManifest(manifest,"ehcache-${project.name}") - from "$rootDir/LICENSE" - } - - test { - maxHeapSize = "1408m" - systemProperty 'java.awt.headless', 'true' - if (parent.isCloudbees) { - systemProperty 'disable.concurrent.tests', 'true' - } - } - - task slowTest(type: Test) { - testClassesDirs = sourceSets.slowTest.output.classesDirs - classpath += sourceSets.slowTest.runtimeClasspath - - binResultsDir file("$buildDir/slow-tests-results/binary/$name") - reports.junitXml.destination = file("$buildDir/slow-tests-results") - reports.html.destination = file("$buildDir/reports/slow-tests") - } - - task sourceJar(type: Jar, dependsOn: classes) { - from sourceSets.main.allJava - classifier = 'sources' - } - - javadoc { - title "$project.archivesBaseName $project.version API" - exclude '**/internal/**' - } - - task javadocJar(type: Jar, dependsOn: javadoc) { - from javadoc.destinationDir - classifier = 'javadoc' - } - - artifacts { - archives jar - archives javadocJar - archives sourceJar - } - - checkstyle { - configFile = file("$rootDir/config/checkstyle.xml") - configProperties = ['projectDir':projectDir, 'rootDir':rootDir] - toolVersion = checkstyleVersion - } - - findbugs { - ignoreFailures = false - sourceSets = [sourceSets.main] - toolVersion = findbugsVersion - } - - findbugsMain { - reports { - // Switch from xml to html by changing these flags - xml.enabled = true - html.enabled = false - } - } - - jacoco { - toolVersion = jacocoVersion - } - - jacocoTestReport { - reports { - xml.enabled false - csv.enabled false - } - } - - tasks.withType(AbstractCompile) { - options.with { - fork = true - } - } - tasks.withType(Test) { - executable = testJava.javaExecutable - } - tasks.withType(Javadoc) { - options.addStringOption('Xdoclint:none', '-quiet') - } - - configurations.all { - resolutionStrategy { - failOnVersionConflict() - } - } +dependencyCheck { + failBuildOnCVSS = 0 + suppressionFile = 'config/owasp-supressions.xml' + skipConfigurations += ['checkstyle', 'spotbugs', 'xjcClasspath'] + skipProjects += [':docs', ':demos:00-NoCache', ':demos:01-CacheAside'] } - -allprojects { - tasks.withType(JavaCompile) { - options.encoding = 'UTF-8' - options.compilerArgs += ['-Xlint:unchecked'] - } - tasks.withType(Javadoc) { - options.encoding = 'UTF-8' - } +tasks.register('check') { + dependsOn dependencyCheckAggregate } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle deleted file mode 100644 index 5e7622186e..0000000000 --- a/buildSrc/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: 'groovy' - -repositories { jcenter() } -dependencies { - compile gradleApi() - compile localGroovy() - compile 'com.github.jengelman.gradle.plugins:shadow:2.0.1' -} diff --git a/buildSrc/src/main/groovy/EhDeploy.groovy b/buildSrc/src/main/groovy/EhDeploy.groovy deleted file mode 100644 index 417716d255..0000000000 --- a/buildSrc/src/main/groovy/EhDeploy.groovy +++ /dev/null @@ -1,92 +0,0 @@ -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.artifacts.maven.Conf2ScopeMappingContainer -import org.gradle.api.artifacts.maven.MavenDeployment -import org.gradle.api.plugins.MavenPlugin -import org.gradle.plugins.signing.Sign -import scripts.Utils - -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * EhDeploy - */ -class EhDeploy implements Plugin { - @Override - void apply(Project project) { - - def utils = new Utils(project.baseVersion, project.logger) - - project.plugins.apply 'signing' - project.plugins.apply 'maven' - project.plugins.apply EhPomGenerate // for generating pom.* - - project.configurations { - provided - } - - project.sourceSets { - main { - compileClasspath += project.configurations.provided - } - test { - compileClasspath += project.configurations.provided - runtimeClasspath += project.configurations.provided - } - } - - project.signing { - required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") } - sign project.configurations.getByName('archives') - } - - def artifactFiltering = { - pom.scopeMappings.mappings.remove(project.configurations.testCompile) - pom.scopeMappings.mappings.remove(project.configurations.testRuntime) - pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.provided, Conf2ScopeMappingContainer.PROVIDED) - - utils.pomFiller(pom, project.subPomName, project.subPomDesc) - - } - - project.install { - repositories.mavenInstaller artifactFiltering - } - - project.uploadArchives { - repositories { - mavenDeployer ({ - beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)} - - if (project.isReleaseVersion) { - repository(url: project.deployUrl) { - authentication(userName: project.deployUser, password: project.deployPwd) - } - } else { - repository(id: 'sonatype-nexus-snapshot', url: 'https://oss.sonatype.org/content/repositories/snapshots') { - authentication(userName: project.sonatypeUser, password: project.sonatypePwd) - } - } - } << artifactFiltering) - } - } - - def installer = project.install.repositories.mavenInstaller - def deployer = project.uploadArchives.repositories.mavenDeployer - - } -} diff --git a/buildSrc/src/main/groovy/EhDistribute.groovy b/buildSrc/src/main/groovy/EhDistribute.groovy deleted file mode 100644 index e1d1ba8f5d..0000000000 --- a/buildSrc/src/main/groovy/EhDistribute.groovy +++ /dev/null @@ -1,82 +0,0 @@ -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.artifacts.ProjectDependency -import scripts.Utils - -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * EhDistribute - */ -class EhDistribute implements Plugin { - - @Override - void apply(Project project) { - def utils = new Utils(project.baseVersion, project.logger) - def hashsetOfProjects = project.configurations.compileOnly.dependencies.withType(ProjectDependency).dependencyProject - - project.plugins.apply 'java' - project.plugins.apply 'maven' - project.plugins.apply 'signing' - project.plugins.apply 'com.github.johnrengelman.shadow' - project.plugins.apply EhOsgi - project.plugins.apply EhPomMangle - project.plugins.apply EhDocs - project.plugins.apply EhPomGenerate - - def OSGI_OVERRIDE_KEYS = ['Import-Package', 'Export-Package', 'Private-Package', 'Tool', 'Bnd-LastModified', 'Created-By', 'Require-Capability'] - - project.configurations { - shadowCompile - shadowProvided - } - - project.shadowJar { - configurations = [[project.configurations.compileOnly]] - baseName = "$project.archivesBaseName-shadow" - classifier = '' - dependencies { - exclude({ rdep -> !['org.ehcache', 'org.terracotta'].any({ prefix -> rdep.moduleGroup.startsWith(prefix) })}) - } - mergeServiceFiles() - } - - project.jar { - dependsOn project.shadowJar - from(project.zipTree(project.shadowJar.archivePath.getPath())) { - exclude 'META-INF/MANIFEST.MF', 'LICENSE', 'NOTICE' - } - // LICENSE is included in root gradle build - from "$project.rootDir/NOTICE" - duplicatesStrategy = 'exclude' - } - - - project.sourceJar { - from hashsetOfProjects.flatten { - it.sourceSets.main.allSource - } - } - - - project.signing { - required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") } - sign project.configurations.getByName('archives') - } - - } -} diff --git a/buildSrc/src/main/groovy/EhDocs.groovy b/buildSrc/src/main/groovy/EhDocs.groovy deleted file mode 100644 index 0a900fb480..0000000000 --- a/buildSrc/src/main/groovy/EhDocs.groovy +++ /dev/null @@ -1,76 +0,0 @@ -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.artifacts.ProjectDependency -import org.gradle.api.tasks.bundling.Jar -import org.gradle.api.tasks.bundling.Zip -import org.gradle.api.tasks.javadoc.Javadoc -import scripts.Utils - -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * EhDocs - * Handle javadocs and API/SPI/asciidoc - */ -class EhDocs implements Plugin { - - @Override - void apply(Project project) { - def utils = new Utils(project.baseVersion, project.logger) - def hashsetOfProjects = project.configurations.compile.dependencies.withType(ProjectDependency).dependencyProject + - project.configurations.compileOnly.dependencies.withType(ProjectDependency).dependencyProject - - project.javadoc { - title "$project.archivesBaseName $project.version API" - source hashsetOfProjects.javadoc.source - classpath = project.files(hashsetOfProjects.javadoc.classpath) - project.ext.properties.javadocExclude?.tokenize(',').each { - exclude it.trim() - } - } - - if (!project.hasProperty('spiJavadocDisable')) { - - project.task('spiJavadoc', type: Javadoc) { - title "$project.archivesBaseName $project.version API & SPI" - source hashsetOfProjects.javadoc.source - classpath = project.files(hashsetOfProjects.javadoc.classpath) - exclude '**/internal/**' - destinationDir = project.file("$project.docsDir/spi-javadoc") - } - - project.task('spiJavadocJar', type: Jar, dependsOn: 'spiJavadoc') { - classifier = 'spi-javadoc' - from project.tasks.getByPath('spiJavadoc').destinationDir - } - - } - - project.task('asciidocZip', type: Zip, dependsOn: ':docs:asciidoctor') { - classifier = 'docs' - from project.tasks.getByPath(':docs:asciidoctor').outputDir - } - - project.artifacts { - archives project.asciidocZip - if (!project.hasProperty('spiJavadocDisable')) { - archives project.spiJavadocJar - } - } - - } -} diff --git a/buildSrc/src/main/groovy/EhOsgi.groovy b/buildSrc/src/main/groovy/EhOsgi.groovy deleted file mode 100644 index 6b16d01d1c..0000000000 --- a/buildSrc/src/main/groovy/EhOsgi.groovy +++ /dev/null @@ -1,95 +0,0 @@ -import com.github.jengelman.gradle.plugins.shadow.tasks.DefaultInheritManifest -import groovy.json.JsonSlurper -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.artifacts.ProjectDependency -import org.gradle.api.internal.file.FileResolver -import org.gradle.api.plugins.osgi.OsgiPluginConvention -import scripts.Utils - -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * EhOsgi - * OSGI additions to the manifest controlled by osgi key in gradle.properties - * This plugin supports shadowJar if available - */ -class EhOsgi implements Plugin { - - @Override - void apply(Project project) { - def utils = new Utils(project.baseVersion, project.logger) - def hashsetOfProjects = project.configurations.compile.dependencies.withType(ProjectDependency).dependencyProject + - project.configurations.compileOnly.dependencies.withType(ProjectDependency).dependencyProject - hashsetOfProjects += project //self also, in case the invoking project defines osgi properties - - project.plugins.apply 'java' - project.plugins.apply 'maven' - project.plugins.apply 'signing' - - def OSGI_OVERRIDE_KEYS = ['Import-Package', 'Export-Package', 'Private-Package', 'Tool', 'Bnd-LastModified', 'Created-By', 'Require-Capability'] - - project.jar.doFirst { - manifest = new DefaultInheritManifest(getServices().get(FileResolver.class)) - if (project.hasProperty('shadowJar')) { - manifest.inheritFrom "$project.buildDir/tmp/shadowJar/MANIFEST.MF" - } - utils.fillManifest(manifest, project.archivesBaseName) - - def osgiConvention = new OsgiPluginConvention(project) - def osgiManifest = osgiConvention.osgiManifest { - - if (project.hasProperty('shadowJar')) { - classesDir = project.shadowJar.archivePath - classpath = project.files(project.configurations.shadowCompile, project.configurations.shadowProvided) - } else { - classesDir = project.sourceSets.main.java.outputDir - classpath = project.sourceSets.main.compileClasspath - } - - // Metadata - instructionReplace 'Bundle-Name', "$project.archivesBaseName 3" - instructionReplace 'Bundle-SymbolicName', "org.ehcache.$project.archivesBaseName" - instruction 'Bundle-Description', 'Ehcache is an open-source caching library, compliant with the JSR-107 standard.' - instruction 'Bundle-DocURL', 'http://ehcache.org' - instruction 'Bundle-License', 'LICENSE' - instruction 'Bundle-Vendor', 'Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.' - instruction 'Bundle-RequiredExecutionEnvironment', 'JavaSE-1.8' - - hashsetOfProjects.findAll({ p -> p.ext.properties.osgi}).each{ prop -> - new JsonSlurper().parseText(prop.ext.properties.osgi).each { - project.logger.info "OSGI: ${it.key}: ${it.value}" - instruction(it.key, *it.value) - } - } - - instruction 'Export-Package', '*' - instruction 'Import-Package', '*' - } - manifest.inheritFrom(osgiManifest) { - eachEntry { - if (it.getKey().startsWith('Bundle') || OSGI_OVERRIDE_KEYS.contains(it.getKey())) { - it.setValue(it.getMergeValue()) - } else { - it.setValue(it.getBaseValue()) - } - } - } - } - - } -} diff --git a/buildSrc/src/main/groovy/EhPomGenerate.groovy b/buildSrc/src/main/groovy/EhPomGenerate.groovy deleted file mode 100644 index 81761ffe63..0000000000 --- a/buildSrc/src/main/groovy/EhPomGenerate.groovy +++ /dev/null @@ -1,125 +0,0 @@ - - -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.publish.maven.MavenPublication -import scripts.Utils - -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * EhPomGenerate: - * Creates pom.xml and pom.properties to be included in produced jars - * Mimics standard maven jar layout. - */ -class EhPomGenerate implements Plugin { - - @Override - void apply(Project project) { - - def utils = new Utils(project.baseVersion, project.logger) - - project.plugins.apply 'maven-publish' // for generating pom.* - - def mavenTempResourcePath = "${project.buildDir}/mvn/META-INF/maven/${project.group}/${project.archivesBaseName}" - - project.model { - // Write pom to temp location to be picked up later, - // generatePomFileForMavenJavaPublication task comes from maven-publish. - tasks.generatePomFileForMavenJavaPublication { - destination = project.file("$mavenTempResourcePath/pom.xml") - } - } - - // Configure pom generation - project.publishing { - publications { - mavenJava(MavenPublication) { - artifactId project.archivesBaseName - from project.components.java - utils.pomFiller(pom, project.subPomName, project.subPomDesc) - if (project.hasProperty('shadowJar')) { - pom.withXml { - if (asNode().dependencies.isEmpty()) { - asNode().appendNode('dependencies') - } - project.configurations.shadowCompile.dependencies.each { - def dep = asNode().dependencies[0].appendNode('dependency') - dep.appendNode('groupId', it.group) - dep.appendNode('artifactId', it.name) - dep.appendNode('version', it.version) - dep.appendNode('scope', 'compile') - } - project.configurations.pomOnlyCompile.dependencies.each { - def dep = asNode().dependencies[0].appendNode('dependency') - dep.appendNode('groupId', it.group) - dep.appendNode('artifactId', it.name) - dep.appendNode('version', it.version) - dep.appendNode('scope', 'compile') - } - project.configurations.shadowProvided.dependencies.each { - def dep = asNode().dependencies[0].appendNode('dependency') - dep.appendNode('groupId', it.group) - dep.appendNode('artifactId', it.name) - dep.appendNode('version', it.version) - dep.appendNode('scope', 'provided') - } - project.configurations.pomOnlyProvided.dependencies.each { - def dep = asNode().dependencies[0].appendNode('dependency') - dep.appendNode('groupId', it.group) - dep.appendNode('artifactId', it.name) - dep.appendNode('version', it.version) - dep.appendNode('scope', 'provided') - } - } - } - } - } - } - - // Write pom.properties to temp location - project.task('writeMavenProperties') { - doLast { - project.file(mavenTempResourcePath).mkdirs() - def propertyFile = project.file "$mavenTempResourcePath/pom.properties" - def props = new Properties() - props.setProperty('version', project.version) - props.setProperty('groupId', project.group) - props.setProperty('artifactId', project.archivesBaseName) - props.store propertyFile.newWriter(), null - } - } - - if (utils.isReleaseVersion) { - //ensure that we generate maven stuff and delay resolution as the first task is created dynamically - project.processResources.dependsOn { - project.tasks.findAll { task -> - task.name == 'generatePomFileForMavenJavaPublication' || task.name == 'writeMavenProperties' - } - } - - // Pick up pom.xml and pom.properties from temp location - project.sourceSets { - main { - resources { - srcDir "${project.buildDir}/mvn" - } - } - } - } - } -} diff --git a/buildSrc/src/main/groovy/EhPomMangle.groovy b/buildSrc/src/main/groovy/EhPomMangle.groovy deleted file mode 100644 index 271271ab3d..0000000000 --- a/buildSrc/src/main/groovy/EhPomMangle.groovy +++ /dev/null @@ -1,97 +0,0 @@ -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.artifacts.ProjectDependency -import org.gradle.api.artifacts.maven.Conf2ScopeMappingContainer -import org.gradle.api.artifacts.maven.MavenDeployment -import org.gradle.api.plugins.MavenPlugin -import scripts.Utils - -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * EhPomMangle - * Removes all implicit dependencies from the pom - * and adds only what is specified in (from shadowJar) - * - * project.configurations.shadowCompile (as compile) - * project.configurations.shadowProvided (as provided) - * - * as well as (these do not affect shadow) - * - * project.configurations.pomOnlyCompile - * project.configurations.pomOnlyProvided - * - * Also defines the pom defaults (name, desc, etc) unless overridden in gradle.properties - * Also sets up upload repositories - */ -class EhPomMangle implements Plugin { - - @Override - void apply(Project project) { - def utils = new Utils(project.baseVersion, project.logger) - - project.plugins.apply 'java' - project.plugins.apply 'maven' - project.plugins.apply 'signing' - - project.configurations { - shadowCompile - shadowProvided - pomOnlyCompile - pomOnlyProvided - } - - def artifactFiltering = { - pom.scopeMappings.mappings.remove(project.configurations.compile) - pom.scopeMappings.mappings.remove(project.configurations.runtime) - pom.scopeMappings.mappings.remove(project.configurations.testCompile) - pom.scopeMappings.mappings.remove(project.configurations.testRuntime) - pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadowCompile, Conf2ScopeMappingContainer.COMPILE) - pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadowProvided, Conf2ScopeMappingContainer.PROVIDED) - - //Anything extra to add to pom that isn't in the shadowed jar or compilation - pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.pomOnlyCompile, Conf2ScopeMappingContainer.COMPILE) - pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.pomOnlyProvided, Conf2ScopeMappingContainer.PROVIDED) - - utils.pomFiller(pom, project.subPomName, project.subPomDesc) - - } - - project.install { - repositories.mavenInstaller artifactFiltering - } - - project.uploadArchives { - repositories { - mavenDeployer ({ - beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)} - - if (project.isReleaseVersion) { - repository(url: project.deployUrl) { - authentication(userName: project.deployUser, password: project.deployPwd) - } - } else { - repository(id: 'sonatype-nexus-snapshot', url: 'https://oss.sonatype.org/content/repositories/snapshots') { - authentication(userName: project.sonatypeUser, password: project.sonatypePwd) - } - } - } << artifactFiltering) - } - } - - } -} diff --git a/buildSrc/src/main/groovy/scripts/Utils.groovy b/buildSrc/src/main/groovy/scripts/Utils.groovy deleted file mode 100644 index b674c95236..0000000000 --- a/buildSrc/src/main/groovy/scripts/Utils.groovy +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package scripts - -import org.gradle.api.JavaVersion -import org.gradle.internal.jvm.Jvm - -class Utils { - - String version - String revision - boolean isReleaseVersion - - Utils(version, logger) { - this.version = version - this.isReleaseVersion = !version.endsWith('SNAPSHOT') - def tmp = System.getenv("GIT_COMMIT") - if(tmp != null) { - revision = tmp - } else { - logger.debug('Revision not found in system properties, trying command line') - def cmd = 'git rev-parse HEAD' - try { - def proc = cmd.execute() - revision = proc.text.trim() - } catch (IOException ioex) { - revision = 'Unknown' - } - } - logger.debug(revision) - } - - def fillManifest(manifest, title) { - manifest.attributes( - 'provider': 'gradle', - 'Implementation-Title': title, - 'Implementation-Version': "$version $revision", - 'Built-By': System.getProperty('user.name'), - 'Built-JDK': System.getProperty('java.version')) - if (isReleaseVersion) { - manifest.attributes('Build-Time': new Date().format("yyyy-MM-dd'T'HH:mm:ssZ")) - } - } - - def pomFiller(pom, nameVar, descriptionVar) { - pom.withXml { - asNode().version[0] + { - name nameVar - description descriptionVar - url 'http://ehcache.org' - organization { - name 'Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.' - url 'http://terracotta.org' - } - issueManagement { - system 'Github' - url 'https://github.com/ehcache/ehcache3/issues' - } - scm { - url 'https://github.com/ehcache/ehcache3' - connection 'scm:git:https://github.com/ehcache/ehcache3.git' - developerConnection 'scm:git:git@github.com:ehcache/ehcache3.git' - } - licenses { - license { - name 'The Apache Software License, Version 2.0' - url 'http://www.apache.org/licenses/LICENSE-2.0.txt' - distribution 'repo' - } - } - developers { - developer { - name 'Terracotta Engineers' - email 'tc-oss@softwareag.com' - organization 'Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.' - organizationUrl 'http://ehcache.org' - } - } - } - } - } - - static def jvmForHome(File home) { - def java = Jvm.forHome(home).javaExecutable - def versionCommand = "$java -version".execute(); - def version = JavaVersion.toVersion((versionCommand.err.text =~ /\w+ version "(.+)"/)[0][1]) - return Jvm.discovered(home, version) - } -} diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle deleted file mode 100644 index dd5e7e5e94..0000000000 --- a/clustered/client/build.gradle +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: EhDeploy - -dependencies { - compileOnly project(':api') - compileOnly project(':xml') - compile project(':clustered:common'), "org.slf4j:slf4j-api:$slf4jVersion" - provided "org.terracotta:entity-client-api:$terracottaApisVersion" - provided "org.terracotta:runnel:$terracottaPlatformVersion" - - testCompile project(':api') - testCompile project(':xml') - testCompile project(':transactions') - testCompile(project(':clustered:server')) { - exclude group: 'org.terracotta.internal', module: 'tc-config-parser' - } - testCompile "org.terracotta:entity-test-lib:$terracottaPassthroughTestingVersion" - testCompile "org.terracotta:passthrough-server:$terracottaPassthroughTestingVersion" -} - -test { - if (testJava.javaVersion.isJava9Compatible()) { - jvmArgs += ['--add-modules', 'java.xml.bind'] - } -} - -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} diff --git a/clustered/client/gradle.properties b/clustered/client/gradle.properties deleted file mode 100644 index 56c6dfbf5d..0000000000 --- a/clustered/client/gradle.properties +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 Client Side Clustering module -subPomDesc = The Client Side Clustering module of Ehcache 3 -osgi = {"Export-Package" : ["!org.ehcache.clustered.client.internal.*", "!sun.misc"],\ - "Import-Package" : ["!org.ehcache.clustered.client*", "!sun.misc*"]} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java b/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java deleted file mode 100644 index 0499bba666..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.config; - -import org.ehcache.CacheManager; -import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.CacheManagerConfiguration; -import org.ehcache.core.HumanReadable; -import org.ehcache.spi.service.ServiceCreationConfiguration; - -import java.net.URI; -import java.time.Duration; -import java.util.Map; -import java.util.Objects; - -import org.ehcache.clustered.common.ServerSideConfiguration; - -import static org.ehcache.clustered.client.config.Timeouts.DEFAULT_OPERATION_TIMEOUT; - -/** - * Specifies the configuration for a {@link ClusteringService}. - */ -// TODO: Should this accept/hold a *list* of URIs? -public class ClusteringServiceConfiguration - implements ServiceCreationConfiguration, - CacheManagerConfiguration, - HumanReadable { - - private final URI clusterUri; - private final boolean autoCreate; - private final ServerSideConfiguration serverConfiguration; - private final Timeouts timeouts; - - /** - * Creates a {@code ClusteringServiceConfiguration} from the properties provided. - * - * @param clusterUri the non-{@code null} URI identifying the cluster server - * - * @throws NullPointerException if {@code clusterUri} is {@code null} - * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations - */ - public ClusteringServiceConfiguration(URI clusterUri) { - this(clusterUri, Timeouts.DEFAULT); - } - - /** - * Creates a {@code ClusteringServiceConfiguration} from the properties provided. - * - * @param clusterUri the non-{@code null} URI identifying the cluster server - * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations - * - * @throws NullPointerException if {@code clusterUri} is {@code null} - * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations - */ - public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts) { - this(clusterUri, timeouts, null); - } - - /** - * Creates a {@code ClusteringServiceConfiguration} from the properties provided. - * - * @param clusterUri the non-{@code null} URI identifying the cluster server - * @param serverConfig the server side entity configuration required - * - * @throws NullPointerException if {@code clusterUri} is {@code null} - * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations - */ - public ClusteringServiceConfiguration(URI clusterUri, ServerSideConfiguration serverConfig) { - this(clusterUri, Timeouts.DEFAULT, serverConfig); - } - - /** - * Creates a {@code ClusteringServiceConfiguration} from the properties provided. - * - * @param clusterUri the non-{@code null} URI identifying the cluster server - * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations - * @param serverConfig the server side entity configuration required - * - * @throws NullPointerException if {@code clusterUri} is {@code null} - * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations - */ - public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts, ServerSideConfiguration serverConfig) { - this(clusterUri, timeouts, false, serverConfig); - } - - /** - * Creates a {@code ClusteringServiceConfiguration} from the properties provided. - * - * @param clusterUri the non-{@code null} URI identifying the cluster server - * @param autoCreate {@code true} if server components should be auto created - * @param serverConfig the server side entity configuration required - * - * @throws NullPointerException if {@code clusterUri} is {@code null} - * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations - */ - public ClusteringServiceConfiguration(URI clusterUri, boolean autoCreate, ServerSideConfiguration serverConfig) { - this(clusterUri, Timeouts.DEFAULT, autoCreate, serverConfig); - } - - /** - * Creates a {@code ClusteringServiceConfiguration} from the properties provided. - * - * @param clusterUri the non-{@code null} URI identifying the cluster server - * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations - * @param autoCreate {@code true} if server components should be auto created - * @param serverConfig the server side entity configuration required - * - * @throws NullPointerException if {@code clusterUri} is {@code null} - * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations - */ - public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts, boolean autoCreate, ServerSideConfiguration serverConfig) { - this.clusterUri = Objects.requireNonNull(clusterUri, "Cluster URI cannot be null"); - this.autoCreate = autoCreate; - this.serverConfiguration = serverConfig; - this.timeouts = Objects.requireNonNull(timeouts, "Operation timeouts cannot be null"); - } - - protected ClusteringServiceConfiguration(ClusteringServiceConfiguration baseConfig) { - Objects.requireNonNull(baseConfig, "Base configuration cannot be null"); - this.clusterUri = baseConfig.getClusterUri(); - this.timeouts = baseConfig.getTimeouts(); - this.autoCreate = baseConfig.isAutoCreate(); - this.serverConfiguration = baseConfig.getServerConfiguration(); - } - - /** - * The {@code URI} of the cluster that will be connected to. - * - * @return the cluster {@code URI} - */ - public URI getClusterUri() { - return clusterUri; - } - - /** - * Returns {@code true} is server side components should be automatically created. - * - * @return {@code true} is auto-create is enabled - */ - public boolean isAutoCreate() { - return autoCreate; - } - - /** - * The default server resource to use for caches and pools, or {@code null} if one is not defined. - * - * @return the default server resource - */ - public ServerSideConfiguration getServerConfiguration() { - return serverConfiguration; - } - - /** - * The timeouts for all cache operations - * - * @return the cache timeouts - */ - public Timeouts getTimeouts() { - return timeouts; - } - - /** - * The timeout for cache read operations. - * - * @return the cache read operation timeout - * - * @deprecated Use {@link #getTimeouts()} - */ - @Deprecated - public Duration getReadOperationTimeout() { - return timeouts.getReadOperationTimeout(); - } - - @Override - public Class getServiceType() { - return ClusteringService.class; - } - - @SuppressWarnings("unchecked") - @Override - public CacheManagerBuilder builder(CacheManagerBuilder other) { - return (CacheManagerBuilder) other.using(this); // unchecked - } - - @Override - public String readableString() { - return this.getClass().getName() + ":\n " + - "clusterUri: " + getClusterUri()+ "\n " + - "timeouts: " + getTimeouts()+ "\n " + - "autoCreate: " + isAutoCreate() + "\n " + - "defaultServerResource: " + serverConfiguration.getDefaultServerResource() + "\n " + - readablePoolsString(); - } - - private String readablePoolsString() { - StringBuilder pools = new StringBuilder("resourcePools:\n"); - for(Map.Entry entry : serverConfiguration.getResourcePools().entrySet()) { - pools.append(" " + entry.getKey() + ": " + entry.getValue() + "\n"); - } - return pools.toString(); - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java b/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java deleted file mode 100644 index 0374f5943b..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.client.config.builders; - -import java.net.URI; - -import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; - -import java.time.Duration; -import java.time.temporal.ChronoUnit; -import java.util.Objects; -import java.util.concurrent.TimeUnit; -import org.ehcache.clustered.client.config.Timeouts; -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.config.Builder; - -/** - * A builder of ClusteringService configurations. - */ -public final class ClusteringServiceConfigurationBuilder implements Builder { - - private final URI clusterUri; - private final Timeouts timeouts; - private final boolean autoCreate; - - /** - * Creates a new builder connecting to the given cluster. - * - * @param clusterUri cluster URI - * - * @return a clustering service configuration builder - */ - public static ClusteringServiceConfigurationBuilder cluster(URI clusterUri) { - return new ClusteringServiceConfigurationBuilder(clusterUri, TimeoutsBuilder.timeouts().build(), false); - } - - private ClusteringServiceConfigurationBuilder(URI clusterUri, Timeouts timeouts, boolean autoCreate) { - this.clusterUri = Objects.requireNonNull(clusterUri, "Cluster URI can't be null"); - this.timeouts = Objects.requireNonNull(timeouts, "Timeouts can't be null"); - this.autoCreate = autoCreate; - } - - /** - * Support connection to an existing entity or create if the entity if absent. - * - * @return a clustering service configuration builder - */ - public ServerSideConfigurationBuilder autoCreate() { - return new ServerSideConfigurationBuilder(new ClusteringServiceConfigurationBuilder(this.clusterUri, this.timeouts, true)); - } - - /** - * Only support connection to an existing entity. - * - * @return a clustering service configuration builder - */ - public ServerSideConfigurationBuilder expecting() { - return new ServerSideConfigurationBuilder(new ClusteringServiceConfigurationBuilder(this.clusterUri, this.timeouts, false)); - } - - /** - * Adds timeouts. - * Read operations which time out return a result comparable to a cache miss. - * Write operations which time out won't do anything. - * Lifecycle operations which time out will fail with exception - * - * @param timeouts the amount of time permitted for all operations - * - * @return a clustering service configuration builder - * - * @throws NullPointerException if {@code timeouts} is {@code null} - */ - public ClusteringServiceConfigurationBuilder timeouts(Timeouts timeouts) { - return new ClusteringServiceConfigurationBuilder(this.clusterUri, timeouts, this.autoCreate); - } - - /** - * Adds timeouts. - * Read operations which time out return a result comparable to a cache miss. - * Write operations which time out won't do anything. - * Lifecycle operations which time out will fail with exception - * - * @param timeoutsBuilder the builder for amount of time permitted for all operations - * - * @return a clustering service configuration builder - * - * @throws NullPointerException if {@code timeouts} is {@code null} - */ - public ClusteringServiceConfigurationBuilder timeouts(Builder timeoutsBuilder) { - return new ClusteringServiceConfigurationBuilder(this.clusterUri, timeoutsBuilder.build(), this.autoCreate); - } - - /** - * Adds a read operation timeout. Read operations which time out return a result comparable to - * a cache miss. - * - * @param duration the amount of time permitted for read operations - * @param unit the time units for {@code duration} - * - * @return a clustering service configuration builder - * - * @throws NullPointerException if {@code unit} is {@code null} - * @throws IllegalArgumentException if {@code amount} is negative - * - * @deprecated Use {@link #timeouts(Timeouts)}. Note that calling this method will override any timeouts previously set - * by setting the read operation timeout to the specified value and everything else to its default. - */ - @Deprecated - public ClusteringServiceConfigurationBuilder readOperationTimeout(long duration, TimeUnit unit) { - Duration readTimeout = Duration.of(duration, toChronoUnit(unit)); - return timeouts(TimeoutsBuilder.timeouts().read(readTimeout).build()); - } - - @Override - public ClusteringServiceConfiguration build() { - return new ClusteringServiceConfiguration(clusterUri, timeouts, autoCreate, null); - } - - /** - * Internal method to build a new {@link ClusteringServiceConfiguration} from the {@link ServerSideConfigurationBuilder}. - * - * @param serverSideConfiguration the {@code ServerSideConfiguration} to use - * - * @return a new {@code ClusteringServiceConfiguration} instance built from {@code this} - * {@code ClusteringServiceConfigurationBuilder} and the {@code serverSideConfiguration} provided - */ - ClusteringServiceConfiguration build(ServerSideConfiguration serverSideConfiguration) { - return new ClusteringServiceConfiguration(clusterUri, timeouts, autoCreate, serverSideConfiguration); - } - - private static ChronoUnit toChronoUnit(TimeUnit unit) { - if(unit == null) { - return null; - } - switch (unit) { - case NANOSECONDS: return ChronoUnit.NANOS; - case MICROSECONDS: return ChronoUnit.MICROS; - case MILLISECONDS: return ChronoUnit.MILLIS; - case SECONDS: return ChronoUnit.SECONDS; - case MINUTES: return ChronoUnit.MINUTES; - case HOURS: return ChronoUnit.HOURS; - case DAYS: return ChronoUnit.DAYS; - default: throw new AssertionError("Unknown unit: " + unit); - } - } - -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java deleted file mode 100644 index 11903da55c..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.config.xml; - -import org.ehcache.clustered.client.internal.config.DedicatedClusteredResourcePoolImpl; -import org.ehcache.clustered.client.internal.config.SharedClusteredResourcePoolImpl; -import org.ehcache.clustered.client.internal.config.ClusteredResourcePoolImpl; -import org.ehcache.config.ResourcePool; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.xml.CacheResourceConfigurationParser; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.w3c.dom.Attr; -import org.w3c.dom.DOMException; -import org.w3c.dom.Element; - -import java.io.IOException; -import java.net.URI; - -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; - -import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.NAMESPACE; -import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.XML_SCHEMA; - -/** - * Provides a parser for the {@code /config/cache/resources} extension elements. - */ -public class ClusteredResourceConfigurationParser implements CacheResourceConfigurationParser { - @Override - public Source getXmlSchema() throws IOException { - return new StreamSource(XML_SCHEMA.openStream()); - } - - @Override - public URI getNamespace() { - return NAMESPACE; - } - - protected ResourcePool parseResourceConfig(final Element fragment) { - final String elementName = fragment.getLocalName(); - if ("clustered-shared".equals(elementName)) { - final String sharing = fragment.getAttribute("sharing"); - return new SharedClusteredResourcePoolImpl(sharing); - - } else if ("clustered-dedicated".equals(elementName)) { - // 'from' attribute is optional on 'clustered-dedicated' element - final Attr fromAttr = fragment.getAttributeNode("from"); - final String from = (fromAttr == null ? null : fromAttr.getValue()); - - final String unitValue = fragment.getAttribute("unit").toUpperCase(); - final MemoryUnit sizeUnits; - try { - sizeUnits = MemoryUnit.valueOf(unitValue); - } catch (IllegalArgumentException e) { - throw new XmlConfigurationException(String.format("XML configuration element <%s> 'unit' attribute '%s' is not valid", elementName, unitValue), e); - } - - final String sizeValue; - try { - sizeValue = fragment.getFirstChild().getNodeValue(); - } catch (DOMException e) { - throw new XmlConfigurationException(String.format("XML configuration element <%s> value is not valid", elementName), e); - } - final long size; - try { - size = Long.parseLong(sizeValue); - } catch (NumberFormatException e) { - throw new XmlConfigurationException(String.format("XML configuration element <%s> value '%s' is not valid", elementName, sizeValue), e); - } - - return new DedicatedClusteredResourcePoolImpl(from, size, sizeUnits); - } else if("clustered".equals(elementName)) { - return new ClusteredResourcePoolImpl(); - } - return null; - } - - @Override - public ResourcePool parseResourceConfiguration(final Element fragment) { - ResourcePool resourcePool = parseResourceConfig(fragment); - if (resourcePool != null) { - return resourcePool; - } - throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", - fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParser.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParser.java deleted file mode 100644 index ddc8fc1f62..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParser.java +++ /dev/null @@ -1,263 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.config.xml; - -import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; -import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; -import org.ehcache.clustered.client.config.Timeouts; -import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; -import org.ehcache.clustered.client.internal.store.ClusteredStore; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.clustered.common.ServerSideConfiguration.Pool; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.xml.CacheManagerServiceConfigurationParser; -import org.ehcache.xml.CacheServiceConfigurationParser; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.ehcache.xml.model.TimeType; -import org.w3c.dom.Attr; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; - -import java.io.IOException; -import java.math.BigInteger; -import java.net.URI; -import java.net.URISyntaxException; -import java.time.Duration; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; - -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBElement; -import javax.xml.bind.JAXBException; -import javax.xml.bind.Unmarshaller; -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; - -import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.*; -import static org.ehcache.xml.XmlModel.convertToJavaTimeUnit; - -/** - * Provides parsing support for the {@code } elements representing a {@link ClusteringService ClusteringService}. - * - * @see ClusteredCacheConstants#XSD - */ -public class ClusteringServiceConfigurationParser implements CacheManagerServiceConfigurationParser, - CacheServiceConfigurationParser { - - public static final String CLUSTERED_STORE_ELEMENT_NAME = "clustered-store"; - public static final String CONSISTENCY_ATTRIBUTE_NAME = "consistency"; - - @Override - public Source getXmlSchema() throws IOException { - return new StreamSource(XML_SCHEMA.openStream()); - } - - @Override - public URI getNamespace() { - return NAMESPACE; - } - - @Override - public ServiceConfiguration parseServiceConfiguration(Element fragment) { - if (CLUSTERED_STORE_ELEMENT_NAME.equals(fragment.getLocalName())) { - if (fragment.hasAttribute(CONSISTENCY_ATTRIBUTE_NAME)) { - return new ClusteredStoreConfiguration(Consistency.valueOf(fragment.getAttribute("consistency").toUpperCase())); - } else { - return new ClusteredStoreConfiguration(); - } - } - throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", - fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); - } - - /** - * Complete interpretation of the top-level elements defined in {@value ClusteredCacheConstants#XSD}. - * This method is called only for those elements from the namespace set by {@link ClusteredCacheConstants#NAMESPACE}. - *

- * This method presumes the element presented is valid according to the XSD. - * - * @param fragment the XML fragment to process - * - * @return a {@link org.ehcache.clustered.client.config.ClusteringServiceConfiguration ClusteringServiceConfiguration} - */ - @Override - public ServiceCreationConfiguration parseServiceCreationConfiguration(final Element fragment) { - - if ("cluster".equals(fragment.getLocalName())) { - - ServerSideConfig serverConfig = null; - URI connectionUri = null; - Duration getTimeout = null, putTimeout = null, connectionTimeout = null; - final NodeList childNodes = fragment.getChildNodes(); - for (int i = 0; i < childNodes.getLength(); i++) { - final Node item = childNodes.item(i); - if (Node.ELEMENT_NODE == item.getNodeType()) { - if ("connection".equals(item.getLocalName())) { - /* - * is a required element in the XSD - */ - final Attr urlAttribute = ((Element)item).getAttributeNode("url"); - final String urlValue = urlAttribute.getValue(); - try { - connectionUri = new URI(urlValue); - } catch (URISyntaxException e) { - throw new XmlConfigurationException( - String.format("Value of %s attribute on XML configuration element <%s> in <%s> is not a valid URI - '%s'", - urlAttribute.getName(), item.getNodeName(), fragment.getTagName(), connectionUri), e); - } - - } else if ("read-timeout".equals(item.getLocalName())) { - /* - * is an optional element - */ - getTimeout = processTimeout(fragment, item); - - } else if ("write-timeout".equals(item.getLocalName())) { - /* - * is an optional element - */ - putTimeout = processTimeout(fragment, item); - - } else if ("connection-timeout".equals(item.getLocalName())) { - /* - * is an optional element - */ - connectionTimeout = processTimeout(fragment, item); - - } else if ("server-side-config".equals(item.getLocalName())) { - /* - * is an optional element - */ - serverConfig = processServerSideConfig(item); - } - } - } - - try { - Timeouts timeouts = getTimeouts(getTimeout, putTimeout, connectionTimeout); - if (serverConfig == null) { - return new ClusteringServiceConfiguration(connectionUri, timeouts); - } - - ServerSideConfiguration serverSideConfiguration; - if (serverConfig.defaultServerResource == null) { - serverSideConfiguration = new ServerSideConfiguration(serverConfig.pools); - } else { - serverSideConfiguration = new ServerSideConfiguration(serverConfig.defaultServerResource, serverConfig.pools); - } - - return new ClusteringServiceConfiguration(connectionUri, timeouts, serverConfig.autoCreate, serverSideConfiguration); - } catch (IllegalArgumentException e) { - throw new XmlConfigurationException(e); - } - } - throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", - fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); - } - - private Timeouts getTimeouts(Duration getTimeout, Duration putTimeout, Duration connectionTimeout) { - TimeoutsBuilder builder = TimeoutsBuilder.timeouts(); - if (getTimeout != null) { - builder.read(getTimeout); - } - if(putTimeout != null) { - builder.write(putTimeout); - } - if(connectionTimeout != null) { - builder.connection(connectionTimeout); - } - return builder.build(); - } - - private Duration processTimeout(Element parentElement, Node timeoutNode) { - try { - // are direct subtype of ehcache:time-type; use JAXB to interpret it - JAXBContext context = JAXBContext.newInstance(TimeType.class.getPackage().getName()); - Unmarshaller unmarshaller = context.createUnmarshaller(); - JAXBElement jaxbElement = unmarshaller.unmarshal(timeoutNode, TimeType.class); - - TimeType timeType = jaxbElement.getValue(); - BigInteger amount = timeType.getValue(); - if (amount.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) { - throw new XmlConfigurationException( - String.format("Value of XML configuration element <%s> in <%s> exceeds allowed value - %s", - timeoutNode.getNodeName(), parentElement.getTagName(), amount)); - } - return Duration.of(amount.longValue(), convertToJavaTimeUnit(timeType.getUnit())); - - } catch (JAXBException e) { - throw new XmlConfigurationException(e); - } - } - - private ServerSideConfig processServerSideConfig(Node serverSideConfigElement) { - ServerSideConfig serverSideConfig = new ServerSideConfig(); - serverSideConfig.autoCreate = Boolean.parseBoolean(((Element) serverSideConfigElement).getAttribute("auto-create")); - final NodeList serverSideNodes = serverSideConfigElement.getChildNodes(); - for (int i = 0; i < serverSideNodes.getLength(); i++) { - final Node item = serverSideNodes.item(i); - if (Node.ELEMENT_NODE == item.getNodeType()) { - String nodeLocalName = item.getLocalName(); - if ("default-resource".equals(nodeLocalName)) { - serverSideConfig.defaultServerResource = ((Element)item).getAttribute("from"); - - } else if ("shared-pool".equals(nodeLocalName)) { - Element sharedPoolElement = (Element)item; - String poolName = sharedPoolElement.getAttribute("name"); // required - Attr fromAttr = sharedPoolElement.getAttributeNode("from"); // optional - String fromResource = (fromAttr == null ? null : fromAttr.getValue()); - Attr unitAttr = sharedPoolElement.getAttributeNode("unit"); // optional - default 'B' - String unit = (unitAttr == null ? "B" : unitAttr.getValue()); - MemoryUnit memoryUnit = MemoryUnit.valueOf(unit.toUpperCase(Locale.ENGLISH)); - - String quantityValue = sharedPoolElement.getFirstChild().getNodeValue(); - long quantity; - try { - quantity = Long.parseLong(quantityValue); - } catch (NumberFormatException e) { - throw new XmlConfigurationException("Magnitude of value specified for is too large"); - } - - Pool poolDefinition; - if (fromResource == null) { - poolDefinition = new Pool(memoryUnit.toBytes(quantity)); - } else { - poolDefinition = new Pool(memoryUnit.toBytes(quantity), fromResource); - } - - if (serverSideConfig.pools.put(poolName, poolDefinition) != null) { - throw new XmlConfigurationException("Duplicate definition for "); - } - } - } - } - return serverSideConfig; - } - - private static final class ServerSideConfig { - private boolean autoCreate = false; - private String defaultServerResource = null; - private final Map pools = new HashMap<>(); - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java deleted file mode 100644 index 51b5a90c62..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ /dev/null @@ -1,476 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.service; - -import org.ehcache.CachePersistenceException; -import org.ehcache.clustered.client.config.ClusteredResourcePool; -import org.ehcache.clustered.client.config.ClusteredResourceType; -import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; -import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntity; -import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory; -import org.ehcache.clustered.client.internal.ClusterTierManagerCreationException; -import org.ehcache.clustered.client.internal.ClusterTierManagerNotFoundException; -import org.ehcache.clustered.client.internal.ClusterTierManagerValidationException; -import org.ehcache.clustered.client.config.Timeouts; -import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity; -import org.ehcache.clustered.client.internal.store.EventualServerStoreProxy; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; -import org.ehcache.clustered.client.internal.store.StrongServerStoreProxy; -import org.ehcache.clustered.client.service.ClientEntityFactory; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.client.service.EntityBusyException; -import org.ehcache.clustered.client.service.EntityService; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.exceptions.DestroyInProgressException; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.ResourceType; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.persistence.StateRepository; -import org.ehcache.spi.service.MaintainableService; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.connection.Connection; -import org.terracotta.connection.ConnectionException; -import org.terracotta.connection.ConnectionFactory; -import org.terracotta.connection.ConnectionPropertyNames; -import org.terracotta.connection.entity.Entity; -import org.terracotta.exception.EntityAlreadyExistsException; -import org.terracotta.exception.EntityNotFoundException; - -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Arrays; -import java.util.Properties; -import java.util.Random; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.TimeoutException; - -/** - * Provides support for accessing server-based cluster services. - */ -class DefaultClusteringService implements ClusteringService, EntityService { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultClusteringService.class); - - static final String CONNECTION_PREFIX = "Ehcache:"; - - private final ClusteringServiceConfiguration configuration; - private final URI clusterUri; - private final String entityIdentifier; - private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap<>(); - private final Timeouts timeouts; - - private volatile Connection clusterConnection; - private ClusterTierManagerClientEntityFactory entityFactory; - private ClusterTierManagerClientEntity entity; - private final ConcurrentMap clusterTierEntities = new ConcurrentHashMap<>(); - - private volatile boolean inMaintenance = false; - - DefaultClusteringService(final ClusteringServiceConfiguration configuration) { - this.configuration = configuration; - URI ehcacheUri = configuration.getClusterUri(); - this.clusterUri = extractClusterUri(ehcacheUri); - this.entityIdentifier = clusterUri.relativize(ehcacheUri).getPath(); - this.timeouts = configuration.getTimeouts(); - } - - private static URI extractClusterUri(URI uri) { - try { - return new URI(uri.getScheme(), uri.getAuthority(), null, null, null); - } catch (URISyntaxException e) { - throw new AssertionError(e); - } - } - - @Override - public ClusteringServiceConfiguration getConfiguration() { - return this.configuration; - } - - @Override - public ClientEntityFactory newClientEntityFactory(String entityIdentifier, Class entityType, long entityVersion, C configuration) { - return new AbstractClientEntityFactory(entityIdentifier, entityType, entityVersion, configuration) { - @Override - protected Connection getConnection() { - if (!isConnected()) { - throw new IllegalStateException(getClass().getSimpleName() + " not started."); - } - return clusterConnection; - } - }; - } - - @Override - public boolean isConnected() { - return clusterConnection != null; - } - - @Override - public void start(final ServiceProvider serviceProvider) { - initClusterConnection(); - createEntityFactory(); - try { - if (configuration.isAutoCreate()) { - entity = autoCreateEntity(); - } else { - try { - entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); - } catch (DestroyInProgressException | EntityNotFoundException e) { - throw new IllegalStateException("The cluster tier manager '" + entityIdentifier + "' does not exist." - + " Please review your configuration.", e); - } catch (TimeoutException e) { - throw new RuntimeException("Could not connect to the cluster tier manager '" + entityIdentifier - + "'; retrieve operation timed out", e); - } - } - } catch (RuntimeException e) { - entityFactory = null; - closeConnection(); - throw e; - } - } - - @Override - public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { - initClusterConnection(); - createEntityFactory(); - if(maintenanceScope == MaintenanceScope.CACHE_MANAGER) { - if (!entityFactory.acquireLeadership(entityIdentifier)) { - entityFactory = null; - closeConnection(); - throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease"); - } - } - inMaintenance = true; - } - - private void createEntityFactory() { - entityFactory = new ClusterTierManagerClientEntityFactory(clusterConnection, timeouts); - } - - private void initClusterConnection() { - try { - Properties properties = new Properties(); - properties.put(ConnectionPropertyNames.CONNECTION_NAME, CONNECTION_PREFIX + entityIdentifier); - properties.put(ConnectionPropertyNames.CONNECTION_TIMEOUT, Long.toString(timeouts.getConnectionTimeout().toMillis())); - clusterConnection = ConnectionFactory.connect(clusterUri, properties); - } catch (ConnectionException ex) { - throw new RuntimeException(ex); - } - } - - private ClusterTierManagerClientEntity autoCreateEntity() throws ClusterTierManagerValidationException, IllegalStateException { - while (true) { - try { - entityFactory.create(entityIdentifier, configuration.getServerConfiguration()); - } catch (ClusterTierManagerCreationException e) { - throw new IllegalStateException("Could not create the cluster tier manager '" + entityIdentifier + "'.", e); - } catch (EntityAlreadyExistsException | EntityBusyException e) { - //ignore - entity already exists - try to retrieve - } - try { - return entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); - } catch (DestroyInProgressException e) { - silentDestroy(); - } catch (EntityNotFoundException e) { - //ignore - loop and try to create - } catch (TimeoutException e) { - throw new RuntimeException("Could not connect to the cluster tier manager '" + entityIdentifier - + "'; retrieve operation timed out", e); - } - } - } - - private void silentDestroy() { - LOGGER.debug("Found a broken ClusterTierManager - trying to clean it up"); - try { - // Random sleep to enable racing clients to have a window to do the cleanup - Thread.sleep(new Random().nextInt(1000)); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - try { - entityFactory.destroy(entityIdentifier); - } catch (EntityBusyException e) { - // Ignore - we have a racy client - LOGGER.debug("ClusterTierManager {} marked busy when trying to clean it up", entityIdentifier); - } - } - - @Override - public void stop() { - LOGGER.info("Closing connection to cluster {}", this.clusterUri); - - /* - * Entity close() operations must *not* be called; if the server connection is disconnected, the entity - * close operations will stall attempting to communicate with the server. (EntityClientEndpointImpl.close() - * calls a "closeHook" method provided by ClientEntityManagerImpl which ultimately winds up in - * InFlightMessage.waitForAcks -- a method that can wait forever.) Theoretically, the connection close will - * take care of server-side cleanup in the event the server is connected. - */ - entityFactory = null; - inMaintenance = false; - - clusterTierEntities.clear(); - entity = null; - - closeConnection(); - } - - @Override - public void destroyAll() throws CachePersistenceException { - if (!inMaintenance) { - throw new IllegalStateException("Maintenance mode required"); - } - LOGGER.info("destroyAll called for cluster tiers on {}", this.clusterUri); - - try { - entityFactory.destroy(entityIdentifier); - } catch (EntityBusyException e) { - throw new CachePersistenceException("Can not delete cluster tiers on " + this.clusterUri, e); - } - } - - @Override - public boolean handlesResourceType(ResourceType resourceType) { - return (Arrays.asList(ClusteredResourceType.Types.values()).contains(resourceType)); - } - - @Override - public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { - ClusteredSpace clusteredSpace = knownPersistenceSpaces.get(name); - if(clusteredSpace != null) { - return clusteredSpace.identifier; - } else { - ClusteredCacheIdentifier cacheIdentifier = new DefaultClusterCacheIdentifier(name); - clusteredSpace = knownPersistenceSpaces.putIfAbsent(name, new ClusteredSpace(cacheIdentifier)); - if(clusteredSpace == null) { - return cacheIdentifier; - } else { - return clusteredSpace.identifier; - } - } - } - - @Override - public void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { - ClusteredCacheIdentifier clusterCacheIdentifier = (ClusteredCacheIdentifier) identifier; - if (knownPersistenceSpaces.remove(clusterCacheIdentifier.getId()) == null) { - throw new CachePersistenceException("Unknown identifier: " + clusterCacheIdentifier); - } - } - - @Override - public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - ClusteredCacheIdentifier clusterCacheIdentifier = (ClusteredCacheIdentifier) identifier; - ClusteredSpace clusteredSpace = knownPersistenceSpaces.get(clusterCacheIdentifier.getId()); - if (clusteredSpace == null) { - throw new CachePersistenceException("Clustered space not found for identifier: " + clusterCacheIdentifier); - } - ConcurrentMap stateRepositories = clusteredSpace.stateRepositories; - ClusterStateRepository currentRepo = stateRepositories.get(name); - if(currentRepo != null) { - return currentRepo; - } else { - ClusterStateRepository newRepo = new ClusterStateRepository(clusterCacheIdentifier, name, clusterTierEntities.get(clusterCacheIdentifier.getId())); - currentRepo = stateRepositories.putIfAbsent(name, newRepo); - if (currentRepo == null) { - return newRepo; - } else { - return currentRepo; - } - } - } - - private void checkStarted() { - if(!isStarted()) { - throw new IllegalStateException(getClass().getName() + " should be started to call destroy"); - } - } - - @Override - public void destroy(String name) throws CachePersistenceException { - checkStarted(); - - // will happen when in maintenance mode - if(entity == null) { - try { - entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); - } catch (EntityNotFoundException e) { - // No entity on the server, so no need to destroy anything - } catch (TimeoutException e) { - throw new CachePersistenceException("Could not connect to the cluster tier manager '" + entityIdentifier - + "'; retrieve operation timed out", e); - } catch (DestroyInProgressException e) { - silentDestroy(); - // Nothing left to do - return; - } - } - - try { - if (entity != null) { - entityFactory.destroyClusteredStoreEntity(entityIdentifier, name); - } - } catch (EntityNotFoundException e) { - // Ignore - does not exist, nothing to destroy - LOGGER.debug("Destruction of cluster tier {} failed as it does not exist", name); - } - } - - protected boolean isStarted() { - return entityFactory != null; - } - - @Override - public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifier cacheIdentifier, - final Store.Configuration storeConfig, - Consistency configuredConsistency, - ServerCallback invalidation) throws CachePersistenceException { - final String cacheId = cacheIdentifier.getId(); - - if (configuredConsistency == null) { - throw new NullPointerException("Consistency cannot be null"); - } - - /* - * This method is expected to be called with exactly ONE ClusteredResourcePool specified. - */ - ClusteredResourcePool clusteredResourcePool = null; - for (ClusteredResourceType type : ClusteredResourceType.Types.values()) { - ClusteredResourcePool pool = storeConfig.getResourcePools().getPoolForResource(type); - if (pool != null) { - if (clusteredResourcePool != null) { - throw new IllegalStateException("At most one clustered resource supported for a cache"); - } - clusteredResourcePool = pool; - } - } - if (clusteredResourcePool == null) { - throw new IllegalStateException("A clustered resource is required for a clustered cache"); - } - - final ServerStoreConfiguration clientStoreConfiguration = new ServerStoreConfiguration( - clusteredResourcePool.getPoolAllocation(), - storeConfig.getKeyType().getName(), - storeConfig.getValueType().getName(), - (storeConfig.getKeySerializer() == null ? null : storeConfig.getKeySerializer().getClass().getName()), - (storeConfig.getValueSerializer() == null ? null : storeConfig.getValueSerializer().getClass().getName()), - configuredConsistency - ); - - ClusterTierClientEntity storeClientEntity; - try { - storeClientEntity = entityFactory.fetchOrCreateClusteredStoreEntity(entityIdentifier, cacheId, - clientStoreConfiguration, configuration.isAutoCreate()); - clusterTierEntities.put(cacheId, storeClientEntity); - } catch (EntityNotFoundException e) { - throw new CachePersistenceException("Cluster tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "' does not exist.", e); - } - - - ServerStoreProxy serverStoreProxy; - switch (configuredConsistency) { - case STRONG: - serverStoreProxy = new StrongServerStoreProxy(cacheId, storeClientEntity, invalidation); - break; - case EVENTUAL: - serverStoreProxy = new EventualServerStoreProxy(cacheId, storeClientEntity, invalidation); - break; - default: - throw new AssertionError("Unknown consistency : " + configuredConsistency); - } - - try { - storeClientEntity.validate(clientStoreConfiguration); - } catch (ClusterTierException e) { - serverStoreProxy.close(); - throw new CachePersistenceException("Unable to create cluster tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "'", e); - } catch (TimeoutException e) { - serverStoreProxy.close(); - throw new CachePersistenceException("Unable to create cluster tier proxy '" - + cacheIdentifier.getId() + "' for entity '" + entityIdentifier - + "'; validate operation timed out", e); - } - - return serverStoreProxy; - } - - @Override - public void releaseServerStoreProxy(ServerStoreProxy storeProxy) { - clusterTierEntities.remove(storeProxy.getCacheId()); - storeProxy.close(); - } - - private void closeConnection() { - Connection conn = clusterConnection; - clusterConnection = null; - if(conn != null) { - try { - conn.close(); - } catch (IOException e) { - LOGGER.warn("Error closing cluster connection: " + e); - } - } - } - - /** - * Supplies the identifier to use for identifying a client-side cache to its server counterparts. - */ - private static class DefaultClusterCacheIdentifier implements ClusteredCacheIdentifier { - - private final String id; - - DefaultClusterCacheIdentifier(final String id) { - this.id = id; - } - - @Override - public String getId() { - return this.id; - } - - @Override - public Class getServiceType() { - return ClusteringService.class; - } - - @Override - public String toString() { - return getClass().getSimpleName() + "@" + id; - } - } - - private static class ClusteredSpace { - - private final ClusteredCacheIdentifier identifier; - private final ConcurrentMap stateRepositories; - - ClusteredSpace(final ClusteredCacheIdentifier identifier) { - this.identifier = identifier; - this.stateRepositories = new ConcurrentHashMap<>(); - } - } - -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ChainBuilder.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ChainBuilder.java deleted file mode 100644 index 068e7edc33..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ChainBuilder.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Util; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; - -/** - * Builds {@link Chain}s - */ -public class ChainBuilder { - - private List buffers = new ArrayList<>(); - - public ChainBuilder() { - } - - private ChainBuilder(List buffers) { - this.buffers = buffers; - } - - //TODO: optimize this & make this mutable - public ChainBuilder add(final ByteBuffer payload) { - List newList = new ArrayList<>(); - newList.addAll(this.buffers); - newList.add(payload); - return new ChainBuilder(newList); - } - - public Chain build() { - ByteBuffer[] elements = new ByteBuffer[buffers.size()]; - buffers.toArray(elements); - return Util.getChain(false, elements); - } - -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java deleted file mode 100644 index 44cd04eaa9..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ /dev/null @@ -1,819 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.Cache; -import org.ehcache.CachePersistenceException; -import org.ehcache.clustered.client.config.ClusteredResourceType; -import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; -import org.ehcache.clustered.client.internal.store.operations.ChainResolver; -import org.ehcache.clustered.client.internal.store.operations.EternalChainResolver; -import org.ehcache.clustered.client.internal.store.operations.ConditionalRemoveOperation; -import org.ehcache.clustered.client.internal.store.operations.ConditionalReplaceOperation; -import org.ehcache.clustered.client.internal.store.operations.ExpiryChainResolver; -import org.ehcache.clustered.client.internal.store.operations.PutIfAbsentOperation; -import org.ehcache.clustered.client.internal.store.operations.PutOperation; -import org.ehcache.clustered.client.internal.store.operations.RemoveOperation; -import org.ehcache.clustered.client.internal.store.operations.ReplaceOperation; -import org.ehcache.clustered.client.internal.store.operations.Result; -import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.client.service.ClusteringService.ClusteredCacheIdentifier; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.config.ResourceType; -import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.core.Ehcache; -import org.ehcache.core.events.CacheEventListenerConfiguration; -import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessTimeoutException; -import org.ehcache.core.spi.store.events.StoreEventSource; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.spi.store.tiering.AuthoritativeTier; -import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; -import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.core.spi.time.TimeSourceService; -import org.ehcache.core.statistics.StoreOperationOutcomes.EvictionOutcome; -import org.ehcache.core.statistics.TierOperationOutcomes; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.store.HashUtils; -import org.ehcache.spi.persistence.StateRepository; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.serialization.StatefulSerializer; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.statistics.MappedOperationStatistic; -import org.terracotta.statistics.StatisticsManager; -import org.terracotta.statistics.observer.OperationObserver; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeoutException; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; -import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; -import static org.terracotta.statistics.StatisticBuilder.operation; - -/** - * Supports a {@link Store} in a clustered environment. - */ -public class ClusteredStore implements AuthoritativeTier { - - private static final String STATISTICS_TAG = "Clustered"; - private static final int TIER_HEIGHT = ClusteredResourceType.Types.UNKNOWN.getTierHeight(); //TierHeight is the same for all ClusteredResourceType.Types - static final String CHAIN_COMPACTION_THRESHOLD_PROP = "ehcache.client.chain.compaction.threshold"; - static final int DEFAULT_CHAIN_COMPACTION_THRESHOLD = 4; - - private final int chainCompactionLimit; - private final OperationsCodec codec; - private final ChainResolver resolver; - - private final TimeSource timeSource; - - private volatile ServerStoreProxy storeProxy; - private volatile InvalidationValve invalidationValve; - - private final OperationObserver getObserver; - private final OperationObserver putObserver; - private final OperationObserver removeObserver; - private final OperationObserver putIfAbsentObserver; - private final OperationObserver conditionalRemoveObserver; - private final OperationObserver replaceObserver; - private final OperationObserver conditionalReplaceObserver; - // Needed for JSR-107 compatibility even if unused - private final OperationObserver evictionObserver; - private final OperationObserver getAndFaultObserver; - - - private ClusteredStore(final OperationsCodec codec, final ChainResolver resolver, TimeSource timeSource) { - this.chainCompactionLimit = Integer.getInteger(CHAIN_COMPACTION_THRESHOLD_PROP, DEFAULT_CHAIN_COMPACTION_THRESHOLD); - this.codec = codec; - this.resolver = resolver; - this.timeSource = timeSource; - - this.getObserver = operation(StoreOperationOutcomes.GetOutcome.class).of(this).named("get").tag(STATISTICS_TAG).build(); - this.putObserver = operation(StoreOperationOutcomes.PutOutcome.class).of(this).named("put").tag(STATISTICS_TAG).build(); - this.removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).of(this).named("remove").tag(STATISTICS_TAG).build(); - this.putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).of(this).named("putIfAbsent").tag(STATISTICS_TAG).build(); - this.conditionalRemoveObserver = operation(StoreOperationOutcomes.ConditionalRemoveOutcome.class).of(this).named("conditionalRemove").tag(STATISTICS_TAG).build(); - this.replaceObserver = operation(StoreOperationOutcomes.ReplaceOutcome.class).of(this).named("replace").tag(STATISTICS_TAG).build(); - this.conditionalReplaceObserver = operation(StoreOperationOutcomes.ConditionalReplaceOutcome.class).of(this).named("conditionalReplace").tag(STATISTICS_TAG).build(); - this.evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).of(this).named("eviction").tag(STATISTICS_TAG).build(); - this.getAndFaultObserver = operation(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class).of(this).named("getAndFault").tag(STATISTICS_TAG).build(); - - Set tags = new HashSet<>(Arrays.asList(STATISTICS_TAG, "tier")); - StatisticsManager.createPassThroughStatistic(this, "mappings", tags, () -> -1L); - StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, () -> -1L); - StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, () -> -1L); - StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, () -> -1L); - - } - - /** - * For tests - */ - ClusteredStore(OperationsCodec codec, EternalChainResolver resolver, ServerStoreProxy proxy, TimeSource timeSource) { - this(codec, resolver, timeSource); - this.storeProxy = proxy; - } - - @Override - public ValueHolder get(final K key) throws StoreAccessException { - getObserver.begin(); - ValueHolder value; - try { - value = getInternal(key); - } catch (TimeoutException e) { - getObserver.end(StoreOperationOutcomes.GetOutcome.TIMEOUT); - return null; - } - if(value == null) { - getObserver.end(StoreOperationOutcomes.GetOutcome.MISS); - return null; - } else { - getObserver.end(StoreOperationOutcomes.GetOutcome.HIT); - return value; - } - } - - private ValueHolder getInternal(K key) throws StoreAccessException, TimeoutException { - ClusteredValueHolder holder = null; - try { - Chain chain = storeProxy.get(extractLongKey(key)); - if(!chain.isEmpty()) { - ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis()); - - if (resolvedChain.isCompacted()) { - Chain compactedChain = resolvedChain.getCompactedChain(); - storeProxy.replaceAtHead(extractLongKey(key), chain, compactedChain); - } - - Result resolvedResult = resolvedChain.getResolvedResult(key); - if (resolvedResult != null) { - V value = resolvedResult.getValue(); - long expirationTime = resolvedChain.getExpirationTime(); - if (expirationTime == Long.MAX_VALUE) { - holder = new ClusteredValueHolder<>(value); - } else { - holder = new ClusteredValueHolder<>(value, expirationTime); - } - } - } - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } - return holder; - } - - private long extractLongKey(K key) { - return HashUtils.intHashToLong(key.hashCode()); - } - - @Override - public boolean containsKey(final K key) throws StoreAccessException { - try { - return getInternal(key) != null; - } catch (TimeoutException e) { - return false; - } - } - - @Override - public PutStatus put(final K key, final V value) throws StoreAccessException { - putObserver.begin(); - PutStatus status = silentPut(key, value); - switch (status) { - case PUT: - putObserver.end(StoreOperationOutcomes.PutOutcome.PUT); - break; - case NOOP: - putObserver.end(StoreOperationOutcomes.PutOutcome.NOOP); - break; - default: - throw new AssertionError("Invalid put status: " + status); - } - return status; - } - - private PutStatus silentPut(final K key, final V value) throws StoreAccessException { - try { - PutOperation operation = new PutOperation<>(key, value, timeSource.getTimeMillis()); - ByteBuffer payload = codec.encode(operation); - long extractedKey = extractLongKey(key); - storeProxy.append(extractedKey, payload); - return PutStatus.PUT; - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } catch (TimeoutException e) { - throw new StoreAccessTimeoutException(e); - } - } - - @Override - public ValueHolder putIfAbsent(final K key, final V value) throws StoreAccessException { - putIfAbsentObserver.begin(); - try { - PutIfAbsentOperation operation = new PutIfAbsentOperation<>(key, value, timeSource.getTimeMillis()); - ByteBuffer payload = codec.encode(operation); - long extractedKey = extractLongKey(key); - Chain chain = storeProxy.getAndAppend(extractedKey, payload); - ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis()); - - if (resolvedChain.getCompactionCount() > chainCompactionLimit) { - Chain compactedChain = resolvedChain.getCompactedChain(); - storeProxy.replaceAtHead(extractedKey, chain, compactedChain); - } - - Result result = resolvedChain.getResolvedResult(key); - if(result == null) { - putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.PUT); - return null; - } else { - putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.HIT); - return new ClusteredValueHolder<>(result.getValue()); - } - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } catch (TimeoutException e) { - throw new StoreAccessTimeoutException(e); - } - } - - @Override - public boolean remove(final K key) throws StoreAccessException { - removeObserver.begin(); - if(silentRemove(key)) { - removeObserver.end(StoreOperationOutcomes.RemoveOutcome.REMOVED); - return true; - } else { - removeObserver.end(StoreOperationOutcomes.RemoveOutcome.MISS); - return false; - } - } - - private boolean silentRemove(final K key) throws StoreAccessException { - try { - RemoveOperation operation = new RemoveOperation<>(key, timeSource.getTimeMillis()); - ByteBuffer payload = codec.encode(operation); - long extractedKey = extractLongKey(key); - Chain chain = storeProxy.getAndAppend(extractedKey, payload); - ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis()); - - if(resolvedChain.getResolvedResult(key) != null) { - storeProxy.replaceAtHead(extractedKey, chain, resolvedChain.getCompactedChain()); - return true; - } else { - return false; - } - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } catch (TimeoutException e) { - throw new StoreAccessTimeoutException(e); - } - } - - @Override - public RemoveStatus remove(final K key, final V value) throws StoreAccessException { - conditionalRemoveObserver.begin(); - try { - ConditionalRemoveOperation operation = new ConditionalRemoveOperation<>(key, value, timeSource.getTimeMillis()); - ByteBuffer payload = codec.encode(operation); - long extractedKey = extractLongKey(key); - Chain chain = storeProxy.getAndAppend(extractedKey, payload); - ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis()); - - Result result = resolvedChain.getResolvedResult(key); - if(result != null) { - if(value.equals(result.getValue())) { - storeProxy.replaceAtHead(extractedKey, chain, resolvedChain.getCompactedChain()); - - conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED); - return RemoveStatus.REMOVED; - } else { - conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS); - return RemoveStatus.KEY_PRESENT; - } - } else { - conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS); - return RemoveStatus.KEY_MISSING; - } - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } catch (TimeoutException e) { - throw new StoreAccessTimeoutException(e); - } - } - - @Override - public ValueHolder replace(final K key, final V value) throws StoreAccessException { - replaceObserver.begin(); - try { - ReplaceOperation operation = new ReplaceOperation<>(key, value, timeSource.getTimeMillis()); - ByteBuffer payload = codec.encode(operation); - long extractedKey = extractLongKey(key); - Chain chain = storeProxy.getAndAppend(extractedKey, payload); - ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis()); - - if (resolvedChain.getCompactionCount() > chainCompactionLimit) { - Chain compactedChain = resolvedChain.getCompactedChain(); - storeProxy.replaceAtHead(extractedKey, chain, compactedChain); - } - - Result result = resolvedChain.getResolvedResult(key); - if(result == null) { - replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.MISS); - return null; - } else { - replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.REPLACED); - return new ClusteredValueHolder<>(result.getValue()); - } - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } catch (TimeoutException e) { - throw new StoreAccessTimeoutException(e); - } - } - - @Override - public ReplaceStatus replace(final K key, final V oldValue, final V newValue) throws StoreAccessException { - conditionalReplaceObserver.begin(); - try { - ConditionalReplaceOperation operation = new ConditionalReplaceOperation<>(key, oldValue, newValue, timeSource - .getTimeMillis()); - ByteBuffer payload = codec.encode(operation); - long extractedKey = extractLongKey(key); - Chain chain = storeProxy.getAndAppend(extractedKey, payload); - ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis()); - - if (resolvedChain.getCompactionCount() > chainCompactionLimit) { - Chain compactedChain = resolvedChain.getCompactedChain(); - storeProxy.replaceAtHead(extractedKey, chain, compactedChain); - } - - Result result = resolvedChain.getResolvedResult(key); - if(result != null) { - if(oldValue.equals(result.getValue())) { - conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED); - return ReplaceStatus.HIT; - } else { - conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS); - return ReplaceStatus.MISS_PRESENT; - } - } else { - conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS); - return ReplaceStatus.MISS_NOT_PRESENT; - } - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } catch (TimeoutException e) { - throw new StoreAccessTimeoutException(e); - } - } - - @Override - public void clear() throws StoreAccessException { - try { - storeProxy.clear(); - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } catch (TimeoutException e) { - throw new StoreAccessTimeoutException(e); - } - } - - @Override - public StoreEventSource getStoreEventSource() { - // TODO: Is there a StoreEventSource for a ServerStore? - return new NullStoreEventDispatcher<>(); - } - - @Override - public Iterator>> iterator() { - // TODO: Make appropriate ServerStoreProxy call - throw new UnsupportedOperationException("Implement me"); - } - - @Override - public ValueHolder compute(final K key, final BiFunction mappingFunction) - throws StoreAccessException { - // TODO: Make appropriate ServerStoreProxy call - throw new UnsupportedOperationException("Implement me"); - } - - @Override - public ValueHolder compute(final K key, final BiFunction mappingFunction, final Supplier replaceEqual) - throws StoreAccessException { - // TODO: Make appropriate ServerStoreProxy call - throw new UnsupportedOperationException("Implement me"); - } - - @Override - public ValueHolder computeIfAbsent(final K key, final Function mappingFunction) - throws StoreAccessException { - // TODO: Make appropriate ServerStoreProxy call - throw new UnsupportedOperationException("Implement me"); - } - - /** - * The assumption is that this method will be invoked only by cache.putAll and cache.removeAll methods. - */ - @Override - public Map> bulkCompute(final Set keys, final Function>, Iterable>> remappingFunction) - throws StoreAccessException { - Map> valueHolderMap = new HashMap<>(); - if(remappingFunction instanceof Ehcache.PutAllFunction) { - Ehcache.PutAllFunction putAllFunction = (Ehcache.PutAllFunction)remappingFunction; - Map entriesToRemap = putAllFunction.getEntriesToRemap(); - for(Map.Entry entry: entriesToRemap.entrySet()) { - PutStatus putStatus = silentPut(entry.getKey(), entry.getValue()); - if(putStatus == PutStatus.PUT) { - putAllFunction.getActualPutCount().incrementAndGet(); - valueHolderMap.put(entry.getKey(), new ClusteredValueHolder<>(entry.getValue())); - } - } - } else if(remappingFunction instanceof Ehcache.RemoveAllFunction) { - Ehcache.RemoveAllFunction removeAllFunction = (Ehcache.RemoveAllFunction)remappingFunction; - for (K key : keys) { - boolean removed = silentRemove(key); - if(removed) { - removeAllFunction.getActualRemoveCount().incrementAndGet(); - } - } - } else { - throw new UnsupportedOperationException("This compute method is not yet capable of handling generic computation functions"); - } - return valueHolderMap; - } - - @Override - public Map> bulkCompute(final Set keys, final Function>, Iterable>> remappingFunction, final Supplier replaceEqual) - throws StoreAccessException { - // TODO: Make appropriate ServerStoreProxy call - throw new UnsupportedOperationException("Implement me"); - } - - /** - * The assumption is that this method will be invoked only by cache.getAll method. - */ - @Override - public Map> bulkComputeIfAbsent(final Set keys, final Function, Iterable>> mappingFunction) - throws StoreAccessException { - if(mappingFunction instanceof Ehcache.GetAllFunction) { - Map> map = new HashMap<>(); - for (K key : keys) { - ValueHolder value; - try { - value = getInternal(key); - } catch (TimeoutException e) { - // This timeout handling is safe **only** in the context of a get/read operation! - value = null; - } - ValueHolder holder = (value != null) ? value : null; - map.put(key, holder); - } - return map; - } else { - throw new UnsupportedOperationException("This compute method is not yet capable of handling generic computation functions"); - } - } - - @Override - public List getConfigurationChangeListeners() { - // TODO: Make appropriate ServerStoreProxy call - return Collections.emptyList(); - } - - @Override - public ValueHolder getAndFault(K key) throws StoreAccessException { - getAndFaultObserver.begin(); - ValueHolder value; - try { - value = getInternal(key); - } catch (TimeoutException e) { - getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT); - return null; - } - if(value == null) { - getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); - return null; - } else { - getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT); - return value; - } - } - - @Override - public ValueHolder computeIfAbsentAndFault(K key, Function mappingFunction) throws StoreAccessException { - return computeIfAbsent(key, mappingFunction); - } - - @Override - public boolean flush(K key, ValueHolder valueHolder) { - // TODO wire this once metadata are maintained - return true; - } - - @Override - public void setInvalidationValve(InvalidationValve valve) { - this.invalidationValve = valve; - } - - - /** - * Provider of {@link ClusteredStore} instances. - */ - @ServiceDependencies({TimeSourceService.class, ClusteringService.class}) - public static class Provider implements Store.Provider, AuthoritativeTier.Provider { - - private static final Logger LOGGER = LoggerFactory.getLogger(Provider.class); - - private static final Set> CLUSTER_RESOURCES; - static { - Set> resourceTypes = new HashSet<>(); - Collections.addAll(resourceTypes, ClusteredResourceType.Types.values()); - CLUSTER_RESOURCES = Collections.unmodifiableSet(resourceTypes); - } - - private volatile ServiceProvider serviceProvider; - private volatile ClusteringService clusteringService; - - private final Map, StoreConfig> createdStores = new ConcurrentWeakIdentityHashMap<>(); - private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>(); - - @Override - public ClusteredStore createStore(final Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { - ClusteredStore store = createStoreInternal(storeConfig, serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - - MappedOperationStatistic get = - new MappedOperationStatistic<>( - store, TierOperationOutcomes.GET_TRANSLATION, "get", TIER_HEIGHT, "get", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(store); - tieredOps.add(get); - - MappedOperationStatistic evict = - new MappedOperationStatistic<>( - store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(store); - tieredOps.add(evict); - - tierOperationStatistics.put(store, tieredOps); - return store; - } - - private ClusteredStore createStoreInternal(Configuration storeConfig, Object[] serviceConfigs) { - DefaultCacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, serviceConfigs); - if (loaderWriterConfiguration != null) { - throw new IllegalStateException("CacheLoaderWriter is not supported with clustered tiers"); - } - - CacheEventListenerConfiguration eventListenerConfiguration = findSingletonAmongst(CacheEventListenerConfiguration.class, serviceConfigs); - if (eventListenerConfiguration != null) { - throw new IllegalStateException("CacheEventListener is not supported with clustered tiers"); - } - - if (clusteringService == null) { - throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore called without ClusteringServiceConfiguration"); - } - - final HashSet> clusteredResourceTypes = - new HashSet<>(storeConfig.getResourcePools().getResourceTypeSet()); - clusteredResourceTypes.retainAll(CLUSTER_RESOURCES); - - if (clusteredResourceTypes.isEmpty()) { - throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore called without ClusteredResourcePools"); - } - if (clusteredResourceTypes.size() != 1) { - throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore can not create clustered tier with multiple clustered resources"); - } - - ClusteredStoreConfiguration clusteredStoreConfiguration = findSingletonAmongst(ClusteredStoreConfiguration.class, serviceConfigs); - if (clusteredStoreConfiguration == null) { - clusteredStoreConfiguration = new ClusteredStoreConfiguration(); - } - ClusteredCacheIdentifier cacheId = findSingletonAmongst(ClusteredCacheIdentifier.class, serviceConfigs); - - TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource(); - - OperationsCodec codec = new OperationsCodec<>(storeConfig.getKeySerializer(), storeConfig.getValueSerializer()); - - ChainResolver resolver; - Expiry expiry = storeConfig.getExpiry(); - if (Expirations.noExpiration().equals(expiry)) { - resolver = new EternalChainResolver<>(codec); - } else { - resolver = new ExpiryChainResolver<>(codec, expiry); - } - - - ClusteredStore store = new ClusteredStore<>(codec, resolver, timeSource); - - createdStores.put(store, new StoreConfig(cacheId, storeConfig, clusteredStoreConfiguration.getConsistency())); - return store; - } - - @Override - public void releaseStore(final Store resource) { - if (createdStores.remove(resource) == null) { - throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource); - } - ClusteredStore clusteredStore = (ClusteredStore)resource; - this.clusteringService.releaseServerStoreProxy(clusteredStore.storeProxy); - StatisticsManager.nodeFor(clusteredStore).clean(); - tierOperationStatistics.remove(clusteredStore); - } - - @Override - public void initStore(final Store resource) { - StoreConfig storeConfig = createdStores.get(resource); - if (storeConfig == null) { - throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource); - } - final ClusteredStore clusteredStore = (ClusteredStore) resource; - ClusteredCacheIdentifier cacheIdentifier = storeConfig.getCacheIdentifier(); - try { - clusteredStore.storeProxy = clusteringService.getServerStoreProxy(cacheIdentifier, storeConfig.getStoreConfig(), storeConfig.getConsistency(), - new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - EvictionOutcome result = EvictionOutcome.SUCCESS; - clusteredStore.evictionObserver.begin(); - if (clusteredStore.invalidationValve != null) { - try { - LOGGER.debug("CLIENT: calling invalidation valve for hash {}", hash); - clusteredStore.invalidationValve.invalidateAllWithHash(hash); - } catch (StoreAccessException sae) { - //TODO: what should be done here? delegate to resilience strategy? - LOGGER.error("Error invalidating hash {}", hash, sae); - result = StoreOperationOutcomes.EvictionOutcome.FAILURE; - } - } - clusteredStore.evictionObserver.end(result); - } - - @Override - public void onInvalidateAll() { - if (clusteredStore.invalidationValve != null) { - try { - LOGGER.debug("CLIENT: calling invalidation valve for all"); - clusteredStore.invalidationValve.invalidateAll(); - } catch (StoreAccessException sae) { - //TODO: what should be done here? delegate to resilience strategy? - LOGGER.error("Error invalidating all", sae); - } - } - } - - @Override - public Chain compact(Chain chain) { - return clusteredStore.resolver.applyOperation(chain, clusteredStore.timeSource.getTimeMillis()); - } - }); - } catch (CachePersistenceException e) { - throw new RuntimeException("Unable to create cluster tier proxy - " + cacheIdentifier, e); - } - - Serializer keySerializer = clusteredStore.codec.getKeySerializer(); - if (keySerializer instanceof StatefulSerializer) { - StateRepository stateRepository = null; - try { - stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Key"); - } catch (CachePersistenceException e) { - throw new RuntimeException(e); - } - ((StatefulSerializer)keySerializer).init(stateRepository); - } - Serializer valueSerializer = clusteredStore.codec.getValueSerializer(); - if (valueSerializer instanceof StatefulSerializer) { - StateRepository stateRepository = null; - try { - stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Value"); - } catch (CachePersistenceException e) { - throw new RuntimeException(e); - } - ((StatefulSerializer)valueSerializer).init(stateRepository); - } - } - - @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { - if (clusteringService == null || resourceTypes.size() > 1 || Collections.disjoint(resourceTypes, CLUSTER_RESOURCES)) { - // A ClusteredStore requires a ClusteringService *and* ClusteredResourcePool instances - return 0; - } - return 1; - } - - @Override - public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { - if (clusteringService == null) { - return 0; - } else { - return CLUSTER_RESOURCES.contains(authorityResource) ? 1 : 0; - } - } - - @Override - public void start(final ServiceProvider serviceProvider) { - this.serviceProvider = serviceProvider; - this.clusteringService = this.serviceProvider.getService(ClusteringService.class); - } - - @Override - public void stop() { - this.serviceProvider = null; - createdStores.clear(); - } - - @Override - public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - ClusteredStore authoritativeTier = createStoreInternal(storeConfig, serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - - MappedOperationStatistic get = - new MappedOperationStatistic<>( - authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", TIER_HEIGHT, "getAndFault", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(authoritativeTier); - tieredOps.add(get); - - MappedOperationStatistic evict = - new MappedOperationStatistic<>( - authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(authoritativeTier); - tieredOps.add(evict); - - tierOperationStatistics.put(authoritativeTier, tieredOps); - return authoritativeTier; - } - - @Override - public void releaseAuthoritativeTier(AuthoritativeTier resource) { - releaseStore(resource); - } - - @Override - public void initAuthoritativeTier(AuthoritativeTier resource) { - initStore(resource); - } - } - - private static class StoreConfig { - - private final ClusteredCacheIdentifier cacheIdentifier; - private final Store.Configuration storeConfig; - private final Consistency consistency; - - StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency) { - this.cacheIdentifier = cacheIdentifier; - this.storeConfig = storeConfig; - this.consistency = consistency; - } - - public Configuration getStoreConfig() { - return this.storeConfig; - } - - public ClusteredCacheIdentifier getCacheIdentifier() { - return this.cacheIdentifier; - } - - public Consistency getConsistency() { - return consistency; - } - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java deleted file mode 100644 index 72bf749aa7..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity.ResponseListener; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateAll; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateHash; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ServerInvalidateHash; -import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAllAck; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage; -import org.ehcache.clustered.common.internal.store.Chain; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.nio.ByteBuffer; -import java.util.Objects; -import java.util.concurrent.TimeoutException; - -import static java.util.Objects.requireNonNull; - -/** - * Provides client-side access to the services of a {@code ServerStore}. - */ -class CommonServerStoreProxy implements ServerStoreProxy { - - private static final Logger LOGGER = LoggerFactory.getLogger(CommonServerStoreProxy.class); - - private final String cacheId; - private final ClusterTierClientEntity entity; - - CommonServerStoreProxy(final String cacheId, final ClusterTierClientEntity entity, final ServerCallback invalidation) { - this.cacheId = requireNonNull(cacheId, "Cache-ID must be non-null"); - this.entity = requireNonNull(entity, "ClusterTierClientEntity must be non-null"); - requireNonNull(invalidation, "ServerCallback must be non-null"); - - entity.addResponseListener(ServerInvalidateHash.class, response -> { - long key = response.getKey(); - LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated", cacheId, key); - invalidation.onInvalidateHash(key); - }); - entity.addResponseListener(ClientInvalidateHash.class, response -> { - long key = response.getKey(); - int invalidationId = response.getInvalidationId(); - - LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); - invalidation.onInvalidateHash(key); - - try { - LOGGER.debug("CLIENT: ack'ing invalidation of hash {} from cache {} (ID {})", key, cacheId, invalidationId); - entity.invokeAndWaitForSend(new ClientInvalidationAck(key, invalidationId), false); - } catch (Exception e) { - //TODO: what should be done here? - LOGGER.error("error acking client invalidation of hash {} on cache {}", key, cacheId, e); - } - }); - entity.addResponseListener(ClientInvalidateAll.class, response -> { - int invalidationId = response.getInvalidationId(); - - LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); - invalidation.onInvalidateAll(); - - try { - LOGGER.debug("CLIENT: ack'ing invalidation of all from cache {} (ID {})", cacheId, invalidationId); - entity.invokeAndWaitForSend(new ClientInvalidationAllAck(invalidationId), false); - } catch (Exception e) { - //TODO: what should be done here? - LOGGER.error("error acking client invalidation of all on cache {}", cacheId, e); - } - }); - } - - @Override - public String getCacheId() { - return cacheId; - } - - void addResponseListener(Class listenerClass, SimpleClusterTierClientEntity.ResponseListener listener) { - entity.addResponseListener(listenerClass, listener); - } - - @SuppressWarnings("unchecked") - @Override - public void close() { - entity.close(); - } - - @Override - public Chain get(long key) throws TimeoutException { - EhcacheEntityResponse response; - try { - response = entity.invokeAndWaitForComplete(new GetMessage(key), false); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - if (response != null && response.getResponseType() == EhcacheResponseType.GET_RESPONSE) { - return ((EhcacheEntityResponse.GetResponse)response).getChain(); - } else { - throw new ServerStoreProxyException("Response for get operation was invalid : " + - (response != null ? response.getResponseType() : "null message")); - } - } - - @Override - public void append(long key, ByteBuffer payLoad) throws TimeoutException { - try { - entity.invokeAndWaitForReceive(new AppendMessage(key, payLoad), true); - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } - - @Override - public Chain getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { - EhcacheEntityResponse response; - try { - response = entity.invokeAndWaitForRetired(new GetAndAppendMessage(key, payLoad), true); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - if (response != null && response.getResponseType() == EhcacheResponseType.GET_RESPONSE) { - return ((EhcacheEntityResponse.GetResponse)response).getChain(); - } else { - throw new ServerStoreProxyException("Response for getAndAppend operation was invalid : " + - (response != null ? response.getResponseType() : "null message")); - } - } - - @Override - public void replaceAtHead(long key, Chain expect, Chain update) { - // TODO: Optimize this method to just send sequences for expect Chain - try { - entity.invokeAndWaitForSend(new ReplaceAtHeadMessage(key, expect, update), false); - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } - - @Override - public void clear() throws TimeoutException { - try { - entity.invokeAndWaitForRetired(new ServerStoreOpMessage.ClearMessage(), true); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/InternalClusterTierClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/InternalClusterTierClientEntity.java deleted file mode 100644 index d7fd0b1625..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/InternalClusterTierClientEntity.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.config.Timeouts; - -/** - * InternalClusterTierClientEntity - */ -public interface InternalClusterTierClientEntity extends ClusterTierClientEntity { - - void setTimeouts(Timeouts timeouts); - - void setStoreIdentifier(String storeIdentifier); - -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ResolvedChain.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ResolvedChain.java deleted file mode 100644 index 6e3aed757d..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ResolvedChain.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.internal.store.operations.Result; -import org.ehcache.clustered.common.internal.store.Chain; - -import java.util.Collections; -import java.util.Map; - -/** - * Represents the result of a {@link Chain} resolution. - * Implementors would be wrappers over the compacted chain and the resolved operations. - * A resolver may or may not have resolved all the different keys in a chain. - * - * @param the Key type - */ -public interface ResolvedChain { - - Chain getCompactedChain(); - - Result getResolvedResult(K key); - - /** - * Indicates whether the {@link #getCompactedChain()} is effectively compacted - * compared to the original chain it was built from. - * - * @return {@code true} if the chain has been compacted during resolution, {@code false} otherwise - */ - boolean isCompacted(); - - /** - * @return the number of chain elements that were compacted if there was any compaction - */ - int getCompactionCount(); - - /** - * @return the unix epoch at which the entry should expire - */ - long getExpirationTime(); - - /** - * Represents the {@link ResolvedChain} result of a resolver that resolves - * all the keys in a {@link Chain} - */ - class Impl implements ResolvedChain { - - private final Chain compactedChain; - private final Map> resolvedOperations; - private final int compactionCount; - private final long expirationTime; - - public Impl(Chain compactedChain, Map> resolvedOperations, int compactionCount, long expirationTime) { - this.compactedChain = compactedChain; - this.resolvedOperations = resolvedOperations; - this.compactionCount = compactionCount; - this.expirationTime = expirationTime; - } - - public Impl(Chain compactedChain, K key, Result result, int compactedSize, long expirationTime) { - this(compactedChain, Collections.singletonMap(key, result), compactedSize, expirationTime); - } - - public Chain getCompactedChain() { - return this.compactedChain; - } - - public Result getResolvedResult(K key) { - return resolvedOperations.get(key); - } - - @Override - public boolean isCompacted() { - return compactionCount > 0; - } - - public int getCompactionCount() { - return compactionCount; - } - - @Override - public long getExpirationTime() { - return expirationTime; - } - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java deleted file mode 100644 index 355aaccbfb..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.ServerStore; - -/** - * @author Ludovic Orban - */ -public interface ServerStoreProxy extends ServerStore { - - /** - * The invalidation listener - */ - interface ServerCallback { - /** - * Callback for invalidation of hash requests - * - * @param hash the hash of the keys to invalidate - */ - void onInvalidateHash(long hash); - - /** - * Callback for invalidation of all requests - */ - void onInvalidateAll(); - - Chain compact(Chain chain); - } - - /** - * Gets the identifier linking a client-side cache to a {@code ServerStore} instance. - * - * @return the cache identifier - */ - String getCacheId(); - - /** - * Closes this proxy. - */ - void close(); - -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java deleted file mode 100644 index 8d1457b1a6..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store.operations; - -import org.ehcache.clustered.client.internal.store.ChainBuilder; -import org.ehcache.clustered.client.internal.store.ResolvedChain; -import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -/** - * An abstract chain resolver. - *

- * Operation application is performed in subclasses specialized for eternal and non-eternal caches. - * - * @see EternalChainResolver - * @see ExpiryChainResolver - * - * @param key type - * @param value type - */ -public abstract class ChainResolver { - protected static final Logger LOG = LoggerFactory.getLogger(EternalChainResolver.class); - protected static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; - protected final OperationsCodec codec; - - public ChainResolver(final OperationsCodec codec) { - this.codec = codec; - } - - /** - * Extract the {@code Element}s from the provided {@code Chain} that are not associated with the provided key - * and create a new {@code Chain} - * - * Separate the {@code Element}s from the provided {@code Chain} that are associated and not associated with - * the provided key. Create a new chain with the unassociated {@code Element}s. Resolve the associated elements - * and append the resolved {@code Element} to the newly created chain. - * - * @param chain a heterogeneous {@code Chain} - * @param key a key - * @param now time when the chain is being resolved - * @return a resolved chain, result of resolution of chain provided - */ - public ResolvedChain resolve(Chain chain, K key, long now) { - PutOperation result = null; - ChainBuilder newChainBuilder = new ChainBuilder(); - boolean matched = false; - for (Element element : chain) { - ByteBuffer payload = element.getPayload(); - Operation operation = codec.decode(payload); - - if(key.equals(operation.getKey())) { - matched = true; - result = applyOperation(key, result, operation, now); - } else { - payload.rewind(); - newChainBuilder = newChainBuilder.add(payload); - } - } - - if(result == null) { - if (matched) { - Chain newChain = newChainBuilder.build(); - return new ResolvedChain.Impl<>(newChain, key, null, chain.length() - newChain.length(), Long.MAX_VALUE); - } else { - return new ResolvedChain.Impl<>(chain, key, null, 0, Long.MAX_VALUE); - } - } else { - Chain newChain = newChainBuilder.add(codec.encode(result)).build(); - return new ResolvedChain.Impl<>(newChain, key, result, chain.length() - newChain.length(), result.expirationTime()); - } - } - - /** - * Compacts the given chain by resolving every key within. - * - * @param chain a compacted heterogenous {@code Chain} - * @param now time when the chain is being resolved - * @return a compacted chain - */ - public Chain applyOperation(Chain chain, long now) { - //absent hash-collisions this should always be a 1 entry map - Map> compacted = new HashMap<>(2); - for (Element element : chain) { - ByteBuffer payload = element.getPayload(); - Operation operation = codec.decode(payload); - compacted.compute(operation.getKey(), (k, v) -> applyOperation(k, v, operation, now)); - } - - ChainBuilder builder = new ChainBuilder(); - for (PutOperation operation : compacted.values()) { - builder = builder.add(codec.encode(operation)); - } - return builder.build(); - } - - /** - * Applies the given operation to the current state at the time specified. - * - * @param key cache key - * @param existing current state - * @param operation operation to apply - * @param now current time - * @return an equivalent put operation - */ - protected abstract PutOperation applyOperation(K key, PutOperation existing, Operation operation, long now); -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolver.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolver.java deleted file mode 100644 index 9868fd5c8a..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolver.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store.operations; - -import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec; - -import static java.lang.Math.max; - -/** - * A specialized chain resolver for eternal caches. - * - * @see org.ehcache.expiry.Expirations#noExpiration() - * - * @param key type - * @param value type - */ -public class EternalChainResolver extends ChainResolver { - - public EternalChainResolver(final OperationsCodec codec) { - super(codec); - } - - /** - * Applies the given operation returning a result that never expires. - * - * @param key cache key - * @param existing current state - * @param operation operation to apply - * @param now current time - * @return the equivalent put operation - */ - protected PutOperation applyOperation(K key, PutOperation existing, Operation operation, long now) { - final Result newValue = operation.apply(existing); - if (newValue == null) { - return null; - } else { - return newValue.asOperationExpiringAt(Long.MAX_VALUE); - } - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolver.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolver.java deleted file mode 100644 index fd23dce9bf..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolver.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store.operations; - -import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; - -import static java.util.Objects.requireNonNull; - -/** - * A specialized chain resolver for non-eternal caches. - * - * @param key type - * @param value type - */ -public class ExpiryChainResolver extends ChainResolver { - - private final Expiry expiry; - - /** - * Creates a resolver with the given codec and expiry policy. - * - * @param codec operation codec - * @param expiry expiry policy - */ - public ExpiryChainResolver(final OperationsCodec codec, Expiry expiry) { - super(codec); - this.expiry = requireNonNull(expiry, "Expiry cannot be null"); - } - - /** - * Applies the given operation returning a result with an expiry time determined by this resolvers expiry policy. - *

- * If the resolved operations expiry time has passed then {@code null} is returned. - * - * @param key cache key - * @param existing current state - * @param operation operation to apply - * @param now current time - * @return the equivalent put operation - */ - @Override - protected PutOperation applyOperation(K key, PutOperation existing, Operation operation, long now) { - final Result newValue = operation.apply(existing); - if (newValue == null) { - return null; - } else { - long expirationTime = calculateExpiryTime(key, existing, operation, newValue); - - if (now >= expirationTime) { - return null; - } else { - return newValue.asOperationExpiringAt(expirationTime); - } - } - } - - /** - * Calculates the expiration time of the new state based on this resolvers expiry policy. - * - * @param key cache key - * @param existing current state - * @param operation operation to apply - * @param newValue new state - * @return the calculated expiry time - */ - private long calculateExpiryTime(K key, PutOperation existing, Operation operation, Result newValue) { - if (operation.isExpiryAvailable()) { - return operation.expirationTime(); - } else { - try { - Duration duration; - if (existing == null) { - duration = requireNonNull(expiry.getExpiryForCreation(key, newValue.getValue())); - } else { - duration = expiry.getExpiryForUpdate(key, existing::getValue, newValue.getValue()); - if (duration == null) { - return existing.expirationTime(); - } - } - if (duration.isInfinite()) { - return Long.MAX_VALUE; - } else { - long time = TIME_UNIT.convert(duration.getLength(), duration.getTimeUnit()); - return time + operation.timeStamp(); - } - } catch (Exception ex) { - LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", ex); - return Long.MIN_VALUE; - } - } - } -} diff --git a/clustered/client/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/clustered/client/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory deleted file mode 100644 index 3f9d3fbd63..0000000000 --- a/clustered/client/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory +++ /dev/null @@ -1,2 +0,0 @@ -org.ehcache.clustered.client.internal.service.ClusteringServiceFactory -org.ehcache.clustered.client.internal.store.ClusteredStoreProviderFactory diff --git a/clustered/client/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser b/clustered/client/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser deleted file mode 100644 index aa772aacfb..0000000000 --- a/clustered/client/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser +++ /dev/null @@ -1 +0,0 @@ -org.ehcache.clustered.client.internal.config.xml.ClusteringServiceConfigurationParser \ No newline at end of file diff --git a/clustered/client/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser b/clustered/client/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser deleted file mode 100644 index aa772aacfb..0000000000 --- a/clustered/client/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser +++ /dev/null @@ -1 +0,0 @@ -org.ehcache.clustered.client.internal.config.xml.ClusteringServiceConfigurationParser \ No newline at end of file diff --git a/clustered/client/src/main/resources/ehcache-clustered-ext.xsd b/clustered/client/src/main/resources/ehcache-clustered-ext.xsd deleted file mode 100644 index 90d0524da7..0000000000 --- a/clustered/client/src/main/resources/ehcache-clustered-ext.xsd +++ /dev/null @@ -1,221 +0,0 @@ - - - - - - - - - - - - Used within the /config/service element of an Ehcache configuration, this element - describes cluster service properties. - - - - - - - - Specifies the server-side configuration of the entity to be accessed and or created. - - - - - - - - - - Specifies the server endpoint to use for identifying cluster configuration. - - - - - - - Specifies the amount of time a cache read operation will wait for a response from a cluster - server before abandoning the cluster operation. - - - - - - - Specifies the amount of time a cache write operation will wait for a response from a cluster - server before abandoning the cluster operation. - - - - - - - Specifies the amount of time a cache will wait to connect to a cluster - server before abandoning the cluster operation. - - - - - - - - - - - - Identifies a single cluster member by URL. - - - - - - - - - - - - - - - - - Specifies the default server-side storage resource to use for storing cache data. - - - - - - - Defines a pool of server-side storage resource to be shared amongst multiple caches. - - - - - - - xml:lang="en"> - True if server side components should be automatically created if they are absent. - - - - - - - - - - - - - - - - - Shared pool name. - - - - - - - - - - - - Clustered cache resource with a dedicated size. - - - - - - - - - - - - - - - - Clustered cache resource sharing a pool with other cache resources. - - - - - - - - - Name of the shared pool this resource uses. - - - - - - - - - Clustered cache resource which inherits the resource pool configured on the server. - - - - - - - - - - Optional reference to a server-side storage resource. - - - - - - - - - - Required reference to a server-side storage resource. - - - - - - - - - - - - - - - - - - diff --git a/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java deleted file mode 100644 index 6a80761325..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered; - -import org.ehcache.Cache; -import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.net.URI; - -public class ClusteredResourcePoolUpdationTest { - - private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); - - private static PersistentCacheManager cacheManager; - private static Cache dedicatedCache; - private static Cache sharedCache; - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @BeforeClass - public static void setUp() throws Exception { - UnitTestConnectionService.add(CLUSTER_URI, - new UnitTestConnectionService.PassthroughServerBuilder() - .resource("primary-server-resource", 8, MemoryUnit.MB) - .resource("secondary-server-resource", 8, MemoryUnit.MB) - .build()); - - cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER_URI).autoCreate() - .defaultServerResource("primary-server-resource") - .resourcePool("resource-pool-a", 2, MemoryUnit.MB, "secondary-server-resource") - .resourcePool("resource-pool-b", 4, MemoryUnit.MB)) - .withCache("dedicated-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB)))) - .withCache("shared-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")))) - .build(); - cacheManager.init(); - - dedicatedCache = cacheManager.getCache("dedicated-cache", Long.class, String.class); - sharedCache = cacheManager.getCache("shared-cache", Long.class, String.class); - } - - @AfterClass - public static void tearDown() throws Exception { - cacheManager.close(); - UnitTestConnectionService.remove(CLUSTER_URI); - } - - @Test - public void testClusteredDedicatedResourcePoolUpdation() throws Exception { - expectedException.expect(UnsupportedOperationException.class); - expectedException.expectMessage("Updating CLUSTERED resource is not supported"); - dedicatedCache.getRuntimeConfiguration().updateResourcePools( - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB)) - .build() - ); - } - - @Test - public void testClusteredSharedResourcePoolUpdation() throws Exception { - expectedException.expect(UnsupportedOperationException.class); - expectedException.expectMessage("Updating CLUSTERED resource is not supported"); - sharedCache.getRuntimeConfiguration().updateResourcePools( - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")) - .build() - ); - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java deleted file mode 100644 index 62babd8882..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client; - -import org.ehcache.Cache; -import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.internal.TimeSourceConfiguration; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.net.URI; -import java.util.concurrent.TimeUnit; - -import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; -import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; - -/** - * - */ -public class BasicClusteredCacheExpiryTest { - - private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); - private static final CacheManagerBuilder commonClusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER_URI).autoCreate()) - .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1L, TimeUnit.MILLISECONDS))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); - - @Before - public void definePassthroughServer() throws Exception { - UnitTestConnectionService.add(CLUSTER_URI, - new UnitTestConnectionService.PassthroughServerBuilder() - .resource("primary-server-resource", 64, MemoryUnit.MB) - .resource("secondary-server-resource", 64, MemoryUnit.MB) - .build()); - } - - @After - public void removePassthroughServer() throws Exception { - UnitTestConnectionService.remove(CLUSTER_URI); - } - - @Test - public void testGetExpiredSingleClient() { - - TestTimeSource timeSource = new TestTimeSource(); - TimeSourceConfiguration timeSourceConfiguration = new TimeSourceConfiguration(timeSource); - - final CacheManagerBuilder clusteredCacheManagerBuilder = - commonClusteredCacheManagerBuilder.using(timeSourceConfiguration); - - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); - - final Cache cache = cacheManager.getCache("clustered-cache", Long.class, String.class); - - cache.put(1L, "value"); - assertThat(cache.get(1L), is("value")); - - timeSource.advanceTime(1); - - assertThat(cache.get(1L), nullValue()); - - cacheManager.close(); - - } - - @Test - public void testGetExpiredTwoClients() { - - TestTimeSource timeSource = new TestTimeSource(); - TimeSourceConfiguration timeSourceConfiguration = new TimeSourceConfiguration(timeSource); - - final CacheManagerBuilder clusteredCacheManagerBuilder = - commonClusteredCacheManagerBuilder.using(timeSourceConfiguration); - - final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); - final PersistentCacheManager cacheManager2 = clusteredCacheManagerBuilder.build(true); - - final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); - final Cache cache2 = cacheManager2.getCache("clustered-cache", Long.class, String.class); - - assertThat(cache2.get(1L), nullValue()); - cache1.put(1L, "value1"); - assertThat(cache1.get(1L), is("value1")); - timeSource.advanceTime(1L); - - assertThat(cache2.get(1L), nullValue()); - assertThat(cache1.get(1L), nullValue()); - - cacheManager2.close(); - cacheManager1.close(); - } - - @Test - public void testContainsKeyExpiredTwoClients() { - - TestTimeSource timeSource = new TestTimeSource(); - TimeSourceConfiguration timeSourceConfiguration = new TimeSourceConfiguration(timeSource); - - final CacheManagerBuilder clusteredCacheManagerBuilder = - commonClusteredCacheManagerBuilder.using(timeSourceConfiguration); - - final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); - final PersistentCacheManager cacheManager2 = clusteredCacheManagerBuilder.build(true); - - final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); - final Cache cache2 = cacheManager2.getCache("clustered-cache", Long.class, String.class); - - assertThat(cache2.get(1L), nullValue()); - cache1.put(1L, "value1"); - assertThat(cache1.containsKey(1L), is(true)); - timeSource.advanceTime(1L); - - assertThat(cache1.containsKey(1L), is(false)); - assertThat(cache2.containsKey(1L), is(false)); - - cacheManager2.close(); - cacheManager1.close(); - - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java deleted file mode 100644 index a7febc46de..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client; - -import org.ehcache.Cache; -import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.clustered.client.internal.UnitTestConnectionService.PassthroughServerBuilder; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.statistics.CacheStatistics; -import org.ehcache.impl.internal.statistics.DefaultStatisticsService; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.Serializable; -import java.math.BigInteger; -import java.net.URI; -import java.util.Random; -import java.util.concurrent.atomic.LongAdder; - -import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; -import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; -import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; - -/** - * Provides basic tests for creation of a cache using a {@link org.ehcache.clustered.client.internal.store.ClusteredStore ClusteredStore}. - */ -public class BasicClusteredCacheTest { - - private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); - - @Before - public void definePassthroughServer() throws Exception { - UnitTestConnectionService.add(CLUSTER_URI, - new PassthroughServerBuilder() - .resource("primary-server-resource", 64, MemoryUnit.MB) - .resource("secondary-server-resource", 64, MemoryUnit.MB) - .build()); - } - - @After - public void removePassthroughServer() throws Exception { - UnitTestConnectionService.remove(CLUSTER_URI); - } - - @Test - public void testClusteredCacheSingleClient() throws Exception { - - final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER_URI).autoCreate()) - .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); - - final Cache cache = cacheManager.getCache("clustered-cache", Long.class, String.class); - - cache.put(1L, "value"); - assertThat(cache.get(1L), is("value")); - - cacheManager.close(); - } - - @Test - public void testClusteredCacheTwoClients() throws Exception { - final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER_URI).autoCreate()) - .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) - .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))) - ; - - final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); - final PersistentCacheManager cacheManager2 = clusteredCacheManagerBuilder.build(true); - - final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); - final Cache cache2 = cacheManager2.getCache("clustered-cache", Long.class, String.class); - - assertThat(cache2.get(1L), nullValue()); - cache1.put(1L, "value1"); - assertThat(cache2.get(1L), is("value1")); - assertThat(cache1.get(1L), is("value1")); - cache1.put(1L, "value2"); - assertThat(cache2.get(1L), is("value2")); - assertThat(cache1.get(1L), is("value2")); - - cacheManager2.close(); - cacheManager1.close(); - } - - @Test - public void testClustered3TierCacheTwoClients() throws Exception { - final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER_URI).autoCreate()) - .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(1, MemoryUnit.MB) - .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))) - ; - - final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); - final PersistentCacheManager cacheManager2 = clusteredCacheManagerBuilder.build(true); - - final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); - final Cache cache2 = cacheManager2.getCache("clustered-cache", Long.class, String.class); - - assertThat(cache2.get(1L), nullValue()); - cache1.put(1L, "value1"); - cache1.put(2L, "value2"); - cache1.put(3L, "value3"); - assertThat(cache2.get(1L), is("value1")); - assertThat(cache2.get(2L), is("value2")); - assertThat(cache2.get(3L), is("value3")); - assertThat(cache2.get(1L), is("value1")); - assertThat(cache2.get(2L), is("value2")); - assertThat(cache2.get(3L), is("value3")); - assertThat(cache1.get(1L), is("value1")); - assertThat(cache1.get(2L), is("value2")); - assertThat(cache1.get(3L), is("value3")); - assertThat(cache1.get(1L), is("value1")); - assertThat(cache1.get(2L), is("value2")); - assertThat(cache1.get(3L), is("value3")); - cache1.put(1L, "value11"); - cache1.put(2L, "value12"); - cache1.put(3L, "value13"); - assertThat(cache2.get(1L), is("value11")); - assertThat(cache2.get(2L), is("value12")); - assertThat(cache2.get(3L), is("value13")); - assertThat(cache2.get(1L), is("value11")); - assertThat(cache2.get(2L), is("value12")); - assertThat(cache2.get(3L), is("value13")); - assertThat(cache1.get(1L), is("value11")); - assertThat(cache1.get(2L), is("value12")); - assertThat(cache1.get(3L), is("value13")); - assertThat(cache1.get(1L), is("value11")); - assertThat(cache1.get(2L), is("value12")); - assertThat(cache1.get(3L), is("value13")); - - cacheManager2.close(); - cacheManager1.close(); - } - - @Test - public void testTieredClusteredCache() throws Exception { - final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER_URI).autoCreate()) - .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, - heap(2) - .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); - - final Cache cache = cacheManager.getCache("clustered-cache", Long.class, String.class); - - cache.put(1L, "value"); - assertThat(cache.get(1L), is("value")); - - cacheManager.close(); - } - - @Test - public void testClusteredCacheWithSerializableValue() throws Exception { - final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder().with(cluster(CLUSTER_URI).autoCreate()) - .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, Person.class, - newResourcePoolsBuilder().with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); - PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); - - Cache cache = cacheManager.getCache("clustered-cache", Long.class, Person.class); - - cache.put(38L, new Person("Clustered Joe", 28)); - - cacheManager.close(); - - cacheManager = clusteredCacheManagerBuilder.build(true); - cache = cacheManager.getCache("clustered-cache", Long.class, Person.class); - - assertThat(cache.get(38L).name, is("Clustered Joe")); - } - - @Test - public void testLargeValues() throws Exception { - DefaultStatisticsService statisticsService = new DefaultStatisticsService(); - CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .using(statisticsService) - .with(cluster(CLUSTER_URI).autoCreate()) - .withCache("small-cache", newCacheConfigurationBuilder(Long.class, BigInteger.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(clusteredDedicated("secondary-server-resource", 4, MemoryUnit.MB)))); - - // The idea here is to add big things in the cache, and cause eviction of them to see if something crashes - - try(PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { - - Cache cache = cacheManager.getCache("small-cache", Long.class, BigInteger.class); - - Random random = new Random(); - for (long i = 0; i < 100; i++) { - BigInteger value = new BigInteger(30 * 1024 * 128 * (1 + random.nextInt(10)), random); - cache.put(i, value); - } - } - } - - public static class Person implements Serializable { - final String name; - final int age; - - public Person(String name, int age) { - this.name = name; - this.age = age; - } - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/CacheManagerDestroyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/CacheManagerDestroyTest.java deleted file mode 100644 index 128712f18c..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/CacheManagerDestroyTest.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client; - -import org.ehcache.Cache; -import org.ehcache.CachePersistenceException; -import org.ehcache.PersistentCacheManager; -import org.ehcache.StateTransitionException; -import org.ehcache.Status; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.net.URI; - -import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; -import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; - -public class CacheManagerDestroyTest { - - private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); - - private static final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER_URI).autoCreate()); - - @Before - public void definePassthroughServer() throws Exception { - UnitTestConnectionService.add(CLUSTER_URI, - new UnitTestConnectionService.PassthroughServerBuilder() - .resource("primary-server-resource", 64, MemoryUnit.MB) - .resource("secondary-server-resource", 64, MemoryUnit.MB) - .build()); - } - - @After - public void removePassthroughServer() throws Exception { - UnitTestConnectionService.remove(CLUSTER_URI); - } - - @Test - public void testDestroyCacheManagerWithSingleClient() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true); - - persistentCacheManager.close(); - persistentCacheManager.destroy(); - - assertThat(persistentCacheManager.getStatus(), is(Status.UNINITIALIZED)); - } - - @Test - public void testCreateDestroyCreate() throws Exception { - PersistentCacheManager cacheManager = newCacheManagerBuilder().with(cluster(CLUSTER_URI).autoCreate() - .defaultServerResource("primary-server-resource")) - .withCache("my-cache", newCacheConfigurationBuilder(Long.class, String.class, heap(10).with(ClusteredResourcePoolBuilder - .clusteredDedicated(2, MemoryUnit.MB)))) - .build(true); - - cacheManager.close(); - cacheManager.destroy(); - - cacheManager.init(); - } - - @Test - public void testDestroyCacheManagerWithMultipleClients() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); - PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); - - persistentCacheManager1.close(); - - try { - persistentCacheManager1.destroy(); - fail("StateTransitionException expected"); - } catch (StateTransitionException e) { - assertThat(e.getMessage(), is("Couldn't acquire cluster-wide maintenance lease")); - } - - assertThat(persistentCacheManager1.getStatus(), is(Status.UNINITIALIZED)); - - assertThat(persistentCacheManager2.getStatus(), is(Status.AVAILABLE)); - - Cache cache = persistentCacheManager2.createCache("test", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); - - cache.put(1L, "One"); - - assertThat(cache.get(1L), is("One")); - - persistentCacheManager2.close(); - } - - @Test - public void testDestroyCacheManagerDoesNotAffectsExistingCacheWithExistingClientsConnected() throws CachePersistenceException { - - CacheManagerBuilder cacheManagerBuilder = clusteredCacheManagerBuilder - .withCache("test", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); - - PersistentCacheManager persistentCacheManager1 = cacheManagerBuilder.build(true); - PersistentCacheManager persistentCacheManager2 = cacheManagerBuilder.build(true); - - persistentCacheManager1.close(); - try { - persistentCacheManager1.destroy(); - fail("StateTransitionException expected"); - } catch (StateTransitionException e) { - assertThat(e.getMessage(), is("Couldn't acquire cluster-wide maintenance lease")); - } - - Cache cache = persistentCacheManager2.getCache("test", Long.class, String.class); - - cache.put(1L, "One"); - - assertThat(cache.get(1L), is("One")); - - persistentCacheManager2.close(); - } - - @Test - public void testCloseCacheManagerSingleClient() { - CacheManagerBuilder cacheManagerBuilder = clusteredCacheManagerBuilder - .withCache("test", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); - - PersistentCacheManager persistentCacheManager1 = cacheManagerBuilder.build(true); - - persistentCacheManager1.close(); - - persistentCacheManager1.init(); - - Cache cache = persistentCacheManager1.getCache("test", Long.class, String.class); - cache.put(1L, "One"); - - assertThat(cache.get(1L), is("One")); - - persistentCacheManager1.close(); - } - - @Test - public void testCloseCacheManagerMultipleClients() { - CacheManagerBuilder cacheManagerBuilder = clusteredCacheManagerBuilder - .withCache("test", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); - - PersistentCacheManager persistentCacheManager1 = cacheManagerBuilder.build(true); - PersistentCacheManager persistentCacheManager2 = cacheManagerBuilder.build(true); - - Cache cache = persistentCacheManager1.getCache("test", Long.class, String.class); - cache.put(1L, "One"); - - assertThat(cache.get(1L), is("One")); - - persistentCacheManager1.close(); - assertThat(persistentCacheManager1.getStatus(), is(Status.UNINITIALIZED)); - - Cache cache2 = persistentCacheManager2.getCache("test", Long.class, String.class); - - assertThat(cache2.get(1L), is("One")); - - persistentCacheManager2.close(); - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java deleted file mode 100644 index 65c965c1b4..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client; - -import org.ehcache.Cache; -import org.ehcache.CachePersistenceException; -import org.ehcache.PersistentCacheManager; -import org.ehcache.Status; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.net.URI; - -import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clustered; -import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; -import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; - -public class ClusteredCacheDestroyTest { - - private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); - private static final String CLUSTERED_CACHE = "clustered-cache"; - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - private static final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER_URI).autoCreate()) - .withCache(CLUSTERED_CACHE, newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); - - @Before - public void definePassthroughServer() throws Exception { - UnitTestConnectionService.add(CLUSTER_URI, - new UnitTestConnectionService.PassthroughServerBuilder() - .resource("primary-server-resource", 64, MemoryUnit.MB) - .resource("secondary-server-resource", 64, MemoryUnit.MB) - .build()); - } - - @After - public void removePassthroughServer() throws Exception { - UnitTestConnectionService.remove(CLUSTER_URI); - } - - @Test - public void testDestroyCacheWhenSingleClientIsConnected() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true); - - persistentCacheManager.destroyCache(CLUSTERED_CACHE); - - final Cache cache = persistentCacheManager.getCache(CLUSTERED_CACHE, Long.class, String.class); - - assertThat(cache, nullValue()); - - persistentCacheManager.close(); - } - - @Test - public void testDestroyFreesUpTheAllocatedResource() throws CachePersistenceException { - - PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true); - - CacheConfigurationBuilder configBuilder = newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 34, MemoryUnit.MB))); - - try { - Cache anotherCache = persistentCacheManager.createCache("another-cache", configBuilder); - fail(); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), is("Cache 'another-cache' creation in EhcacheManager failed.")); - } - - persistentCacheManager.destroyCache(CLUSTERED_CACHE); - - Cache anotherCache = persistentCacheManager.createCache("another-cache", configBuilder); - - anotherCache.put(1L, "One"); - assertThat(anotherCache.get(1L), is("One")); - - persistentCacheManager.close(); - } - - @Test - public void testDestroyUnknownCacheAlias() throws Exception { - clusteredCacheManagerBuilder.build(true).close(); - - PersistentCacheManager cacheManager = newCacheManagerBuilder().with(cluster(CLUSTER_URI).expecting()).build(true); - - cacheManager.destroyCache(CLUSTERED_CACHE); - - try { - cacheManager.createCache(CLUSTERED_CACHE, newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() - .with(clustered()))); - fail("Expected exception as clustered store no longer exists"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), containsString(CLUSTERED_CACHE)); - } - cacheManager.close(); - } - - @Test - public void testDestroyCacheWhenMultipleClientsConnected() { - PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); - PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); - - final Cache cache1 = persistentCacheManager1.getCache(CLUSTERED_CACHE, Long.class, String.class); - - final Cache cache2 = persistentCacheManager2.getCache(CLUSTERED_CACHE, Long.class, String.class); - - try { - persistentCacheManager1.destroyCache(CLUSTERED_CACHE); - fail(); - } catch (CachePersistenceException e) { - assertThat(e.getMessage(), containsString("Cannot destroy cluster tier")); - } - - try { - cache1.put(1L, "One"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), is("State is UNINITIALIZED")); - } - - assertThat(cache2.get(1L), nullValue()); - - cache2.put(1L, "One"); - - assertThat(cache2.get(1L), is("One")); - - persistentCacheManager1.close(); - persistentCacheManager2.close(); - } - - private static Throwable getRootCause(Throwable t) { - if (t.getCause() == null || t.getCause() == t) { - return t; - } - return getRootCause(t.getCause()); - } - - @Test - public void testDestroyCacheWithCacheManagerStopped() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true); - persistentCacheManager.close(); - persistentCacheManager.destroyCache(CLUSTERED_CACHE); - assertThat(persistentCacheManager.getStatus(), is(Status.UNINITIALIZED)); - } - - @Test - public void testDestroyCacheWithTwoCacheManagerOnSameCache_forbiddenWhenInUse() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); - PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); - - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Cannot destroy cluster tier 'clustered-cache': in use by other client(s)"); - persistentCacheManager1.destroyCache(CLUSTERED_CACHE); - } - - @Test - public void testDestroyCacheWithTwoCacheManagerOnSameCache_firstRemovesSecondDestroy() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); - PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); - - persistentCacheManager2.removeCache(CLUSTERED_CACHE); - - persistentCacheManager1.destroyCache(CLUSTERED_CACHE); - } - - @Test - public void testDestroyCacheWithTwoCacheManagerOnSameCache_secondDoesntHaveTheCacheButPreventExclusiveAccessToCluster() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(false); - PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); - - persistentCacheManager2.removeCache(CLUSTERED_CACHE); - - persistentCacheManager1.destroyCache(CLUSTERED_CACHE); - } -} - diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/NonClusteredCacheTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/NonClusteredCacheTest.java deleted file mode 100644 index 50256cb2ec..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/NonClusteredCacheTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client; - -import org.ehcache.CacheManager; -import org.ehcache.clustered.client.internal.store.ClusteredStore; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.core.spi.service.ServiceFactory; -import org.hamcrest.Matchers; -import org.junit.Test; - -import java.util.HashSet; -import java.util.Set; - -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -/** - * Ensures that a non-clustered {@code CacheManager} can be created when clustered classes are - * available in classpath. - */ -public class NonClusteredCacheTest { - - @Test - public void testNonClustered() throws Exception { - - /* - * Ensure the cluster provider classes are loadable through the ServiceLoader mechanism. - */ - Set> targetProviders = new HashSet<>(); - targetProviders.add(ClusteredStore.Provider.class); - targetProviders.add(ClusteringService.class); - for (ServiceFactory factory : ClassLoading.libraryServiceLoaderFor(ServiceFactory.class)) { - if (targetProviders.remove(factory.getServiceType())) { - if (targetProviders.isEmpty()) { - break; - } - } - } - assertThat(targetProviders, is(Matchers.empty())); - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder( - String.class, - String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .offheap(1, MemoryUnit.MB) - .build()) - .build(); - - - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true); - - cacheManager.createCache("cache-1", cacheConfiguration); - cacheManager.createCache("cache-2", cacheConfiguration); - - cacheManager.close(); - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java deleted file mode 100644 index 0a0dc345e5..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client; - -import bitronix.tm.BitronixTransactionManager; -import bitronix.tm.TransactionManagerServices; - -import org.ehcache.PersistentCacheManager; -import org.ehcache.StateTransitionException; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.event.CacheEvent; -import org.ehcache.event.CacheEventListener; -import org.ehcache.event.EventType; -import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; -import org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup; -import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.net.URI; -import java.util.Map; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; - -/** - * This class should be removed as and when following features are done. - */ -public class UnSupportedCombinationsWIthClusteredCacheTest { - - @Before - public void resetPassthroughServer() throws Exception { - UnitTestConnectionService.add("terracotta://localhost/my-application", - new UnitTestConnectionService.PassthroughServerBuilder() - .resource("primary-server-resource", 128, MemoryUnit.MB) - .resource("secondary-server-resource", 96, MemoryUnit.MB) - .build()); - } - - @After - public void removePassthroughServer() throws Exception { - UnitTestConnectionService.remove("terracotta://localhost/my-application"); - } - - @Test - public void testClusteredCacheWithLoaderWriter() { - - final CacheManagerBuilder clusteredCacheManagerBuilder - = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .autoCreate()); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); - - try { - CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) - .withLoaderWriter(new TestLoaderWriter()) - .build(); - - cacheManager.createCache("test", config); - fail("IllegalStateException expected"); - } catch (IllegalStateException e){ - assertThat(e.getCause().getMessage(), is("CacheLoaderWriter is not supported with clustered tiers")); - } - cacheManager.close(); - } - - @Test - public void testClusteredCacheWithEventListeners() { - - CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder - .newEventListenerConfiguration(new TestEventListener(), EventType.CREATED, EventType.UPDATED) // <1> - .unordered().asynchronous(); // <2> - - final CacheManagerBuilder clusteredCacheManagerBuilder - = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .autoCreate()); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); - - try { - CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) - .add(cacheEventListenerConfiguration) - .build(); - - cacheManager.createCache("test", config); - fail("IllegalStateException expected"); - } catch (IllegalStateException e){ - assertThat(e.getCause().getMessage(), is("CacheEventListener is not supported with clustered tiers")); - } - cacheManager.close(); - } - - @Test - public void testClusteredCacheWithXA() throws Exception { - TransactionManagerServices.getConfiguration().setJournal("null"); - - BitronixTransactionManager transactionManager = - TransactionManagerServices.getTransactionManager(); - - PersistentCacheManager persistentCacheManager = null; - try { - CacheManagerBuilder.newCacheManagerBuilder() - .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")).autoCreate()) - .withCache("xaCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB)) - ) - .add(new XAStoreConfiguration("xaCache")) - .build() - ) - .build(true); - } catch (StateTransitionException e) { - assertThat(e.getCause().getCause().getMessage(), is("Unsupported resource type : interface org.ehcache.clustered.client.config.DedicatedClusteredResourcePool")); - } - - transactionManager.shutdown(); - } - - private static class TestLoaderWriter implements CacheLoaderWriter { - - @Override - public String load(Long key) throws Exception { - return null; - } - - @Override - public Map loadAll(Iterable keys) throws BulkCacheLoadingException, Exception { - return null; - } - - @Override - public void write(Long key, String value) throws Exception { - - } - - @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { - - } - - @Override - public void delete(Long key) throws Exception { - - } - - @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { - - } - } - - private static class TestEventListener implements CacheEventListener { - - @Override - public void onEvent(CacheEvent event) { - - } - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/XmlUnknownCacheTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/XmlUnknownCacheTest.java deleted file mode 100644 index 4c90d5496a..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/XmlUnknownCacheTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client; - -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.endsWith; -import static org.junit.Assert.fail; - -import org.ehcache.xml.XmlConfiguration; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.junit.Assert; -import org.junit.Test; -import static org.hamcrest.Matchers.contains; -import static org.junit.Assert.fail; - -/** - * - * @author GGIB - */ -public class XmlUnknownCacheTest { - - @Test - public void testGetUnknownCache() { - XmlConfiguration xmlConfiguration = new XmlConfiguration(this.getClass().getResource("/configs/unknown-cluster-cache.xml")); - Assert.assertThat(xmlConfiguration.getCacheConfigurations().keySet(),contains("unknownCache")); - } - - @Test - public void testGetUnknownCacheInvalidAttribute() { - try { - new XmlConfiguration(this.getClass().getResource("/configs/unknown-cluster-cache-invalid-attribute.xml")); - fail("Expected XmlConfigurationException"); - } catch(XmlConfigurationException xce) { - Assert.assertThat(xce.getCause().getMessage(), endsWith("Attribute 'unit' is not allowed to appear in element 'tc:clustered'.")); - } - } - - @Test - public void testGetUnknownCacheInvalidElement() { - try { - new XmlConfiguration(this.getClass().getResource("/configs/unknown-cluster-cache-invalid-element.xml")); - fail("Expected XmlConfigurationException"); - } catch(XmlConfigurationException xce) { - Assert.assertThat(xce.getCause().getMessage(), endsWith("Element 'tc:clustered' must have no character or element information item [children], because the type's content type is empty.")); - } - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java deleted file mode 100644 index 464a67578a..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.config; - -import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.net.URI; -import java.util.Collections; - -import static net.bytebuddy.matcher.ElementMatchers.is; -import static org.assertj.core.api.Assertions.assertThat; - -public class ClusteringServiceConfigurationTest { - - private static URI DEFAULT_URI = URI.create("terracotta://localhost:9450"); - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testGetConnectionUrlNull() throws Exception { - expectedException.expect(NullPointerException.class); - new ClusteringServiceConfiguration((URI)null); - } - - @Test - public void testGetConnectionUrl() throws Exception { - assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getClusterUri()).isEqualTo(DEFAULT_URI); - } - - @Test - public void testTimeouts() throws Exception { - Timeouts timeouts = TimeoutsBuilder.timeouts().build(); - assertThat(new ClusteringServiceConfiguration(DEFAULT_URI, timeouts).getTimeouts()).isSameAs(timeouts); - } - - @Test - public void testDefaultTimeouts() throws Exception { - assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getTimeouts()).isEqualTo(TimeoutsBuilder.timeouts().build()); - } - - @Test - public void testTimeoutsCannotBeNull2Args() throws Exception { - expectedException.expect(NullPointerException.class); - new ClusteringServiceConfiguration(DEFAULT_URI, (Timeouts) null); - } - - @Test - public void testTimeoutsCannotBeNull3Args() throws Exception { - expectedException.expect(NullPointerException.class); - new ClusteringServiceConfiguration(DEFAULT_URI, (Timeouts) null, new ServerSideConfiguration(Collections.emptyMap())); - } - - @Test - public void testTimeoutsCannotBeNull4Args() throws Exception { - expectedException.expect(NullPointerException.class); - new ClusteringServiceConfiguration(DEFAULT_URI, (Timeouts) null, true, new ServerSideConfiguration(Collections.emptyMap())); - } - - @Test - public void testGetServiceType() throws Exception { - assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getServiceType()).isEqualTo(ClusteringService.class); - } - - @Test - public void testGetAutoCreate() throws Exception { - assertThat(new ClusteringServiceConfiguration(DEFAULT_URI, true, - new ServerSideConfiguration(Collections.emptyMap())).isAutoCreate()).isTrue(); - } - - @Test - public void testBuilder() throws Exception { - assertThat(new ClusteringServiceConfiguration(DEFAULT_URI) - .builder(CacheManagerBuilder.newCacheManagerBuilder())).isExactlyInstanceOf(CacheManagerBuilder.class); - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java b/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java deleted file mode 100644 index 31e3292bff..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java +++ /dev/null @@ -1,257 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.docs; - -import org.ehcache.Cache; -import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.xml.XmlConfiguration; -import org.junit.After; -import org.junit.Test; - -import java.net.URI; - -import org.junit.Before; - -/** - * Samples demonstrating use of a clustered cache. - */ -public class GettingStarted { - - @Before - public void resetPassthroughServer() throws Exception { - UnitTestConnectionService.add("terracotta://localhost/my-application", - new UnitTestConnectionService.PassthroughServerBuilder() - .resource("primary-server-resource", 128, MemoryUnit.MB) - .resource("secondary-server-resource", 96, MemoryUnit.MB) - .build()); - } - - @After - public void removePassthroughServer() throws Exception { - UnitTestConnectionService.remove("terracotta://localhost/my-application"); - } - - @Test - public void clusteredCacheManagerExample() throws Exception { - // tag::clusteredCacheManagerExample[] - CacheManagerBuilder clusteredCacheManagerBuilder = - CacheManagerBuilder.newCacheManagerBuilder() // <1> - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) // <2> - .autoCreate()); // <3> - PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); // <4> - - cacheManager.close(); // <5> - // end::clusteredCacheManagerExample[] - } - - @Test - public void clusteredCacheManagerWithServerSideConfigExample() throws Exception { - // tag::clusteredCacheManagerWithServerSideConfigExample[] - CacheManagerBuilder clusteredCacheManagerBuilder = - CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")).autoCreate() - .defaultServerResource("primary-server-resource") // <1> - .resourcePool("resource-pool-a", 28, MemoryUnit.MB, "secondary-server-resource") // <2> - .resourcePool("resource-pool-b", 32, MemoryUnit.MB)) // <3> - .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <4> - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB)))) // <5> - .withCache("shared-cache-1", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")))) // <6> - .withCache("shared-cache-2", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")))); // <7> - PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); // <8> - - cacheManager.close(); - // end::clusteredCacheManagerWithServerSideConfigExample[] - } - - @Test - public void clusteredCacheManagerWithDynamicallyAddedCacheExample() throws Exception { - // tag::clusteredCacheManagerWithDynamicallyAddedCacheExample[] - CacheManagerBuilder clusteredCacheManagerBuilder - = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .autoCreate() - .defaultServerResource("primary-server-resource") - .resourcePool("resource-pool-a", 28, MemoryUnit.MB)); - PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); - cacheManager.init(); - - try { - CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))).build(); - - Cache cache = cacheManager.createCache("clustered-cache", config); - - } finally { - cacheManager.close(); - } - // end::clusteredCacheManagerWithDynamicallyAddedCacheExample[] - } - - @Test - public void explicitConsistencyConfiguration() throws Exception { - CacheManagerBuilder clusteredCacheManagerBuilder - = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .autoCreate() - .defaultServerResource("primary-server-resource") - .resourcePool("resource-pool-a", 32, MemoryUnit.MB)); - PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); - cacheManager.init(); - - try { - // tag::clusteredCacheConsistency[] - CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) // <1> - .build(); - - Cache cache = cacheManager.createCache("clustered-cache", config); - cache.put(42L, "All you need to know!"); // <2> - - // end::clusteredCacheConsistency[] - } finally { - cacheManager.close(); - } - } - - @Test - public void clusteredCacheTieredExample() throws Exception { - CacheManagerBuilder clusteredCacheManagerBuilder - = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .autoCreate() - .defaultServerResource("primary-server-resource") - .resourcePool("resource-pool-a", 32, MemoryUnit.MB)); - PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); - cacheManager.init(); - - try { - // tag::clusteredCacheTieredExample[] - CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(2, MemoryUnit.MB) // <1> - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) // <2> - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) - .build(); - - Cache cache = cacheManager.createCache("clustered-cache-tiered", config); - cache.put(42L, "All you need to know!"); - - // end::clusteredCacheTieredExample[] - } finally { - cacheManager.close(); - } - } - - @Test - public void clusteredCacheManagerLifecycleExamples() throws Exception { - // tag::clusteredCacheManagerLifecycle[] - CacheManagerBuilder autoCreate = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .autoCreate() // <1> - .resourcePool("resource-pool", 32, MemoryUnit.MB, "primary-server-resource")) - .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool")))); - - CacheManagerBuilder expecting = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .expecting() // <2> - .resourcePool("resource-pool", 32, MemoryUnit.MB, "primary-server-resource")) - .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool")))); - - CacheManagerBuilder configless = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application"))) - // <3> - .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool")))); - // end::clusteredCacheManagerLifecycle[] - - autoCreate.build(true).close(); - expecting.build(true).close(); - configless.build(true).close(); - } - - @Test - public void loadDocsXml() throws Exception { - new XmlConfiguration(getClass().getResource("/configs/docs/ehcache-clustered.xml")); - } - - @Test - public void unknownClusteredCacheExample() - { - // tag::unspecifiedClusteredCacheExample[] - - CacheManagerBuilder cacheManagerBuilderAutoCreate = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .autoCreate() // <1> - .resourcePool("resource-pool", 32, MemoryUnit.MB, "primary-server-resource")); - - PersistentCacheManager cacheManager1 = cacheManagerBuilderAutoCreate.build(false); - cacheManager1.init(); - - CacheConfiguration cacheConfigDedicated = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) // <2> - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) - .build(); - - Cache cacheDedicated = cacheManager1.createCache("my-dedicated-cache", cacheConfigDedicated); // <3> - - CacheManagerBuilder cacheManagerBuilderExpecting = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) - .expecting() // <4> - .resourcePool("resource-pool", 32, MemoryUnit.MB, "primary-server-resource")); - - PersistentCacheManager cacheManager2 = cacheManagerBuilderExpecting.build(false); - cacheManager2.init(); - - CacheConfiguration cacheConfigUnspecified = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clustered())) // <5> - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) - .build(); - - Cache cacheUnspecified = cacheManager2.createCache("my-dedicated-cache", cacheConfigUnspecified); // <6> - - // end::unspecifiedClusteredCacheExample[] - - cacheManager1.close(); - cacheManager2.close(); - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/docs/Tiering.java b/clustered/client/src/test/java/org/ehcache/clustered/client/docs/Tiering.java deleted file mode 100644 index 561836d52f..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/docs/Tiering.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.docs; - -import java.net.URI; - -import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; - -/** - * Tiering - */ -public class Tiering { - - private static final URI CLUSTER_URI = URI.create("terracotta://example.com/my-application"); - - @Before - public void definePassthroughServer() throws Exception { - UnitTestConnectionService.add(CLUSTER_URI, - new UnitTestConnectionService.PassthroughServerBuilder() - .resource("primary-server-resource", 64, MemoryUnit.MB) - .resource("secondary-server-resource", 64, MemoryUnit.MB) - .build()); - } - - @After - public void removePassthroughServer() throws Exception { - UnitTestConnectionService.remove(CLUSTER_URI); - } - - @Test - public void testSingleTier() { - // tag::clusteredOnly[] - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <1> - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated(2, MemoryUnit.GB))); // <2> - // end::clusteredOnly[] - } - - @Test - public void threeTiersCacheManager() throws Exception { - // tag::threeTiersCacheManager[] - PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .with(cluster(CLUSTER_URI).autoCreate()) // <1> - .withCache("threeTierCache", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) // <2> - .offheap(1, MemoryUnit.MB) // <3> - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)) // <4> - ) - ).build(true); - // end::threeTiersCacheManager[] - - persistentCacheManager.close(); - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/MockConnectionService.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/MockConnectionService.java deleted file mode 100644 index c99025050a..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/MockConnectionService.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal; - -import org.terracotta.connection.Connection; -import org.terracotta.connection.ConnectionException; -import org.terracotta.connection.ConnectionService; - -import java.net.URI; -import java.util.Properties; - -/** - * MockConnectionService - */ -public class MockConnectionService implements ConnectionService { - - public static Connection mockConnection; - - @Override - public boolean handlesURI(URI uri) { - return uri.getScheme().equals("mock"); - } - - @Override - public Connection connect(URI uri, Properties properties) throws ConnectionException { - if (mockConnection == null) { - throw new IllegalStateException("Set mock connection first"); - } - return mockConnection; - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java deleted file mode 100644 index 72964e58ea..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java +++ /dev/null @@ -1,334 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.config.xml; - -import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; -import org.ehcache.clustered.client.config.Timeouts; -import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; -import org.ehcache.config.Configuration; -import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.core.spi.service.ServiceUtils; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.xml.CacheManagerServiceConfigurationParser; -import org.ehcache.xml.XmlConfiguration; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.ehcache.xml.model.TimeType; -import org.hamcrest.Matchers; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.rules.TestName; -import org.w3c.dom.Attr; -import org.w3c.dom.Element; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.math.BigInteger; -import java.net.URL; -import java.time.Duration; -import java.time.temporal.TemporalUnit; -import java.util.Collection; -import java.util.ServiceLoader; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.transform.stream.StreamSource; - -import static java.time.temporal.ChronoUnit.MINUTES; -import static org.ehcache.xml.XmlModel.convertToJavaTimeUnit; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.*; - -/** - * Basic tests for {@link ClusteringServiceConfigurationParser}. - */ -public class ClusteringServiceConfigurationParserTest { - - @ClassRule - public static final TemporaryFolder folder = new TemporaryFolder(); - - @Rule - public final TestName testName = new TestName(); - - - /** - * Ensures the {@link ClusteringServiceConfigurationParser} is locatable as a - * {@link CacheManagerServiceConfigurationParser} instance. - */ - @Test - public void testServiceLocator() throws Exception { - final String expectedParser = ClusteringServiceConfigurationParser.class.getName(); - final ServiceLoader parsers = - ClassLoading.libraryServiceLoaderFor(CacheManagerServiceConfigurationParser.class); - foundParser: { - for (final CacheManagerServiceConfigurationParser parser : parsers) { - if (parser.getClass().getName().equals(expectedParser)) { - break foundParser; - } - } - fail("Expected parser not found"); - } - } - - /** - * Ensures the namespace declared by {@link ClusteringServiceConfigurationParser} and its - * schema are the same. - */ - @Test - public void testSchema() throws Exception { - final ClusteringServiceConfigurationParser parser = new ClusteringServiceConfigurationParser(); - final StreamSource schemaSource = (StreamSource) parser.getXmlSchema(); - - final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); - factory.setNamespaceAware(true); - factory.setIgnoringComments(true); - factory.setIgnoringElementContentWhitespace(true); - - final DocumentBuilder domBuilder = factory.newDocumentBuilder(); - final Element schema = domBuilder.parse(schemaSource.getInputStream()).getDocumentElement(); - final Attr targetNamespaceAttr = schema.getAttributeNode("targetNamespace"); - assertThat(targetNamespaceAttr, is(not(nullValue()))); - assertThat(targetNamespaceAttr.getValue(), is(parser.getNamespace().toString())); - } - - @Test - public void testGetTimeout() throws Exception { - - final String[] config = new String[] - { - "", - "", - " ", - " ", - " ", - " 5", - " 10", - " 15", - " ", - " ", - "", - "" - }; - - final Configuration configuration = new XmlConfiguration(makeConfig(config)); - - Collection> serviceCreationConfigurations = - configuration.getServiceCreationConfigurations(); - assertThat(serviceCreationConfigurations, is(not(Matchers.empty()))); - - ClusteringServiceConfiguration clusteringServiceConfiguration = - ServiceUtils.findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); - assertThat(clusteringServiceConfiguration, is(notNullValue())); - - Timeouts timeouts = clusteringServiceConfiguration.getTimeouts(); - assertThat(timeouts.getReadOperationTimeout(), is(Duration.of(5, MINUTES))); - assertThat(timeouts.getWriteOperationTimeout(), is(Duration.of(10, MINUTES))); - assertThat(timeouts.getConnectionTimeout(), is(Duration.of(15, MINUTES))); - } - - @Test - public void testGetTimeoutNone() throws Exception { - - final String[] config = new String[] - { - "", - "", - " ", - " ", - " ", - " ", - " ", - "", - "" - }; - - final Configuration configuration = new XmlConfiguration(makeConfig(config)); - - Collection> serviceCreationConfigurations = - configuration.getServiceCreationConfigurations(); - assertThat(serviceCreationConfigurations, is(not(Matchers.empty()))); - - ClusteringServiceConfiguration clusteringServiceConfiguration = - ServiceUtils.findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); - assertThat(clusteringServiceConfiguration, is(notNullValue())); - - assertThat(clusteringServiceConfiguration.getTimeouts(), is(TimeoutsBuilder.timeouts().build())); - } - - @Test - public void testGetTimeoutUnitDefault() throws Exception { - - final String[] config = new String[] - { - "", - "", - " ", - " ", - " ", - " 5", - " ", - " ", - "", - "" - }; - - final Configuration configuration = new XmlConfiguration(makeConfig(config)); - - Collection> serviceCreationConfigurations = - configuration.getServiceCreationConfigurations(); - assertThat(serviceCreationConfigurations, is(not(Matchers.empty()))); - - ClusteringServiceConfiguration clusteringServiceConfiguration = - ServiceUtils.findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); - assertThat(clusteringServiceConfiguration, is(notNullValue())); - - TemporalUnit defaultUnit = convertToJavaTimeUnit(new TimeType().getUnit()); - assertThat(clusteringServiceConfiguration.getTimeouts().getReadOperationTimeout(), - is(equalTo(Duration.of(5, defaultUnit)))); - } - - @Test - public void testGetTimeoutUnitBad() throws Exception { - - final String[] config = new String[] - { - "", - "", - " ", - " ", - " ", - " 5", - " ", - " ", - "", - "" - }; - - try { - new XmlConfiguration(makeConfig(config)); - fail("Expecting XmlConfigurationException"); - } catch (XmlConfigurationException e) { - assertThat(e.getMessage(), containsString("Error parsing XML configuration ")); - assertThat(e.getCause().getMessage(), containsString("Value 'femtos' is not facet-valid with respect to enumeration ")); - } - } - - @Test - public void testGetTimeoutValueTooBig() throws Exception { - - final String[] config = new String[] - { - "", - "", - " ", - " ", - " ", - " " - + BigInteger.ONE.add(BigInteger.valueOf(Long.MAX_VALUE)) - + "", - " ", - " ", - "", - "" - }; - - try { - new XmlConfiguration(makeConfig(config)); - fail("Expecting XmlConfigurationException"); - } catch (XmlConfigurationException e) { - assertThat(e.getMessage(), containsString(" exceeds allowed value ")); - } - } - - @Test - public void testGetTimeoutValueOmitted() throws Exception { - - final String[] config = new String[] - { - "", - "", - " ", - " ", - " ", - " ", - " ", - " ", - "", - "" - }; - - try { - new XmlConfiguration(makeConfig(config)); - fail("Expecting XmlConfigurationException"); - } catch (XmlConfigurationException e) { - assertThat(e.getMessage(), containsString("Error parsing XML configuration ")); - assertThat(e.getCause().getMessage(), containsString("'' is not a valid value for 'integer'")); - } - } - - /** - * Constructs a temporary XML configuration file. - * - * @param lines the lines to include in the XML configuration file - * - * @return a {@code URL} pointing to the XML configuration file - * - * @throws IOException if an error is raised while creating or writing the XML configuration file - */ - @SuppressWarnings("ThrowFromFinallyBlock") - private URL makeConfig(final String[] lines) throws IOException { - final File configFile = folder.newFile(testName.getMethodName() + "_config.xml"); - - OutputStreamWriter out = null; - try { - out = new OutputStreamWriter(new FileOutputStream(configFile), "UTF-8"); - for (final String line : lines) { - out.write(line); - } - } finally { - if (out != null) { - try { - out.close(); - } catch (IOException e) { - throw new AssertionError(e); - } - } - } - - return configFile.toURI().toURL(); - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterStateRepositoryReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterStateRepositoryReplicationTest.java deleted file mode 100644 index 9edde4a1e0..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterStateRepositoryReplicationTest.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.service; - -import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityService; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; -import org.ehcache.clustered.client.internal.store.ClusterTierClientEntityService; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; -import org.ehcache.clustered.client.internal.store.SimpleClusterTierClientEntity; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; -import org.ehcache.clustered.server.ClusterTierManagerServerEntityService; -import org.ehcache.clustered.server.store.ClusterTierServerEntityService; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.spi.persistence.StateHolder; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.terracotta.offheapresource.OffHeapResourcesProvider; -import org.terracotta.offheapresource.config.MemoryUnit; -import org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughTestHelpers; - -import java.io.Serializable; -import java.lang.reflect.Field; -import java.net.URI; - -import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; -import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; -import static org.ehcache.config.Eviction.noAdvice; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.expiry.Expirations.noExpiration; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; - -public class ClusterStateRepositoryReplicationTest { - - private PassthroughClusterControl clusterControl; - private static String STRIPENAME = "stripe"; - private static String STRIPE_URI = "passthrough://" + STRIPENAME; - - @Before - public void setUp() throws Exception { - this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, - server -> { - server.registerServerEntityService(new ClusterTierManagerServerEntityService()); - server.registerClientEntityService(new ClusterTierManagerClientEntityService()); - server.registerServerEntityService(new ClusterTierServerEntityService()); - server.registerClientEntityService(new ClusterTierClientEntityService()); - server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); - - UnitTestConnectionService.addServerToStripe(STRIPENAME, server); - } - ); - - clusterControl.waitForActive(); - clusterControl.waitForRunningPassivesInStandby(); - } - - @After - public void tearDown() throws Exception { - UnitTestConnectionService.removeStripe(STRIPENAME); - clusterControl.tearDown(); - } - - @Test - public void testClusteredStateRepositoryReplication() throws Exception { - ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) - .autoCreate() - .build(); - - ClusteringService service = new ClusteringServiceFactory().create(configuration); - - service.start(null); - - BaseCacheConfiguration config = new BaseCacheConfiguration<>(Long.class, String.class, noAdvice(), null, noExpiration(), - newResourcePoolsBuilder().with(clusteredDedicated("test", 2, org.ehcache.config.units.MemoryUnit.MB)).build()); - ClusteringService.ClusteredCacheIdentifier spaceIdentifier = (ClusteringService.ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("test", - config); - - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(spaceIdentifier, new StoreConfigurationImpl<>(config, 1, null, null), Consistency.STRONG, mock(ServerCallback.class)); - - SimpleClusterTierClientEntity clientEntity = getEntity(serverStoreProxy); - - ClusterStateRepository stateRepository = new ClusterStateRepository(spaceIdentifier, "test", clientEntity); - - StateHolder testHolder = stateRepository.getPersistentStateHolder("testHolder", String.class, String.class); - testHolder.putIfAbsent("One", "One"); - testHolder.putIfAbsent("Two", "Two"); - - clusterControl.terminateActive(); - clusterControl.waitForActive(); - - assertThat(testHolder.get("One"), is("One")); - assertThat(testHolder.get("Two"), is("Two")); - - service.stop(); - } - - @Test - public void testClusteredStateRepositoryReplicationWithSerializableKV() throws Exception { - ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) - .autoCreate() - .build(); - - ClusteringService service = new ClusteringServiceFactory().create(configuration); - - service.start(null); - - BaseCacheConfiguration config = new BaseCacheConfiguration<>(Long.class, String.class, noAdvice(), null, noExpiration(), - newResourcePoolsBuilder().with(clusteredDedicated("test", 2, org.ehcache.config.units.MemoryUnit.MB)).build()); - ClusteringService.ClusteredCacheIdentifier spaceIdentifier = (ClusteringService.ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("test", - config); - - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(spaceIdentifier, new StoreConfigurationImpl<>(config, 1, null, null), Consistency.STRONG, mock(ServerCallback.class)); - - SimpleClusterTierClientEntity clientEntity = getEntity(serverStoreProxy); - - ClusterStateRepository stateRepository = new ClusterStateRepository(new ClusteringService.ClusteredCacheIdentifier() { - @Override - public String getId() { - return "testStateRepo"; - } - - @Override - public Class getServiceType() { - return ClusteringService.class; - } - }, "test", clientEntity); - - StateHolder testMap = stateRepository.getPersistentStateHolder("testMap", TestVal.class, TestVal.class); - testMap.putIfAbsent(new TestVal("One"), new TestVal("One")); - testMap.putIfAbsent(new TestVal("Two"), new TestVal("Two")); - - clusterControl.terminateActive(); - clusterControl.waitForActive(); - - assertThat(testMap.get(new TestVal("One")), is(new TestVal("One"))); - assertThat(testMap.get(new TestVal("Two")), is(new TestVal("Two"))); - - assertThat(testMap.entrySet(), hasSize(2)); - - service.stop(); - } - - private static SimpleClusterTierClientEntity getEntity(ServerStoreProxy clusteringService) throws NoSuchFieldException, IllegalAccessException { - Field entity = clusteringService.getClass().getDeclaredField("entity"); - entity.setAccessible(true); - return (SimpleClusterTierClientEntity)entity.get(clusteringService); - } - - private static class TestVal implements Serializable { - final String val; - - - private TestVal(String val) { - this.val = val; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - TestVal testVal = (TestVal) o; - - return val != null ? val.equals(testVal.val) : testVal.val == null; - } - - @Override - public int hashCode() { - return val != null ? val.hashCode() : 0; - } - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactoryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactoryTest.java deleted file mode 100644 index f04ced19e9..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactoryTest.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.service; - -import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; -import org.ehcache.core.spi.service.ServiceFactory; -import org.ehcache.core.internal.util.ClassLoading; -import org.junit.Test; - -import java.util.ServiceLoader; - -import static org.junit.Assert.*; - -/** - * @author Clifford W. Johnson - */ -public class ClusteringServiceFactoryTest { - - @Test - public void testServiceLocator() throws Exception { - final String expectedFactory = ClusteringServiceFactory.class.getName(); - final ServiceLoader factories = ClassLoading.libraryServiceLoaderFor(ServiceFactory.class); - foundParser: { - for (final ServiceFactory factory : factories) { - if (factory.getClass().getName().equals(expectedFactory)) { - break foundParser; - } - } - fail("Expected factory not found"); - } - } - -} \ No newline at end of file diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/AbstractServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/AbstractServerStoreProxyTest.java deleted file mode 100644 index fb6ba014ad..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/AbstractServerStoreProxyTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory; -import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityService; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.clustered.client.internal.UnitTestConnectionService.PassthroughServerBuilder; -import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; -import org.ehcache.clustered.server.ClusterTierManagerServerEntityService; -import org.ehcache.clustered.server.store.ObservableClusterTierServerEntityService; -import org.ehcache.config.units.MemoryUnit; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.terracotta.connection.Connection; - -import java.net.URI; -import java.util.Collections; -import java.util.Properties; - -public abstract class AbstractServerStoreProxyTest { - - private static final URI CLUSTER_URI = URI.create("terracotta://localhost"); - private static final UnitTestConnectionService CONNECTION_SERVICE = new UnitTestConnectionService(); - - protected static ObservableClusterTierServerEntityService observableClusterTierService; - - @BeforeClass - public static void createCluster() { - UnitTestConnectionService.add(CLUSTER_URI, new PassthroughServerBuilder() - .serverEntityService(new ClusterTierManagerServerEntityService()) - .clientEntityService(new ClusterTierManagerClientEntityService()) - .serverEntityService(observableClusterTierService = new ObservableClusterTierServerEntityService()) - .clientEntityService(new ClusterTierClientEntityService()) - .serverEntityService(new VoltronReadWriteLockServerEntityService()) - .clientEntityService(new VoltronReadWriteLockEntityClientService()) - .resource("defaultResource", 128, MemoryUnit.MB).build()); - } - - @AfterClass - public static void destroyCluster() { - UnitTestConnectionService.remove(CLUSTER_URI); - observableClusterTierService = null; - } - - protected static SimpleClusterTierClientEntity createClientEntity(String name, - ServerStoreConfiguration configuration, - boolean create) throws Exception { - Connection connection = CONNECTION_SERVICE.connect(CLUSTER_URI, new Properties()); - - // Create ClusterTierManagerClientEntity if needed - ClusterTierManagerClientEntityFactory entityFactory = new ClusterTierManagerClientEntityFactory(connection); - if (create) { - entityFactory.create(name, new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - } - // Create or fetch the ClusterTierClientEntity - SimpleClusterTierClientEntity clientEntity = (SimpleClusterTierClientEntity) entityFactory.fetchOrCreateClusteredStoreEntity(name, name, configuration, create); - clientEntity.validate(configuration); - return clientEntity; - } - - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ChainBuilderTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ChainBuilderTest.java deleted file mode 100644 index c692c5f13e..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ChainBuilderTest.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.common.internal.store.Util; -import org.junit.Test; - -import java.util.Iterator; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -/** - */ -public class ChainBuilderTest { - - @Test - public void testChainBuilder() { - ChainBuilder cb1 = new ChainBuilder(); - - ChainBuilder cb2 = cb1.add(Util.createPayload(1L)) - .add(Util.createPayload(3L)) - .add(Util.createPayload(4L)); - - ChainBuilder cb3 = cb2.add(Util.createPayload(2L)); - - Chain chain1 = cb1.build(); - Chain chain2 = cb2.build(); - Chain chain3 = cb3.build(); - - assertChainHas(chain1); - assertChainHas(chain2, 1L, 3L, 4L); - assertChainHas(chain3, 1L, 3L, 4L, 2L); - - } - - private static void assertChainHas(Chain chain, long... payLoads) { - Iterator elements = chain.iterator(); - for (long payLoad : payLoads) { - assertThat(Util.readPayLoad(elements.next().getPayload()), is(Long.valueOf(payLoad))); - } - assertThat(elements.hasNext(), is(false)); - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java deleted file mode 100644 index cee4befaf5..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ /dev/null @@ -1,839 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.TestTimeSource; -import org.ehcache.clustered.client.config.ClusteredResourcePool; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; -import org.ehcache.clustered.client.internal.store.operations.EternalChainResolver; -import org.ehcache.clustered.client.internal.store.operations.Result; -import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec; -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.Ehcache; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.spi.store.StoreAccessTimeoutException; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.store.HashUtils; -import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.StringSerializer; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.terracotta.connection.Connection; - -import java.net.URI; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.function.Function; - -import static org.ehcache.clustered.client.internal.store.ClusteredStore.DEFAULT_CHAIN_COMPACTION_THRESHOLD; -import static org.ehcache.clustered.client.internal.store.ClusteredStore.CHAIN_COMPACTION_THRESHOLD_PROP; -import static org.ehcache.clustered.util.StatisticsTestUtils.validateStat; -import static org.ehcache.clustered.util.StatisticsTestUtils.validateStats; -import static org.ehcache.core.spi.store.Store.ValueHolder.NO_EXPIRE; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class ClusteredStoreTest { - - private static final String CACHE_IDENTIFIER = "testCache"; - private static final URI CLUSTER_URI = URI.create("terracotta://localhost"); - - private ClusteredStore store; - - @Before - public void setup() throws Exception { - UnitTestConnectionService.add( - CLUSTER_URI, - new UnitTestConnectionService.PassthroughServerBuilder().resource("defaultResource", 8, MemoryUnit.MB).build() - ); - - Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - ClusterTierManagerClientEntityFactory entityFactory = new ClusterTierManagerClientEntityFactory(connection); - - ServerSideConfiguration serverConfig = - new ServerSideConfiguration("defaultResource", Collections.emptyMap()); - entityFactory.create("TestCacheManager", serverConfig); - - ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB); - ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), - Long.class.getName(), String.class.getName(), LongSerializer.class.getName(), StringSerializer.class.getName(), null); - ClusterTierClientEntity clientEntity = entityFactory.fetchOrCreateClusteredStoreEntity("TestCacheManager", CACHE_IDENTIFIER, serverStoreConfiguration, true); - clientEntity.validate(serverStoreConfiguration); - ServerStoreProxy serverStoreProxy = new CommonServerStoreProxy(CACHE_IDENTIFIER, clientEntity, mock(ServerCallback.class)); - - TestTimeSource testTimeSource = new TestTimeSource(); - - OperationsCodec codec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); - EternalChainResolver resolver = new EternalChainResolver<>(codec); - store = new ClusteredStore<>(codec, resolver, serverStoreProxy, testTimeSource); - } - - @After - public void tearDown() throws Exception { - UnitTestConnectionService.remove("terracotta://localhost/my-application"); - } - - @Test - public void testPut() throws Exception { - assertThat(store.put(1L, "one"), is(Store.PutStatus.PUT)); - validateStats(store, EnumSet.of(StoreOperationOutcomes.PutOutcome.PUT)); - assertThat(store.put(1L, "another one"), is(Store.PutStatus.PUT)); - assertThat(store.put(1L, "yet another one"), is(Store.PutStatus.PUT)); - validateStat(store, StoreOperationOutcomes.PutOutcome.PUT, 3); - } - - @Test(expected = StoreAccessTimeoutException.class) - @SuppressWarnings("unchecked") - public void testPutTimeout() throws Exception { - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - doThrow(TimeoutException.class).when(proxy).append(anyLong(), isNull()); - ClusteredStore store = new ClusteredStore<>(codec, null, proxy, timeSource); - store.put(1L, "one"); - } - - @Test - public void testGet() throws Exception { - assertThat(store.get(1L), nullValue()); - validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.MISS)); - store.put(1L, "one"); - assertThat(store.get(1L).value(), is("one")); - validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.HIT)); - } - - @Test(expected = StoreAccessException.class) - public void testGetThrowsOnlySAE() throws Exception { - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); - TestTimeSource testTimeSource = mock(TestTimeSource.class); - ClusteredStore store = new ClusteredStore<>(codec, chainResolver, serverStoreProxy, testTimeSource); - store.get(1L); - } - - @Test - @SuppressWarnings("unchecked") - public void testGetTimeout() throws Exception { - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - long longKey = HashUtils.intHashToLong(new Long(1L).hashCode()); - when(proxy.get(longKey)).thenThrow(TimeoutException.class); - ClusteredStore store = new ClusteredStore<>(null, null, proxy, null); - assertThat(store.get(1L), nullValue()); - validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.TIMEOUT)); - } - - @Test - public void testGetThatCompactsInvokesReplace() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - timeSource.advanceTime(134556L); - long now = timeSource.getTimeMillis(); - @SuppressWarnings("unchecked") - OperationsCodec operationsCodec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - @SuppressWarnings("unchecked") - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.isCompacted()).thenReturn(true); - when(chainResolver.resolve(any(Chain.class), eq(42L), eq(now))).thenReturn(resolvedChain); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - Chain chain = mock(Chain.class); - when(chain.isEmpty()).thenReturn(false); - long longKey = HashUtils.intHashToLong(new Long(42L).hashCode()); - when(serverStoreProxy.get(longKey)).thenReturn(chain); - - ClusteredStore clusteredStore = new ClusteredStore<>(operationsCodec, chainResolver, - serverStoreProxy, timeSource); - clusteredStore.get(42L); - verify(serverStoreProxy).replaceAtHead(eq(longKey), eq(chain), isNull()); - } - - @Test - public void testGetThatDoesNotCompactsInvokesReplace() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - timeSource.advanceTime(134556L); - long now = timeSource.getTimeMillis(); - OperationsCodec operationsCodec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - @SuppressWarnings("unchecked") - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.isCompacted()).thenReturn(false); - when(chainResolver.resolve(any(Chain.class), eq(42L), eq(now))).thenReturn(resolvedChain); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - Chain chain = mock(Chain.class); - when(chain.isEmpty()).thenReturn(false); - long longKey = HashUtils.intHashToLong(new Long(42L).hashCode()); - when(serverStoreProxy.get(longKey)).thenReturn(chain); - - ClusteredStore clusteredStore = new ClusteredStore<>(operationsCodec, chainResolver, - serverStoreProxy, timeSource); - clusteredStore.get(42L); - verify(serverStoreProxy, never()).replaceAtHead(eq(longKey), eq(chain), any(Chain.class)); - } - - @Test - public void testContainsKey() throws Exception { - assertThat(store.containsKey(1L), is(false)); - store.put(1L, "one"); - assertThat(store.containsKey(1L), is(true)); - validateStat(store, StoreOperationOutcomes.GetOutcome.HIT, 0); - validateStat(store, StoreOperationOutcomes.GetOutcome.MISS, 0); - } - - @Test(expected = StoreAccessException.class) - public void testContainsKeyThrowsOnlySAE() throws Exception { - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); - TestTimeSource testTimeSource = mock(TestTimeSource.class); - ClusteredStore store = new ClusteredStore<>(codec, chainResolver, serverStoreProxy, testTimeSource); - store.containsKey(1L); - } - - @Test - public void testRemove() throws Exception { - assertThat(store.remove(1L), is(false)); - validateStats(store, EnumSet.of(StoreOperationOutcomes.RemoveOutcome.MISS)); - store.put(1L, "one"); - assertThat(store.remove(1L), is(true)); - assertThat(store.containsKey(1L), is(false)); - validateStats(store, EnumSet.of(StoreOperationOutcomes.RemoveOutcome.MISS, StoreOperationOutcomes.RemoveOutcome.REMOVED)); - } - - @Test(expected = StoreAccessException.class) - public void testRemoveThrowsOnlySAE() throws Exception { - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); - TestTimeSource testTimeSource = mock(TestTimeSource.class); - ClusteredStore store = new ClusteredStore<>(codec, chainResolver, serverStoreProxy, testTimeSource); - store.remove(1L); - } - - @Test(expected = StoreAccessTimeoutException.class) - @SuppressWarnings("unchecked") - public void testRemoveTimeout() throws Exception { - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); - ClusteredStore store = new ClusteredStore<>(codec, null, proxy, timeSource); - store.remove(1L); - } - - @Test - public void testClear() throws Exception { - assertThat(store.containsKey(1L), is(false)); - store.clear(); - assertThat(store.containsKey(1L), is(false)); - - store.put(1L, "one"); - store.put(2L, "two"); - store.put(3L, "three"); - assertThat(store.containsKey(1L), is(true)); - - store.clear(); - - assertThat(store.containsKey(1L), is(false)); - assertThat(store.containsKey(2L), is(false)); - assertThat(store.containsKey(3L), is(false)); - } - - @Test(expected = StoreAccessException.class) - public void testClearThrowsOnlySAE() throws Exception { - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - doThrow(new RuntimeException()).when(serverStoreProxy).clear(); - TestTimeSource testTimeSource = mock(TestTimeSource.class); - ClusteredStore store = new ClusteredStore<>(codec, chainResolver, serverStoreProxy, testTimeSource); - store.clear(); - } - - @Test(expected = StoreAccessTimeoutException.class) - public void testClearTimeout() throws Exception { - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - doThrow(TimeoutException.class).when(proxy).clear(); - ClusteredStore store = new ClusteredStore<>(codec, null, proxy, timeSource); - store.clear(); - } - - @Test - public void testPutIfAbsent() throws Exception { - assertThat(store.putIfAbsent(1L, "one"), nullValue()); - validateStats(store, EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)); - assertThat(store.putIfAbsent(1L, "another one").value(), is("one")); - validateStats(store, EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT, StoreOperationOutcomes.PutIfAbsentOutcome.HIT)); - } - - @Test(expected = StoreAccessException.class) - public void testPutIfAbsentThrowsOnlySAE() throws Exception { - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); - TestTimeSource testTimeSource = mock(TestTimeSource.class); - ClusteredStore store = new ClusteredStore<>(codec, chainResolver, serverStoreProxy, testTimeSource); - store.putIfAbsent(1L, "one"); - } - - @Test(expected = StoreAccessTimeoutException.class) - @SuppressWarnings("unchecked") - public void testPutIfAbsentTimeout() throws Exception { - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); - ClusteredStore store = new ClusteredStore<>(codec, null, proxy, timeSource); - store.putIfAbsent(1L, "one"); - } - - @Test - public void testConditionalRemove() throws Exception { - assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.KEY_MISSING)); - validateStats(store, EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS)); - store.put(1L, "one"); - assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.REMOVED)); - validateStats(store, EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS, StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED)); - store.put(1L, "another one"); - assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.KEY_PRESENT)); - validateStat(store, StoreOperationOutcomes.ConditionalRemoveOutcome.MISS, 2); - } - - @Test(expected = StoreAccessException.class) - public void testConditionalRemoveThrowsOnlySAE() throws Exception { - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); - TestTimeSource testTimeSource = mock(TestTimeSource.class); - ClusteredStore store = new ClusteredStore<>(codec, chainResolver, serverStoreProxy, testTimeSource); - store.remove(1L, "one"); - } - - @Test(expected = StoreAccessTimeoutException.class) - @SuppressWarnings("unchecked") - public void testConditionalRemoveTimeout() throws Exception { - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); - ClusteredStore store = new ClusteredStore<>(codec, null, proxy, timeSource); - store.remove(1L, "one"); - } - - @Test - public void testReplace() throws Exception { - assertThat(store.replace(1L, "one"), nullValue()); - validateStats(store, EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.MISS)); - store.put(1L, "one"); - assertThat(store.replace(1L, "another one").value(), is("one")); - validateStats(store, EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.MISS, StoreOperationOutcomes.ReplaceOutcome.REPLACED)); - } - - @Test(expected = StoreAccessException.class) - public void testReplaceThrowsOnlySAE() throws Exception { - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); - TestTimeSource testTimeSource = mock(TestTimeSource.class); - ClusteredStore store = new ClusteredStore<>(codec, chainResolver, serverStoreProxy, testTimeSource); - store.replace(1L, "one"); - } - - @Test(expected = StoreAccessTimeoutException.class) - @SuppressWarnings("unchecked") - public void testReplaceTimeout() throws Exception { - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); - ClusteredStore store = new ClusteredStore<>(codec, null, proxy, timeSource); - store.replace(1L, "one"); - } - - @Test - public void testConditionalReplace() throws Exception { - assertThat(store.replace(1L, "one" , "another one"), is(Store.ReplaceStatus.MISS_NOT_PRESENT)); - validateStats(store, EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS)); - store.put(1L, "some other one"); - assertThat(store.replace(1L, "one" , "another one"), is(Store.ReplaceStatus.MISS_PRESENT)); - validateStat(store, StoreOperationOutcomes.ConditionalReplaceOutcome.MISS, 2); - validateStat(store, StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED, 0); - assertThat(store.replace(1L, "some other one" , "another one"), is(Store.ReplaceStatus.HIT)); - validateStat(store, StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED, 1); - validateStat(store, StoreOperationOutcomes.ConditionalReplaceOutcome.MISS, 2); - } - - @Test(expected = StoreAccessException.class) - public void testConditionalReplaceThrowsOnlySAE() throws Exception { - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - @SuppressWarnings("unchecked") - EternalChainResolver chainResolver = mock(EternalChainResolver.class); - ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); - when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); - TestTimeSource testTimeSource = mock(TestTimeSource.class); - ClusteredStore store = new ClusteredStore<>(codec, chainResolver, serverStoreProxy, testTimeSource); - store.replace(1L, "one", "another one"); - } - - @Test(expected = StoreAccessTimeoutException.class) - @SuppressWarnings("unchecked") - public void testConditionalReplaceTimeout() throws Exception { - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); - ClusteredStore store = new ClusteredStore<>(codec, null, proxy, timeSource); - store.replace(1L, "one", "another one"); - } - - @Test - public void testBulkComputePutAll() throws Exception { - store.put(1L, "another one"); - Map map = new HashMap<>(); - map.put(1L, "one"); - map.put(2L, "two"); - Ehcache.PutAllFunction putAllFunction = new Ehcache.PutAllFunction<>(null, map, null); - Map> valueHolderMap = store.bulkCompute(new HashSet<>(Arrays.asList(1L, 2L)), putAllFunction); - - assertThat(valueHolderMap.get(1L).value(), is(map.get(1L))); - assertThat(store.get(1L).value(), is(map.get(1L))); - assertThat(valueHolderMap.get(2L).value(), is(map.get(2L))); - assertThat(store.get(2L).value(), is(map.get(2L))); - assertThat(putAllFunction.getActualPutCount().get(), is(2)); - validateStats(store, EnumSet.of(StoreOperationOutcomes.PutOutcome.PUT)); //outcome of the initial store put - } - - @Test - public void testBulkComputeRemoveAll() throws Exception { - store.put(1L, "one"); - store.put(2L, "two"); - store.put(3L, "three"); - Ehcache.RemoveAllFunction removeAllFunction = new Ehcache.RemoveAllFunction<>(); - Map> valueHolderMap = store.bulkCompute(new HashSet<>(Arrays.asList(1L, 2L, 4L)), removeAllFunction); - - assertThat(valueHolderMap.get(1L), nullValue()); - assertThat(store.get(1L), nullValue()); - assertThat(valueHolderMap.get(2L), nullValue()); - assertThat(store.get(2L), nullValue()); - assertThat(valueHolderMap.get(4L), nullValue()); - assertThat(store.get(4L), nullValue()); - validateStats(store, EnumSet.noneOf(StoreOperationOutcomes.RemoveOutcome.class)); - } - - @Test(expected = UnsupportedOperationException.class) - public void testBulkComputeThrowsForGenericFunction() throws Exception { - @SuppressWarnings("unchecked") - Function>, Iterable>> remappingFunction - = mock(Function.class); - store.bulkCompute(new HashSet<>(Arrays.asList(1L, 2L)), remappingFunction); - } - - @Test - public void testBulkComputeIfAbsentGetAll() throws Exception { - store.put(1L, "one"); - store.put(2L, "two"); - Ehcache.GetAllFunction getAllAllFunction = new Ehcache.GetAllFunction<>(); - Map> valueHolderMap = store.bulkComputeIfAbsent(new HashSet<>(Arrays.asList(1L, 2L)), getAllAllFunction); - - assertThat(valueHolderMap.get(1L).value(), is("one")); - assertThat(store.get(1L).value(), is("one")); - assertThat(valueHolderMap.get(2L).value(), is("two")); - assertThat(store.get(2L).value(), is("two")); - } - - @Test(expected = UnsupportedOperationException.class) - public void testBulkComputeIfAbsentThrowsForGenericFunction() throws Exception { - @SuppressWarnings("unchecked") - Function, Iterable>> mappingFunction - = mock(Function.class); - store.bulkComputeIfAbsent(new HashSet<>(Arrays.asList(1L, 2L)), mappingFunction); - } - - @Test - @SuppressWarnings("unchecked") - public void testPutIfAbsentReplacesChainOnlyOnCompressionThreshold() throws Exception { - Result result = mock(Result.class); - when(result.getValue()).thenReturn("one"); - - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(result); - when(resolvedChain.getCompactedChain()).thenReturn(mock(Chain.class)); - - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD - 1); // less than the default threshold - store.putIfAbsent(1L, "one"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD); // equal to the default threshold - store.putIfAbsent(1L, "one"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD + 1); // greater than the default threshold - store.putIfAbsent(1L, "one"); - verify(proxy).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testReplaceReplacesChainOnlyOnCompressionThreshold() throws Exception { - Result result = mock(Result.class); - when(result.getValue()).thenReturn("one"); - - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(result); - when(resolvedChain.getCompactedChain()).thenReturn(mock(Chain.class)); - - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD - 1); // less than the default threshold - store.replace(1L, "one"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD); // equal to the default threshold - store.replace(1L, "one"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD + 1); // greater than the default threshold - store.replace(1L, "one"); - verify(proxy).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testConditionalReplaceReplacesChainOnlyOnCompressionThreshold() throws Exception { - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(mock(Result.class)); - when(resolvedChain.getCompactedChain()).thenReturn(mock(Chain.class)); - - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD - 1); // less than the default threshold - store.replace(1L, "one", "anotherOne"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD); // equal to the default threshold - store.replace(1L, "one", "anotherOne"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - - when(resolvedChain.getCompactionCount()).thenReturn(DEFAULT_CHAIN_COMPACTION_THRESHOLD + 1); // greater than the default threshold - store.replace(1L, "one", "anotherOne"); - verify(proxy).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testCustomCompressionThreshold() throws Exception { - int customThreshold = 4; - try { - System.setProperty(CHAIN_COMPACTION_THRESHOLD_PROP, String.valueOf(customThreshold)); - - Result result = mock(Result.class); - when(result.getValue()).thenReturn("one"); - - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(result); - when(resolvedChain.getCompactedChain()).thenReturn(mock(Chain.class)); - - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - when(resolvedChain.getCompactionCount()).thenReturn(customThreshold - 1); // less than the custom threshold - store.putIfAbsent(1L, "one"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - - when(resolvedChain.getCompactionCount()).thenReturn(customThreshold); // equal to the custom threshold - store.replace(1L, "one"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - - when(resolvedChain.getCompactionCount()).thenReturn(customThreshold + 1); // greater than the custom threshold - store.replace(1L, "one"); - verify(proxy).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } finally { - System.clearProperty(CHAIN_COMPACTION_THRESHOLD_PROP); - } - } - - @Test - @SuppressWarnings("unchecked") - public void testRemoveReplacesChainOnHits() throws Exception { - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getCompactedChain()).thenReturn(mock(Chain.class)); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(mock(Result.class)); //simulate a key hit on chain resolution - when(resolvedChain.getCompactionCount()).thenReturn(1); - - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - store.remove(1L); - verify(proxy).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testRemoveDoesNotReplaceChainOnMisses() throws Exception { - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(null); //simulate a key miss on chain resolution - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - store.remove(1L); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testConditionalRemoveReplacesChainOnHits() throws Exception { - Result result = mock(Result.class); - when(result.getValue()).thenReturn("foo"); - - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getCompactedChain()).thenReturn(mock(Chain.class)); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(result); //simulate a key hit on chain resolution - when(resolvedChain.getCompactionCount()).thenReturn(1); - - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - store.remove(1L, "foo"); - verify(proxy).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testConditionalRemoveDoesNotReplaceChainOnKeyMiss() throws Exception { - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(null); //simulate a key miss on chain resolution - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - store.remove(1L, "foo"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testConditionalRemoveDoesNotReplaceChainOnKeyHitValueMiss() throws Exception { - Result result = mock(Result.class); - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(result); //simulate a key kit - when(result.getValue()).thenReturn("bar"); //but a value miss - - - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - OperationsCodec codec = mock(OperationsCodec.class); - when(codec.encode(any())).thenReturn(ByteBuffer.allocate(0)); - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenReturn(mock(Chain.class)); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore(codec, resolver, proxy, timeSource); - - store.remove(1L, "foo"); - verify(proxy, never()).replaceAtHead(anyLong(), any(Chain.class), any(Chain.class)); - } - - @Test - public void testExpirationIsSentToHigherTiers() throws Exception { - @SuppressWarnings("unchecked") - Result result = mock(Result.class); - when(result.getValue()).thenReturn("bar"); - - @SuppressWarnings("unchecked") - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(result); - when(resolvedChain.getExpirationTime()).thenReturn(1000L); - - @SuppressWarnings("unchecked") - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.get(anyLong())).thenReturn(mock(Chain.class)); - - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore<>(codec, resolver, proxy, timeSource); - - Store.ValueHolder vh = store.get(1L); - - long expirationTime = vh.expirationTime(TimeUnit.MILLISECONDS); - assertThat(expirationTime, is(1000L)); - } - - @Test - public void testNoExpireIsSentToHigherTiers() throws Exception { - @SuppressWarnings("unchecked") - Result result = mock(Result.class); - when(result.getValue()).thenReturn("bar"); - - @SuppressWarnings("unchecked") - ResolvedChain resolvedChain = mock(ResolvedChain.class); - when(resolvedChain.getResolvedResult(anyLong())).thenReturn(result); - when(resolvedChain.getExpirationTime()).thenReturn(Long.MAX_VALUE); // no expire - - @SuppressWarnings("unchecked") - EternalChainResolver resolver = mock(EternalChainResolver.class); - when(resolver.resolve(any(Chain.class), anyLong(), anyLong())).thenReturn(resolvedChain); - - ServerStoreProxy proxy = mock(ServerStoreProxy.class); - when(proxy.get(anyLong())).thenReturn(mock(Chain.class)); - - @SuppressWarnings("unchecked") - OperationsCodec codec = mock(OperationsCodec.class); - TimeSource timeSource = mock(TimeSource.class); - - ClusteredStore store = new ClusteredStore<>(codec, resolver, proxy, timeSource); - - Store.ValueHolder vh = store.get(1L); - - long expirationTime = vh.expirationTime(TimeUnit.MILLISECONDS); - assertThat(expirationTime, is(NO_EXPIRE)); - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java deleted file mode 100644 index cfd9e0d75e..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.config.ClusteredResourcePool; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.impl.serialization.LongSerializer; -import org.junit.Test; - -import java.util.Iterator; - -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.ehcache.clustered.common.internal.store.Util.getChain; -import static org.ehcache.clustered.common.internal.store.Util.readPayLoad; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; - -public class CommonServerStoreProxyTest extends AbstractServerStoreProxyTest { - - private static ClusterTierClientEntity createClientEntity(String name) throws Exception { - ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(16L, MemoryUnit.MB); - - ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class - .getName(), - Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), null); - - return createClientEntity(name, serverStoreConfiguration, true); - - } - - @Test - public void testGetKeyNotPresent() throws Exception { - ClusterTierClientEntity clientEntity = createClientEntity("testGetKeyNotPresent"); - CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testGetKeyNotPresent", clientEntity, mock(ServerCallback.class)); - - Chain chain = serverStoreProxy.get(1); - - assertThat(chain.isEmpty(), is(true)); - } - - @Test - public void testAppendKeyNotPresent() throws Exception { - ClusterTierClientEntity clientEntity = createClientEntity("testAppendKeyNotPresent"); - CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testAppendKeyNotPresent", clientEntity, mock(ServerCallback.class)); - - serverStoreProxy.append(2, createPayload(2)); - - Chain chain = serverStoreProxy.get(2); - assertThat(chain.isEmpty(), is(false)); - assertThat(readPayLoad(chain.iterator().next().getPayload()), is(2L)); - } - - @Test - public void testGetAfterMultipleAppendsOnSameKey() throws Exception { - ClusterTierClientEntity clientEntity = createClientEntity("testGetAfterMultipleAppendsOnSameKey"); - CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testGetAfterMultipleAppendsOnSameKey", clientEntity, mock(ServerCallback.class)); - - serverStoreProxy.append(3L, createPayload(3L)); - serverStoreProxy.append(3L, createPayload(33L)); - serverStoreProxy.append(3L, createPayload(333L)); - - Chain chain = serverStoreProxy.get(3L); - - assertThat(chain.isEmpty(), is(false)); - - assertChainHas(chain, 3L, 33L, 333l); - } - - @Test - public void testGetAndAppendKeyNotPresent() throws Exception { - ClusterTierClientEntity clientEntity = createClientEntity("testGetAndAppendKeyNotPresent"); - CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testGetAndAppendKeyNotPresent", clientEntity, mock(ServerCallback.class)); - Chain chain = serverStoreProxy.getAndAppend(4L, createPayload(4L)); - - assertThat(chain.isEmpty(), is(true)); - - chain = serverStoreProxy.get(4L); - - assertThat(chain.isEmpty(), is(false)); - assertChainHas(chain, 4L); - } - - @Test - public void testGetAndAppendMultipleTimesOnSameKey() throws Exception { - ClusterTierClientEntity clientEntity = createClientEntity("testGetAndAppendMultipleTimesOnSameKey"); - CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testGetAndAppendMultipleTimesOnSameKey", clientEntity, mock(ServerCallback.class)); - serverStoreProxy.getAndAppend(5L, createPayload(5L)); - serverStoreProxy.getAndAppend(5L, createPayload(55L)); - serverStoreProxy.getAndAppend(5L, createPayload(555L)); - Chain chain = serverStoreProxy.getAndAppend(5l, createPayload(5555L)); - - assertThat(chain.isEmpty(), is(false)); - assertChainHas(chain, 5L, 55L, 555L); - } - - @Test - public void testReplaceAtHeadSuccessFull() throws Exception { - ClusterTierClientEntity clientEntity = createClientEntity("testReplaceAtHeadSuccessFull"); - CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testReplaceAtHeadSuccessFull", clientEntity, mock(ServerCallback.class)); - serverStoreProxy.append(20L, createPayload(200L)); - serverStoreProxy.append(20L, createPayload(2000L)); - serverStoreProxy.append(20L, createPayload(20000L)); - - Chain expect = serverStoreProxy.get(20L); - Chain update = getChain(false, createPayload(400L)); - - serverStoreProxy.replaceAtHead(20l, expect, update); - - Chain afterReplace = serverStoreProxy.get(20L); - assertChainHas(afterReplace, 400L); - - serverStoreProxy.append(20L, createPayload(4000L)); - serverStoreProxy.append(20L, createPayload(40000L)); - - serverStoreProxy.replaceAtHead(20L, afterReplace, getChain(false, createPayload(800L))); - - Chain anotherReplace = serverStoreProxy.get(20L); - - assertChainHas(anotherReplace, 800L, 4000L, 40000L); - } - - @Test - public void testClear() throws Exception { - ClusterTierClientEntity clientEntity = createClientEntity("testClear"); - CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testClear", clientEntity, mock(ServerCallback.class)); - serverStoreProxy.append(1L, createPayload(100L)); - - serverStoreProxy.clear(); - Chain chain = serverStoreProxy.get(1); - assertThat(chain.isEmpty(), is(true)); - } - - private static void assertChainHas(Chain chain, long... payLoads) { - Iterator elements = chain.iterator(); - for (long payLoad : payLoads) { - assertThat(readPayLoad(elements.next().getPayload()), is(Long.valueOf(payLoad))); - } - assertThat(elements.hasNext(), is(false)); - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java deleted file mode 100644 index a4ba9ed54e..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java +++ /dev/null @@ -1,236 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.config.ClusteredResourcePool; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.server.store.ObservableClusterTierServerEntityService.ObservableClusterTierActiveEntity; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.impl.serialization.LongSerializer; -import org.junit.Test; - -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; - -public class EventualServerStoreProxyTest extends AbstractServerStoreProxyTest { - - private static SimpleClusterTierClientEntity createClientEntity(String name, boolean create) throws Exception { - ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(16L, MemoryUnit.MB); - - ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), - Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), Consistency.EVENTUAL); - - return createClientEntity(name, serverStoreConfiguration, create); - } - - @Test - public void testServerSideEvictionFiresInvalidations() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testServerSideEvictionFiresInvalidations", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testServerSideEvictionFiresInvalidations", false); - - final List store1InvalidatedHashes = new CopyOnWriteArrayList<>(); - final List store2InvalidatedHashes = new CopyOnWriteArrayList<>(); - - EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity1, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - store1InvalidatedHashes.add(hash); - } - - @Override - public void onInvalidateAll() { - fail("should not be called"); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - store2InvalidatedHashes.add(hash); - } - - @Override - public void onInvalidateAll() { - fail("should not be called"); - } - - @Override - public Chain compact(Chain chain) { - return chain; - } - }); - - final int ITERATIONS = 40; - for (int i = 0; i < ITERATIONS; i++) { - serverStoreProxy1.append(i, createPayload(i, 512 * 1024)); - } - - int evictionCount = 0; - int entryCount = 0; - for (int i = 0; i < ITERATIONS; i++) { - Chain elements1 = serverStoreProxy1.get(i); - Chain elements2 = serverStoreProxy2.get(i); - assertThat(chainsEqual(elements1, elements2), is(true)); - if (!elements1.isEmpty()) { - entryCount++; - } else { - evictionCount++; - } - } - - // there has to be server-side evictions, otherwise this test is useless - assertThat(store1InvalidatedHashes.size(), greaterThan(0)); - // test that each time the server evicted, the originating client got notified - assertThat(store1InvalidatedHashes.size(), is(ITERATIONS - entryCount)); - // test that each time the server evicted, the other client got notified on top of normal invalidations - assertThat(store2InvalidatedHashes.size(), is(ITERATIONS + evictionCount)); - - assertThatClientsWaitingForInvalidationIsEmpty("testServerSideEvictionFiresInvalidations"); - } - - @Test - public void testHashInvalidationListenerWithAppend() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithAppend", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithAppend", false); - - final CountDownLatch latch = new CountDownLatch(1); - final AtomicReference invalidatedHash = new AtomicReference<>(); - - - EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testHashInvalidationListenerWithAppend", clientEntity1, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - invalidatedHash.set(hash); - latch.countDown(); - } - - @Override - public void onInvalidateAll() { - throw new AssertionError("Should not be called"); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity2, mock(ServerCallback.class)); - - serverStoreProxy2.append(1L, createPayload(1L)); - - latch.await(5, TimeUnit.SECONDS); - assertThat(invalidatedHash.get(), is(1L)); - assertThatClientsWaitingForInvalidationIsEmpty("testHashInvalidationListenerWithAppend"); - } - - @Test - public void testHashInvalidationListenerWithGetAndAppend() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", false); - - final CountDownLatch latch = new CountDownLatch(1); - final AtomicReference invalidatedHash = new AtomicReference<>(); - - - EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity1, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - invalidatedHash.set(hash); - latch.countDown(); - } - - @Override - public void onInvalidateAll() { - throw new AssertionError("Should not be called"); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity2, mock(ServerCallback.class)); - - serverStoreProxy2.getAndAppend(1L, createPayload(1L)); - - latch.await(5, TimeUnit.SECONDS); - assertThat(invalidatedHash.get(), is(1L)); - assertThatClientsWaitingForInvalidationIsEmpty("testHashInvalidationListenerWithGetAndAppend"); - } - - @Test - public void testAllInvalidationListener() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAllInvalidationListener", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAllInvalidationListener", false); - - final CountDownLatch latch = new CountDownLatch(1); - final AtomicBoolean invalidatedAll = new AtomicBoolean(); - - EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testAllInvalidationListener", clientEntity1, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - throw new AssertionError("Should not be called"); - } - - @Override - public void onInvalidateAll() { - invalidatedAll.set(true); - latch.countDown(); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testAllInvalidationListener", clientEntity2, mock(ServerCallback.class)); - - serverStoreProxy2.clear(); - - latch.await(5, TimeUnit.SECONDS); - assertThat(invalidatedAll.get(), is(true)); - assertThatClientsWaitingForInvalidationIsEmpty("testAllInvalidationListener"); - } - - private static void assertThatClientsWaitingForInvalidationIsEmpty(String name) throws Exception { - ObservableClusterTierActiveEntity activeEntity = observableClusterTierService.getServedActiveEntitiesFor(name).get(0); - long now = System.currentTimeMillis(); - while (System.currentTimeMillis() < now + 5000 && activeEntity.getClientsWaitingForInvalidation().size() != 0); - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java deleted file mode 100644 index a76b42c651..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java +++ /dev/null @@ -1,385 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.config.ClusteredResourcePool; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.impl.serialization.LongSerializer; -import org.junit.Test; - -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; - -public class StrongServerStoreProxyTest extends AbstractServerStoreProxyTest { - - private static SimpleClusterTierClientEntity createClientEntity(String name, boolean create) throws Exception { - ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4L, MemoryUnit.MB); - - ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), - Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), Consistency.STRONG); - - return createClientEntity(name, serverStoreConfiguration, create); - } - - @Test - public void testServerSideEvictionFiresInvalidations() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testServerSideEvictionFiresInvalidations", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testServerSideEvictionFiresInvalidations", false); - - final List store1InvalidatedHashes = new CopyOnWriteArrayList<>(); - final List store2InvalidatedHashes = new CopyOnWriteArrayList<>(); - - StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity1, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - store1InvalidatedHashes.add(hash); - } - - @Override - public void onInvalidateAll() { - fail("should not be called"); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - store2InvalidatedHashes.add(hash); - } - - @Override - public void onInvalidateAll() { - fail("should not be called"); - } - - @Override - public Chain compact(Chain chain) { - return chain; - } - }); - - final int ITERATIONS = 40; - for (int i = 0; i < ITERATIONS; i++) { - serverStoreProxy1.append(i, createPayload(i, 512 * 1024)); - } - - int evictionCount = 0; - int entryCount = 0; - for (int i = 0; i < ITERATIONS; i++) { - Chain elements1 = serverStoreProxy1.get(i); - Chain elements2 = serverStoreProxy2.get(i); - assertThat(chainsEqual(elements1, elements2), is(true)); - if (!elements1.isEmpty()) { - entryCount++; - } else { - evictionCount++; - } - } - - // there has to be server-side evictions, otherwise this test is useless - assertThat(store1InvalidatedHashes.size(), greaterThan(0)); - // test that each time the server evicted, the originating client got notified - assertThat(store1InvalidatedHashes.size(), is(ITERATIONS - entryCount)); - // test that each time the server evicted, the other client got notified on top of normal invalidations - assertThat(store2InvalidatedHashes.size(), is(ITERATIONS + evictionCount)); - } - - @Test - public void testHashInvalidationListenerWithAppend() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithAppend", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithAppend", false); - - final AtomicReference invalidatedHash = new AtomicReference<>(); - - StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testHashInvalidationListenerWithAppend", clientEntity1, mock(ServerCallback.class)); - StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testHashInvalidationListenerWithAppend", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - invalidatedHash.set(hash); - } - - @Override - public void onInvalidateAll() { - throw new AssertionError("Should not be called"); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - - serverStoreProxy1.append(1L, createPayload(1L)); - - assertThat(invalidatedHash.get(), is(1L)); - } - - @Test - public void testConcurrentHashInvalidationListenerWithAppend() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testConcurrentHashInvalidationListenerWithAppend", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testConcurrentHashInvalidationListenerWithAppend", false); - - final AtomicBoolean invalidating = new AtomicBoolean(); - final CountDownLatch latch = new CountDownLatch(2); - StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testConcurrentHashInvalidationListenerWithAppend", clientEntity1, mock(ServerCallback.class)); - StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testConcurrentHashInvalidationListenerWithAppend", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - if (!invalidating.compareAndSet(false, true)) { - fail("Both threads entered the listener concurrently"); - } - try { - Thread.sleep(100); - } catch (InterruptedException ie) { - throw new AssertionError(ie); - } - invalidating.set(false); - latch.countDown(); - } - - @Override - public void onInvalidateAll() { - throw new AssertionError("Should not be called"); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - - ExecutorService executor = Executors.newCachedThreadPool(); - try { - executor.submit(() -> { - serverStoreProxy1.append(1L, createPayload(1L)); - return null; - }); - executor.submit(() -> { - serverStoreProxy1.append(1L, createPayload(1L)); - return null; - }); - - if (!latch.await(5, TimeUnit.SECONDS)) { - fail("Both listeners were not called"); - } - } finally { - executor.shutdown(); - } - } - - @Test - public void testHashInvalidationListenerWithGetAndAppend() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", false); - - final AtomicReference invalidatedHash = new AtomicReference<>(); - - StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity1, mock(ServerCallback.class)); - StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - invalidatedHash.set(hash); - } - - @Override - public void onInvalidateAll() { - throw new AssertionError("Should not be called"); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - - serverStoreProxy1.getAndAppend(1L, createPayload(1L)); - - assertThat(invalidatedHash.get(), is(1L)); - } - - @Test - public void testAllInvalidationListener() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAllInvalidationListener", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAllInvalidationListener", false); - - final AtomicBoolean invalidatedAll = new AtomicBoolean(); - - StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testAllInvalidationListener", clientEntity1, mock(ServerCallback.class)); - StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testAllInvalidationListener", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - throw new AssertionError("Should not be called"); - } - - @Override - public void onInvalidateAll() { - invalidatedAll.set(true); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - - serverStoreProxy1.clear(); - - assertThat(invalidatedAll.get(), is(true)); - } - - @Test - public void testConcurrentAllInvalidationListener() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testConcurrentAllInvalidationListener", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testConcurrentAllInvalidationListener", false); - - final AtomicBoolean invalidating = new AtomicBoolean(); - final CountDownLatch latch = new CountDownLatch(2); - - StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testConcurrentAllInvalidationListener", clientEntity1, mock(ServerCallback.class)); - StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testConcurrentAllInvalidationListener", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - throw new AssertionError("Should not be called"); - } - - @Override - public void onInvalidateAll() { - if (!invalidating.compareAndSet(false, true)) { - fail("Both threads entered the listener concurrently"); - } - try { - Thread.sleep(100); - } catch (InterruptedException ie) { - throw new AssertionError(ie); - } - invalidating.set(false); - latch.countDown(); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - - ExecutorService executor = Executors.newCachedThreadPool(); - try { - executor.submit(() -> { - serverStoreProxy1.clear(); - return null; - }); - executor.submit(() -> { - serverStoreProxy1.clear(); - return null; - }); - - if (!latch.await(5, TimeUnit.SECONDS)) { - fail("Both listeners were not called"); - } - } finally { - executor.shutdown(); - } - } - - @Test - public void testAppendInvalidationUnblockedByDisconnection() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAppendInvalidationUnblockedByDisconnection", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAppendInvalidationUnblockedByDisconnection", false); - - StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testAppendInvalidationUnblockedByDisconnection", clientEntity1, mock(ServerCallback.class)); - StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testAppendInvalidationUnblockedByDisconnection", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - clientEntity1.fireDisconnectionEvent(); - } - - @Override - public void onInvalidateAll() { - throw new AssertionError("Should not be called"); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - - try { - serverStoreProxy1.append(1L, createPayload(1L)); - fail("expected RuntimeException"); - } catch (RuntimeException re) { - assertThat(re.getCause(), instanceOf(IllegalStateException.class)); - } - } - - @Test - public void testClearInvalidationUnblockedByDisconnection() throws Exception { - SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testClearInvalidationUnblockedByDisconnection", true); - SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testClearInvalidationUnblockedByDisconnection", false); - - StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testClearInvalidationUnblockedByDisconnection", clientEntity1, mock(ServerCallback.class)); - StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testClearInvalidationUnblockedByDisconnection", clientEntity2, new ServerCallback() { - @Override - public void onInvalidateHash(long hash) { - throw new AssertionError("Should not be called"); - } - - @Override - public void onInvalidateAll() { - clientEntity1.fireDisconnectionEvent(); - } - - @Override - public Chain compact(Chain chain) { - throw new AssertionError(); - } - }); - - try { - serverStoreProxy1.clear(); - fail("expected RuntimeException"); - } catch (RuntimeException re) { - assertThat(re.getCause(), instanceOf(IllegalStateException.class)); - } - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolverTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolverTest.java deleted file mode 100644 index 411dd815a3..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolverTest.java +++ /dev/null @@ -1,514 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store.operations; - -import org.ehcache.clustered.client.internal.store.ChainBuilder; -import org.ehcache.clustered.client.internal.store.ResolvedChain; -import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.StringSerializer; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; -import org.junit.Test; - -import java.nio.ByteBuffer; - -import static org.hamcrest.Matchers.emptyIterable; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; -import static org.hamcrest.collection.IsIterableContainingInOrder.contains; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; - -public class EternalChainResolverTest { - - private static OperationsCodec codec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); - - @Test - @SuppressWarnings("unchecked") - public void testResolveMaintainsOtherKeysInOrder() throws Exception { - Operation expected = new PutOperation<>(1L, "Suresh", 0L); - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(2L, "Albin", 0L), - expected, - new PutOperation<>(2L, "Suresh", 0L), - new PutOperation<>(2L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(true)); - - Chain compactedChain = resolvedChain.getCompactedChain(); - assertThat(compactedChain, contains( //@SuppressWarnings("unchecked") - operation(new PutOperation<>(2L, "Albin", 0L)), - operation(new PutOperation<>(2L, "Suresh", 0L)), - operation(new PutOperation<>(2L, "Mathew", 0L)), - operation(new PutOperation<>(1L, "Suresh", 0L)))); - } - - @Test - public void testResolveEmptyChain() throws Exception { - Chain chain = getChainFromOperations(); - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertNull(result); - - assertThat(resolvedChain.isCompacted(), is(false)); - } - - @Test - public void testResolveChainWithNonExistentKey() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(2L, "Suresh", 0L), - new PutOperation<>(2L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 3L, 0L); - Result result = resolvedChain.getResolvedResult(3L); - assertNull(result); - assertThat(resolvedChain.isCompacted(), is(false)); - } - - @Test - public void testResolveSinglePut() throws Exception { - Operation expected = new PutOperation<>(1L, "Albin", 0L); - Chain chain = getChainFromOperations(expected); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(false)); - } - - @Test - public void testResolvePutsOnly() throws Exception { - Operation expected = new PutOperation<>(1L, "Mathew", 0L); - - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(1L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(true)); - assertThat(resolvedChain.getCompactionCount(), is(2)); - } - - @Test - public void testResolveSingleRemove() throws Exception { - Chain chain = getChainFromOperations(new RemoveOperation<>(1L, 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertNull(result); - assertThat(resolvedChain.isCompacted(), is(true)); - assertThat(resolvedChain.getCompactionCount(), is(1)); - } - - @Test - public void testResolveRemovesOnly() throws Exception { - Chain chain = getChainFromOperations( - new RemoveOperation<>(1L, 0L), - new RemoveOperation<>(1L, 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertNull(result); - assertThat(resolvedChain.isCompacted(), is(true)); - assertThat(resolvedChain.getCompactionCount(), is(2)); - } - - @Test - public void testPutAndRemove() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new RemoveOperation<>(1L, 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertNull(result); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - public void testResolvePutIfAbsentOnly() throws Exception { - Operation expected = new PutOperation<>(1L, "Mathew", 0L); - Chain chain = getChainFromOperations(new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(false)); - } - - @Test - public void testResolvePutIfAbsentsOnly() throws Exception { - Operation expected = new PutOperation<>(1L, "Albin", 0L); - Chain chain = getChainFromOperations( - new PutIfAbsentOperation<>(1L, "Albin", 0L), - new PutIfAbsentOperation<>(1L, "Suresh", 0L), - new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - public void testResolvePutIfAbsentSucceeds() throws Exception { - Operation expected = new PutOperation<>(1L, "Mathew", 0L); - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new RemoveOperation<>(1L, 0L), - new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - public void testResolveForSingleOperationDoesNotCompact() { - Chain chain = getChainFromOperations(new PutOperation<>(1L, "Albin", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - assertThat(resolvedChain.isCompacted(), is(false)); - assertThat(resolvedChain.getCompactionCount(), is(0)); - } - - @Test - public void testResolveForMultiplesOperationsAlwaysCompact() { - //create a random mix of operations - Chain chain = getChainFromOperations( - new PutIfAbsentOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(1L, "Mathew", 0L), - new PutOperation<>(2L, "Melbin", 0L), - new ReplaceOperation<>(1L, "Joseph", 0L), - new RemoveOperation<>(2L, 0L), - new ConditionalRemoveOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Gregory", 0L), - new ConditionalReplaceOperation<>(1L, "Albin", "Abraham", 0L), - new RemoveOperation<>(1L, 0L), - new PutIfAbsentOperation<>(2L, "Albin", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - assertThat(resolvedChain.isCompacted(), is(true)); - assertThat(resolvedChain.getCompactionCount(), is(8)); - } - - @Test - public void testResolveDoesNotDecodeOtherKeyOperationValues() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(2L, "Albin", 0L), - new PutOperation<>(2L, "Suresh", 0L), - new PutOperation<>(2L, "Mathew", 0L)); - - CountingLongSerializer keySerializer = new CountingLongSerializer(); - CountingStringSerializer valueSerializer = new CountingStringSerializer(); - OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); - EternalChainResolver resolver = new EternalChainResolver<>(customCodec); - resolver.resolve(chain, 1L, 0L); - - assertThat(keySerializer.decodeCount, is(3)); - assertThat(valueSerializer.decodeCount, is(0)); - assertThat(keySerializer.encodeCount, is(0)); - assertThat(valueSerializer.encodeCount, is(0)); //No operation to resolve - } - - @Test - public void testResolveDecodesOperationValueOnlyOnDemand() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 1), - new PutOperation<>(1L, "Suresh", 2), - new PutOperation<>(1L, "Mathew", 3)); - - CountingLongSerializer keySerializer = new CountingLongSerializer(); - CountingStringSerializer valueSerializer = new CountingStringSerializer(); - OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); - EternalChainResolver resolver = new EternalChainResolver<>(customCodec); - resolver.resolve(chain, 1L, 0L); - - assertThat(keySerializer.decodeCount, is(3)); - assertThat(valueSerializer.decodeCount, is(0)); - assertThat(valueSerializer.encodeCount, is(0)); - assertThat(keySerializer.encodeCount, is(1)); //One encode from encoding the resolved operation's key - } - - @Test - @SuppressWarnings("unchecked") - public void testCompactingTwoKeys() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(2L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(2L, "Suresh", 0L), - new PutOperation<>(2L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - - Chain compactedChain = resolver.applyOperation(chain, 0L); - - assertThat(compactedChain, containsInAnyOrder( //@SuppressWarnings("unchecked") - operation(new PutOperation<>(2L, "Mathew", 0L)), - operation(new PutOperation<>(1L, "Suresh", 0L)) - )); - } - - @Test - public void testCompactEmptyChain() throws Exception { - Chain chain = (new ChainBuilder()).build(); - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compacted = resolver.applyOperation(chain, 0L); - assertThat(compacted, emptyIterable()); - } - - @Test - public void testCompactSinglePut() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L) - ); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compacted = resolver.applyOperation(chain, 0L); - - assertThat(compacted, contains(operation(new PutOperation<>(1L, "Albin", 0L)))); - } - - @Test - public void testCompactMultiplePuts() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(1L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Mathew", 0L)))); - } - - @Test - public void testCompactSingleRemove() throws Exception { - Chain chain = getChainFromOperations(new RemoveOperation<>(1L, 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, emptyIterable()); - } - - @Test - public void testCompactMultipleRemoves() throws Exception { - Chain chain = getChainFromOperations( - new RemoveOperation<>(1L, 0L), - new RemoveOperation<>(1L, 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, emptyIterable()); - } - - @Test - public void testCompactPutAndRemove() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new RemoveOperation<>(1L, 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, emptyIterable()); - } - - @Test - public void testCompactSinglePutIfAbsent() throws Exception { - Chain chain = getChainFromOperations(new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Mathew", 0L)))); - } - - @Test - public void testCompactMultiplePutIfAbsents() throws Exception { - Chain chain = getChainFromOperations( - new PutIfAbsentOperation<>(1L, "Albin", 0L), - new PutIfAbsentOperation<>(1L, "Suresh", 0L), - new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Albin", 0L)))); - } - - @Test - public void testCompactPutIfAbsentAfterRemove() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new RemoveOperation<>(1L, 0L), - new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Mathew", 0L)))); - } - - @Test - public void testCompactForMultipleKeysAndOperations() { - //create a random mix of operations - Chain chain = getChainFromOperations( - new PutIfAbsentOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(1L, "Mathew", 0L), - new PutOperation<>(2L, "Melbin", 0L), - new ReplaceOperation<>(1L, "Joseph", 0L), - new RemoveOperation<>(2L, 0L), - new ConditionalRemoveOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Gregory", 0L), - new ConditionalReplaceOperation<>(1L, "Albin", "Abraham", 0L), - new RemoveOperation<>(1L, 0L), - new PutIfAbsentOperation<>(2L, "Albin", 0L)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(2L, "Albin", 0L)))); - } - - @Test - public void testCompactHasCorrectTimeStamp() { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin1", 0), - new PutOperation<>(1L, "Albin2", 1), - new RemoveOperation<>(1L, 2), - new PutOperation<>(1L, "Albin3", 3)); - - EternalChainResolver resolver = new EternalChainResolver<>(codec); - Chain compactedChain = resolver.applyOperation(chain, 3); - - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Albin3", 3)))); - } - - @Test - public void testCompactDecodesOperationValueOnlyOnDemand() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 1), - new PutOperation<>(1L, "Suresh", 2), - new PutOperation<>(1L, "Mathew", 3)); - - CountingLongSerializer keySerializer = new CountingLongSerializer(); - CountingStringSerializer valueSerializer = new CountingStringSerializer(); - OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); - EternalChainResolver resolver = new EternalChainResolver<>(customCodec); - resolver.applyOperation(chain, 0L); - - assertThat(keySerializer.decodeCount, is(3)); - assertThat(valueSerializer.decodeCount, is(0)); //Only one decode on creation of the resolved operation - assertThat(valueSerializer.encodeCount, is(0)); //One encode from encoding the resolved operation's key - assertThat(keySerializer.encodeCount, is(1)); //One encode from encoding the resolved operation's key - } - - @SafeVarargs - private final Chain getChainFromOperations(Operation ... operations) { - ChainBuilder chainBuilder = new ChainBuilder(); - for(Operation operation: operations) { - chainBuilder = chainBuilder.add(codec.encode(operation)); - } - return chainBuilder.build(); - } - - private Matcher operation(Operation operation) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(Element item) { - return operation.equals(codec.decode(item.getPayload())); - } - - @Override - public void describeTo(Description description) { - description.appendText("is ").appendValue(operation); - } - }; - } - - private static class CountingLongSerializer extends LongSerializer { - - private int encodeCount = 0; - private int decodeCount = 0; - - @Override - public ByteBuffer serialize(final Long object) { - encodeCount++; - return super.serialize(object); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException { - decodeCount++; - return super.read(binary); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException { - return super.equals(object, binary); - } - } - - private static class CountingStringSerializer extends StringSerializer { - - private int encodeCount = 0; - private int decodeCount = 0; - - @Override - public ByteBuffer serialize(final String object) { - encodeCount++; - return super.serialize(object); - } - - @Override - public String read(final ByteBuffer binary) throws ClassNotFoundException { - decodeCount++; - return super.read(binary); - } - - @Override - public boolean equals(final String object, final ByteBuffer binary) throws ClassNotFoundException { - return super.equals(object, binary); - } - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolverExpiryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolverExpiryTest.java deleted file mode 100644 index ccf230c441..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolverExpiryTest.java +++ /dev/null @@ -1,294 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store.operations; - -import org.ehcache.ValueSupplier; -import org.ehcache.clustered.client.TestTimeSource; -import org.ehcache.clustered.client.internal.store.ChainBuilder; -import org.ehcache.clustered.client.internal.store.ResolvedChain; -import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.StringSerializer; -import org.junit.Before; -import org.junit.Test; -import org.mockito.InOrder; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.hamcrest.Matchers.is; - -public class ExpiryChainResolverExpiryTest { - - private static final OperationsCodec codec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); - - private static TestTimeSource timeSource = null; - - @Before - public void initialSetup() { - timeSource = new TestTimeSource(); - } - - @Test - @SuppressWarnings("unchecked") - public void testGetExpiryForAccessIsIgnored() { - Expiry expiry = mock(Expiry.class); - ExpiryChainResolver chainResolver = new ExpiryChainResolver<>(codec, expiry); - - when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); - - List> list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "One", timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "Second", timeSource.getTimeMillis())); - - Chain chain = getChainFromOperations(list); - - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - - verify(expiry, times(0)).getExpiryForAccess(anyLong(), any(ValueSupplier.class)); - verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); - verify(expiry, times(1)).getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString()); - - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - @SuppressWarnings("unchecked") - public void testGetExpiryForCreationIsInvokedOnlyOnce() { - Expiry expiry = mock(Expiry.class); - ExpiryChainResolver chainResolver = new ExpiryChainResolver<>(codec, expiry); - - when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); - - List> list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "One", timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "Second", timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "Three", timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "Four", timeSource.getTimeMillis())); - - Chain chain = getChainFromOperations(list); - - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - - InOrder inOrder = inOrder(expiry); - - inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); - inOrder.verify(expiry, times(3)).getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString()); - - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - @SuppressWarnings("unchecked") - public void testGetExpiryForCreationIsNotInvokedForReplacedChains() { - Expiry expiry = mock(Expiry.class); - ExpiryChainResolver chainResolver = new ExpiryChainResolver<>(codec, expiry); - - when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); - - List> list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "Replaced", -10L)); - list.add(new PutOperation<>(1L, "SecondAfterReplace", timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "ThirdAfterReplace", timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "FourthAfterReplace", timeSource.getTimeMillis())); - - Chain chain = getChainFromOperations(list); - - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - verify(expiry, times(0)).getExpiryForCreation(anyLong(), anyString()); - verify(expiry, times(3)).getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString()); - - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - @SuppressWarnings("unchecked") - public void testGetExpiryForCreationIsInvokedAfterRemoveOperations() { - - Expiry expiry = mock(Expiry.class); - ExpiryChainResolver chainResolver = new ExpiryChainResolver<>(codec, expiry); - - when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); - - List> list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "Replaced", 10L)); - list.add(new PutOperation<>(1L, "SecondAfterReplace", 3L)); - list.add(new RemoveOperation<>(1L, 4L)); - list.add(new PutOperation<>(1L, "FourthAfterReplace", 5L)); - - Chain replacedChain = getChainFromOperations(list); - - ResolvedChain resolvedChain = chainResolver.resolve(replacedChain, 1L, timeSource.getTimeMillis()); - - InOrder inOrder = inOrder(expiry); - - verify(expiry, times(0)).getExpiryForAccess(anyLong(), any(ValueSupplier.class)); - inOrder.verify(expiry, times(1)).getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString()); - inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); - - assertThat(resolvedChain.isCompacted(), is(true)); - - reset(expiry); - - when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); - - list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "One", timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "Second", timeSource.getTimeMillis())); - list.add(new RemoveOperation<>(1L, timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "Four", timeSource.getTimeMillis())); - - Chain chain = getChainFromOperations(list); - - chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - - inOrder = inOrder(expiry); - - verify(expiry, times(0)).getExpiryForAccess(anyLong(), any(ValueSupplier.class)); - inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); - inOrder.verify(expiry, times(1)).getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString()); - inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); - - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - @SuppressWarnings("unchecked") - public void testNullGetExpiryForCreation() { - Expiry expiry = mock(Expiry.class); - ExpiryChainResolver chainResolver = new ExpiryChainResolver<>(codec, expiry); - - when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(null); - - List> list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "Replaced", 10L)); - - Chain chain = getChainFromOperations(list); - - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - - assertTrue(resolvedChain.getCompactedChain().isEmpty()); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - @SuppressWarnings("unchecked") - public void testNullGetExpiryForUpdate() { - Expiry expiry = mock(Expiry.class); - ExpiryChainResolver chainResolver = new ExpiryChainResolver<>(codec, expiry); - - when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenReturn(null); - - List> list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "Replaced", -10L)); - list.add(new PutOperation<>(1L, "New", timeSource.getTimeMillis())); - Chain chain = getChainFromOperations(list); - - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - - assertThat(resolvedChain.getResolvedResult(1L).getValue(), is("New")); - assertTrue(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).isExpiryAvailable()); - assertThat(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).expirationTime(), is(10L)); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - @SuppressWarnings("unchecked") - public void testGetExpiryForUpdateUpdatesExpirationTimeStamp() { - Expiry expiry = mock(Expiry.class); - ExpiryChainResolver chainResolver = new ExpiryChainResolver<>(codec, expiry); - - when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenReturn(new Duration(2L, TimeUnit.MILLISECONDS)); - - List> list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "Replaced", -10L)); - list.add(new PutOperation<>(1L, "New", timeSource.getTimeMillis())); - Chain chain = getChainFromOperations(list); - - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - - assertThat(resolvedChain.getResolvedResult(1L).getValue(), is("New")); - assertTrue(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).isExpiryAvailable()); - assertThat(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).expirationTime(), is(2L)); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - @SuppressWarnings("unchecked") - public void testExpiryThrowsException() { - Expiry expiry = mock(Expiry.class); - ExpiryChainResolver chainResolver = new ExpiryChainResolver<>(codec, expiry); - - when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenThrow(new RuntimeException("Test Update Expiry")); - when(expiry.getExpiryForCreation(anyLong(), anyString())).thenThrow(new RuntimeException("Test Create Expiry")); - - List> list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "One", -10L)); - list.add(new PutOperation<>(1L, "Two", timeSource.getTimeMillis())); - Chain chain = getChainFromOperations(list); - - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - - assertThat(resolvedChain.getResolvedResult(1L), nullValue()); - - list = new ArrayList<>(); - list.add(new PutOperation<>(1L, "One", timeSource.getTimeMillis())); - list.add(new PutOperation<>(1L, "Two", timeSource.getTimeMillis())); - chain = getChainFromOperations(list); - - resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - - assertThat(resolvedChain.getResolvedResult(1L), nullValue()); - - assertThat(resolvedChain.isCompacted(), is(true)); - } - - private Chain getChainFromOperations(List> operations) { - ChainBuilder chainBuilder = new ChainBuilder(); - for(Operation operation: operations) { - chainBuilder = chainBuilder.add(codec.encode(operation)); - } - return chainBuilder.build(); - } - - private List> getOperationsListFromChain(Chain chain) { - List> list = new ArrayList<>(); - for (Element element : chain) { - Operation operation = codec.decode(element.getPayload()); - list.add(operation); - } - return list; - } - -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolverTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolverTest.java deleted file mode 100644 index d47ea2b61e..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolverTest.java +++ /dev/null @@ -1,583 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store.operations; - -import org.ehcache.clustered.client.internal.store.ChainBuilder; -import org.ehcache.clustered.client.internal.store.ResolvedChain; -import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.StringSerializer; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; -import org.junit.Test; - -import java.nio.ByteBuffer; -import java.util.concurrent.TimeUnit; - -import static org.ehcache.expiry.Expirations.timeToIdleExpiration; -import static org.hamcrest.Matchers.emptyIterable; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; -import static org.hamcrest.collection.IsIterableContainingInOrder.contains; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -public class ExpiryChainResolverTest { - - private static OperationsCodec codec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); - - @Test - @SuppressWarnings("unchecked") - public void testResolveMaintainsOtherKeysInOrder() throws Exception { - Operation expected = new PutOperation<>(1L, "Suresh", 0L); - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(2L, "Albin", 0L), - expected, - new PutOperation<>(2L, "Suresh", 0L), - new PutOperation<>(2L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(true)); - - Chain compactedChain = resolvedChain.getCompactedChain(); - assertThat(compactedChain, contains( //@SuppressWarnings("unchecked") - operation(new PutOperation<>(2L, "Albin", 0L)), - operation(new PutOperation<>(2L, "Suresh", 0L)), - operation(new PutOperation<>(2L, "Mathew", 0L)), - operation(new PutOperation<>(1L, "Suresh", 0L)))); - } - - @Test - public void testResolveEmptyChain() throws Exception { - Chain chain = getChainFromOperations(); - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertNull(result); - - assertThat(resolvedChain.isCompacted(), is(false)); - } - - @Test - public void testResolveChainWithNonExistentKey() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(2L, "Suresh", 0L), - new PutOperation<>(2L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 3L, 0L); - Result result = resolvedChain.getResolvedResult(3L); - assertNull(result); - assertThat(resolvedChain.isCompacted(), is(false)); - } - - @Test - public void testResolveSinglePut() throws Exception { - Operation expected = new PutOperation<>(1L, "Albin", 0L); - Chain chain = getChainFromOperations(expected); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(false)); - } - - @Test - public void testResolvePutsOnly() throws Exception { - Operation expected = new PutOperation<>(1L, "Mathew", 0L); - - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(1L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(true)); - assertThat(resolvedChain.getCompactionCount(), is(2)); - } - - @Test - public void testResolveSingleRemove() throws Exception { - Chain chain = getChainFromOperations(new RemoveOperation<>(1L, 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertNull(result); - assertThat(resolvedChain.isCompacted(), is(true)); - assertThat(resolvedChain.getCompactionCount(), is(1)); - } - - @Test - public void testResolveRemovesOnly() throws Exception { - Chain chain = getChainFromOperations( - new RemoveOperation<>(1L, 0L), - new RemoveOperation<>(1L, 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertNull(result); - assertThat(resolvedChain.isCompacted(), is(true)); - assertThat(resolvedChain.getCompactionCount(), is(2)); - } - - @Test - public void testPutAndRemove() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new RemoveOperation<>(1L, 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertNull(result); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - public void testResolvePutIfAbsentOnly() throws Exception { - Operation expected = new PutOperation<>(1L, "Mathew", 0L); - Chain chain = getChainFromOperations(new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(false)); - } - - @Test - public void testResolvePutIfAbsentsOnly() throws Exception { - Operation expected = new PutOperation<>(1L, "Albin", 0L); - Chain chain = getChainFromOperations( - new PutIfAbsentOperation<>(1L, "Albin", 0L), - new PutIfAbsentOperation<>(1L, "Suresh", 0L), - new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - public void testResolvePutIfAbsentSucceeds() throws Exception { - Operation expected = new PutOperation<>(1L, "Mathew", 0L); - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new RemoveOperation<>(1L, 0L), - new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - Result result = resolvedChain.getResolvedResult(1L); - assertEquals(expected, result); - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - public void testResolveForSingleOperationDoesNotCompact() { - Chain chain = getChainFromOperations(new PutOperation<>(1L, "Albin", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - assertThat(resolvedChain.isCompacted(), is(false)); - assertThat(resolvedChain.getCompactionCount(), is(0)); - } - - @Test - public void testResolveForMultiplesOperationsAlwaysCompact() { - //create a random mix of operations - Chain chain = getChainFromOperations( - new PutIfAbsentOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(1L, "Mathew", 0L), - new PutOperation<>(2L, "Melbin", 0L), - new ReplaceOperation<>(1L, "Joseph", 0L), - new RemoveOperation<>(2L, 0L), - new ConditionalRemoveOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Gregory", 0L), - new ConditionalReplaceOperation<>(1L, "Albin", "Abraham", 0L), - new RemoveOperation<>(1L, 0L), - new PutIfAbsentOperation<>(2L, "Albin", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 0L); - assertThat(resolvedChain.isCompacted(), is(true)); - assertThat(resolvedChain.getCompactionCount(), is(8)); - } - - @Test - public void testResolveForMultipleOperationHasCorrectIsFirstAndTimeStamp() { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin1", 0), - new PutOperation<>(1L, "Albin2", 1), - new RemoveOperation<>(1L, 2), - new PutOperation<>(1L, "AlbinAfterRemove", 3)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.timeToLiveExpiration(Duration.of(1, TimeUnit.HOURS))); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 3); - - Operation operation = codec.decode(resolvedChain.getCompactedChain().iterator().next().getPayload()); - - assertThat(operation.isExpiryAvailable(), is(true)); - assertThat(operation.expirationTime(), is(TimeUnit.HOURS.toMillis(1) + 3)); - try { - operation.timeStamp(); - fail(); - } catch (Exception ex) { - assertThat(ex.getMessage(), is("Timestamp not available")); - } - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - public void testResolveForMultipleOperationHasCorrectIsFirstAndTimeStampWithExpiry() { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin1", 0L), - new PutOperation<>(1L, "Albin2", 1L), - new PutOperation<>(1L, "Albin3", 2L), - new PutOperation<>(1L, "Albin4", 3L) - ); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.timeToLiveExpiration(new Duration(1l, TimeUnit.MILLISECONDS))); - ResolvedChain resolvedChain = resolver.resolve(chain, 1L, 3L); - - Operation operation = codec.decode(resolvedChain.getCompactedChain().iterator().next().getPayload()); - - assertThat(operation.isExpiryAvailable(), is(true)); - assertThat(operation.expirationTime(), is(4L)); - - try { - operation.timeStamp(); - fail(); - } catch (Exception ex) { - assertThat(ex.getMessage(), is("Timestamp not available")); - } - assertThat(resolvedChain.isCompacted(), is(true)); - } - - @Test - public void testResolveDoesNotDecodeOtherKeyOperationValues() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(2L, "Albin", 0L), - new PutOperation<>(2L, "Suresh", 0L), - new PutOperation<>(2L, "Mathew", 0L)); - - CountingLongSerializer keySerializer = new CountingLongSerializer(); - CountingStringSerializer valueSerializer = new CountingStringSerializer(); - OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); - ExpiryChainResolver resolver = new ExpiryChainResolver<>(customCodec, timeToIdleExpiration(Duration.of(5, TimeUnit.SECONDS))); - resolver.resolve(chain, 1L, 0L); - - assertThat(keySerializer.decodeCount, is(3)); - assertThat(valueSerializer.decodeCount, is(0)); - assertThat(keySerializer.encodeCount, is(0)); - assertThat(valueSerializer.encodeCount, is(0)); - } - - @Test - public void testResolveDecodesOperationValueOnlyOnDemand() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 1), - new PutOperation<>(1L, "Suresh", 2), - new PutOperation<>(1L, "Mathew", 3)); - - CountingLongSerializer keySerializer = new CountingLongSerializer(); - CountingStringSerializer valueSerializer = new CountingStringSerializer(); - OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); - ExpiryChainResolver resolver = new ExpiryChainResolver<>(customCodec, timeToIdleExpiration(Duration.of(5, TimeUnit.SECONDS))); - resolver.resolve(chain, 1L, 0L); - - assertThat(keySerializer.decodeCount, is(3)); - assertThat(valueSerializer.decodeCount, is(3)); - assertThat(valueSerializer.encodeCount, is(0)); - assertThat(keySerializer.encodeCount, is(1)); - } - - @Test - @SuppressWarnings("unchecked") - public void testCompactingTwoKeys() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(2L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(2L, "Suresh", 0L), - new PutOperation<>(2L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - - Chain compactedChain = resolver.applyOperation(chain, 0L); - - assertThat(compactedChain, containsInAnyOrder( //@SuppressWarnings("unchecked") - operation(new PutOperation<>(2L, "Mathew", 0L)), - operation(new PutOperation<>(1L, "Suresh", 0L)) - )); - } - - @Test - public void testCompactEmptyChain() throws Exception { - Chain chain = (new ChainBuilder()).build(); - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compacted = resolver.applyOperation(chain, 0L); - assertThat(compacted, emptyIterable()); - } - - @Test - public void testCompactSinglePut() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L) - ); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compacted = resolver.applyOperation(chain, 0L); - - assertThat(compacted, contains(operation(new PutOperation<>(1L, "Albin", 0L)))); - } - - @Test - public void testCompactMultiplePuts() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(1L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Mathew", 0L)))); - } - - @Test - public void testCompactSingleRemove() throws Exception { - Chain chain = getChainFromOperations(new RemoveOperation<>(1L, 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, emptyIterable()); - } - - @Test - public void testCompactMultipleRemoves() throws Exception { - Chain chain = getChainFromOperations( - new RemoveOperation<>(1L, 0L), - new RemoveOperation<>(1L, 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, emptyIterable()); - } - - @Test - public void testCompactPutAndRemove() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new RemoveOperation<>(1L, 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, emptyIterable()); - } - - @Test - public void testCompactSinglePutIfAbsent() throws Exception { - Chain chain = getChainFromOperations(new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Mathew", 0L)))); - } - - @Test - public void testCompactMultiplePutIfAbsents() throws Exception { - Chain chain = getChainFromOperations( - new PutIfAbsentOperation<>(1L, "Albin", 0L), - new PutIfAbsentOperation<>(1L, "Suresh", 0L), - new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Albin", 0L)))); - } - - @Test - public void testCompactPutIfAbsentAfterRemove() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 0L), - new RemoveOperation<>(1L, 0L), - new PutIfAbsentOperation<>(1L, "Mathew", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Mathew", 0L)))); - } - - @Test - public void testCompactForMultipleKeysAndOperations() { - //create a random mix of operations - Chain chain = getChainFromOperations( - new PutIfAbsentOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Suresh", 0L), - new PutOperation<>(1L, "Mathew", 0L), - new PutOperation<>(2L, "Melbin", 0L), - new ReplaceOperation<>(1L, "Joseph", 0L), - new RemoveOperation<>(2L, 0L), - new ConditionalRemoveOperation<>(1L, "Albin", 0L), - new PutOperation<>(1L, "Gregory", 0L), - new ConditionalReplaceOperation<>(1L, "Albin", "Abraham", 0L), - new RemoveOperation<>(1L, 0L), - new PutIfAbsentOperation<>(2L, "Albin", 0L)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 0L); - assertThat(compactedChain, contains(operation(new PutOperation<>(2L, "Albin", 0L)))); - } - - @Test - public void testCompactHasCorrectTimeStamp() { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin1", 0), - new PutOperation<>(1L, "Albin2", 1), - new RemoveOperation<>(1L, 2), - new PutOperation<>(1L, "Albin3", 3)); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.noExpiration()); - Chain compactedChain = resolver.applyOperation(chain, 3); - - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Albin3", 3)))); - } - - @Test - public void testCompactHasCorrectWithExpiry() { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin1", 0L), - new PutOperation<>(1L, "Albin2", 1L), - new PutOperation<>(1L, "Albin3", 2L), - new PutOperation<>(1L, "Albin4", 3L) - ); - - ExpiryChainResolver resolver = new ExpiryChainResolver<>(codec, Expirations.timeToLiveExpiration(new Duration(1l, TimeUnit.MILLISECONDS))); - Chain compactedChain = resolver.applyOperation(chain, 3L); - - assertThat(compactedChain, contains(operation(new PutOperation<>(1L, "Albin4", 3L)))); - } - - @Test - public void testCompactDecodesOperationValueOnlyOnDemand() throws Exception { - Chain chain = getChainFromOperations( - new PutOperation<>(1L, "Albin", 1), - new PutOperation<>(1L, "Suresh", 2), - new PutOperation<>(1L, "Mathew", 3)); - - CountingLongSerializer keySerializer = new CountingLongSerializer(); - CountingStringSerializer valueSerializer = new CountingStringSerializer(); - OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); - ExpiryChainResolver resolver = new ExpiryChainResolver<>(customCodec, Expirations.noExpiration()); - resolver.applyOperation(chain, 0L); - - assertThat(keySerializer.decodeCount, is(3)); - assertThat(valueSerializer.decodeCount, is(3)); - assertThat(valueSerializer.encodeCount, is(0)); - assertThat(keySerializer.encodeCount, is(1)); - } - - @SafeVarargs - private final Chain getChainFromOperations(Operation ... operations) { - ChainBuilder chainBuilder = new ChainBuilder(); - for(Operation operation: operations) { - chainBuilder = chainBuilder.add(codec.encode(operation)); - } - return chainBuilder.build(); - } - - private Matcher operation(Operation operation) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(Element item) { - return operation.equals(codec.decode(item.getPayload())); - } - - @Override - public void describeTo(Description description) { - description.appendText("is ").appendValue(operation); - } - }; - } - - private static class CountingLongSerializer extends LongSerializer { - - private int encodeCount = 0; - private int decodeCount = 0; - - @Override - public ByteBuffer serialize(final Long object) { - encodeCount++; - return super.serialize(object); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException { - decodeCount++; - return super.read(binary); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException { - return super.equals(object, binary); - } - } - - private static class CountingStringSerializer extends StringSerializer { - - private int encodeCount = 0; - private int decodeCount = 0; - - @Override - public ByteBuffer serialize(final String object) { - encodeCount++; - return super.serialize(object); - } - - @Override - public String read(final ByteBuffer binary) throws ClassNotFoundException { - decodeCount++; - return super.read(binary); - } - - @Override - public boolean equals(final String object, final ByteBuffer binary) throws ClassNotFoundException { - return super.equals(object, binary); - } - } -} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/util/StatisticsTestUtils.java b/clustered/client/src/test/java/org/ehcache/clustered/util/StatisticsTestUtils.java deleted file mode 100644 index e7cb278e0f..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/util/StatisticsTestUtils.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.util; - -import org.ehcache.core.spi.store.Store; -import org.hamcrest.Description; -import org.hamcrest.Factory; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; -import org.junit.Assert; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.ValueStatistic; - -import java.util.Arrays; -import java.util.EnumSet; -import java.util.List; - -/** - * StatisticsTestUtils - */ -public class StatisticsTestUtils { - /** - * Validates expected {@link OperationStatistic} updates for the - * indicated {@code Ehcache} instance. The statistics identified in {@code changed} are - * checked for a value of {@code 1}; all other statistics in the same enumeration class are - * checked for a value of {@code 0}. - * - * @param store the store instance to check - * @param changed the statistics values that should have updated values - * @param the statistics enumeration type - */ - public static > void validateStats(final Store store, final EnumSet changed) { - assert changed != null; - final EnumSet unchanged = EnumSet.complementOf(changed); - - @SuppressWarnings("unchecked") - final List> sets = Arrays.asList(changed, unchanged); - Class statsClass = null; - for (final EnumSet set : sets) { - if (!set.isEmpty()) { - statsClass = set.iterator().next().getDeclaringClass(); - break; - } - } - assert statsClass != null; - - final OperationStatistic operationStatistic = getOperationStatistic(store, statsClass); - for (final E statId : changed) { - Assert.assertThat(String.format("Value for %s.%s", statId.getDeclaringClass().getName(), statId.name()), - getStatistic(operationStatistic, statId), StatisticMatcher.equalTo(1L)); - } - for (final E statId : unchanged) { - Assert.assertThat(String.format("Value for %s.%s", statId.getDeclaringClass().getName(), statId.name()), - getStatistic(operationStatistic, statId), StatisticMatcher.equalTo(0L)); - } - } - - public static > void validateStat(final Store store, E outcome, long count) { - OperationStatistic operationStatistic = getOperationStatistic(store, outcome.getDeclaringClass()); - Assert.assertThat(getStatistic(operationStatistic, outcome), StatisticMatcher.equalTo(count)); - } - - /** - * Gets the value of the statistic indicated from an {@link OperationStatistic} - * instance. - * - * @param operationStatistic the {@code OperationStatistic} instance from which the statistic is to - * be obtained - * @param statId the {@code Enum} constant identifying the statistic for which the value must be obtained - * @param The {@code Enum} type for the statistics - * - * @return the value, possibly null, for {@code statId} about {@code ehcache} - */ - private static > Number getStatistic(final OperationStatistic operationStatistic, final E statId) { - if (operationStatistic != null) { - final ValueStatistic valueStatistic = operationStatistic.statistic(statId); - return (valueStatistic == null ? null : valueStatistic.value()); - } - return null; - } - - /** - * Gets a reference to the {@link OperationStatistic} instance holding the - * class of statistics specified for the {@code Ehcache} instance provided. - * - * @param store the store instance for which the {@code OperationStatistic} instance - * should be obtained - * @param statsClass the {@code Class} of statistics for which the {@code OperationStatistic} instance - * should be obtained - * @param the {@code Enum} type for the statistics - * - * @return a reference to the {@code OperationStatistic} instance holding the {@code statsClass} statistics; - * may be {@code null} if {@code statsClass} statistics do not exist for {@code ehcache} - */ - private static > OperationStatistic getOperationStatistic(final Store store, final Class statsClass) { - for (final TreeNode statNode : ContextManager.nodeFor(store).getChildren()) { - final Object statObj = statNode.getContext().attributes().get("this"); - if (statObj instanceof OperationStatistic) { - @SuppressWarnings("unchecked") - final OperationStatistic statistic = (OperationStatistic)statObj; - if (statistic.type().equals(statsClass)) { - return statistic; - } - } - } - return null; - } - - /** - * Local {@code org.hamcrest.TypeSafeMatcher} implementation for testing - * {@code org.terracotta.statistics.OperationStatistic} values. - */ - private static final class StatisticMatcher extends TypeSafeMatcher { - - final Number expected; - - private StatisticMatcher(final Class expectedType, final Number expected) { - super(expectedType); - this.expected = expected; - } - - @Override - protected boolean matchesSafely(final Number value) { - if (value != null) { - return (value.longValue() == this.expected.longValue()); - } else { - return this.expected.longValue() == 0L; - } - } - - @Override - public void describeTo(final Description description) { - if (this.expected.longValue() == 0L) { - description.appendText("zero or null"); - } else { - description.appendValue(this.expected); - } - } - - @Factory - public static Matcher equalTo(final Number expected) { - return new StatisticMatcher(Number.class, expected); - } - } -} diff --git a/clustered/client/src/test/resources/configs/docs/tc-config.xml b/clustered/client/src/test/resources/configs/docs/tc-config.xml deleted file mode 100644 index 57d1553f1c..0000000000 --- a/clustered/client/src/test/resources/configs/docs/tc-config.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - 128 - 96 - - - - diff --git a/clustered/clustered-dist/build.gradle b/clustered/clustered-dist/build.gradle deleted file mode 100644 index 26882348c6..0000000000 --- a/clustered/clustered-dist/build.gradle +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * NOTE: this directory had to be named clustered-dist instead of just dist - * because gradle creatively substitutes :dist for :clustered:dist or vice versa - * if groups are the same - * https://discuss.gradle.org/t/dependency-substitution-wrong-with-more-than-one-sub-project-with-same-name/7253/6 - */ - -group = 'org.ehcache' -archivesBaseName = 'ehcache-clustered' - -ext { - docsFolder = "$buildDir/docs/asciidoc" -} - -dependencies { - compileOnly project(':clustered:client') - compileOnly project(':clustered:common') - // Needed because declared as provided in the different projects - compileOnly "org.terracotta:runnel:$parent.terracottaPlatformVersion" -} - -apply plugin: 'distribution' -apply plugin: EhDistribute - -configurations { - kit - serverLibs -} - -dependencies { - compileOnly "org.terracotta.internal:client-runtime:$terracottaCoreVersion" - - serverLibs(project(':clustered:server')) { - exclude group: 'org.terracotta', module: 'entity-server-api' - exclude group: 'org.terracotta', module: 'entity-common-api' - exclude group: 'org.terracotta', module: 'packaging-support' - exclude group: 'org.terracotta.internal', module: 'tc-config-parser' - } - - // Needed because declared as provided in the different projects - serverLibs "org.terracotta:runnel:$parent.terracottaPlatformVersion" - - kit "org.terracotta.internal:terracotta-kit:$terracottaCoreVersion@zip" - - shadowCompile "org.slf4j:slf4j-api:$parent.slf4jVersion" - pomOnlyCompile "org.ehcache:ehcache:$parent.baseVersion" -} - -task copyDocs(type: Copy) { - dependsOn asciidocZip - from zipTree(asciidocZip.archivePath) - into docsFolder -} - -distributions { - main { - baseName = archivesBaseName - contents { - //tc kit - into ('') { - from configurations.kit.files.collect { zipTree(it) } - eachFile { f -> - // remove top level directory from the kit - f.path = f.path.replace("terracotta-$terracottaCoreVersion/", "") - } - exclude { f -> - // Exclude tc's client subdir and README.txt - Issue 1273 - f.path.contains('client/') || f.path.contains('README.txt') || f.path.contains('server/conf') - } - includeEmptyDirs = false - } - into ("server/plugins/lib") { - from configurations.serverLibs - } - into ('client/ehcache') { - from jar - from project(':dist').jar.archivePath.getPath() - exclude { f -> - !f.path.contains('ehcache') // do not add any transitives in this directory - } - } - into ('client/ehcache/documentation') { - from "$docsFolder/user" - } - into ('client/ehcache/javadoc') { - from javadocJar.archivePath.getPath() - from project(':dist').javadocJar.archivePath.getPath() - } - into ('client/lib') { - from configurations.shadowCompile - } - into ('') { - from 'src/assemble' - } - } - } -} - -distTar { - classifier = 'kit' - compression = Compression.GZIP -} - -distZip { - classifier = 'kit' -} - -[distTar, distZip, installDist]*.dependsOn copyDocs, javadocJar, project(':dist').jar, project(':dist').javadocJar diff --git a/clustered/clustered-dist/gradle.properties b/clustered/clustered-dist/gradle.properties deleted file mode 100644 index 048caca19b..0000000000 --- a/clustered/clustered-dist/gradle.properties +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 Clustered Module -subPomDesc = Ehcache 3 Clustered: Defines the client jar and the kit containing the Terracotta server -javadocExclude = **/core/**, **/impl/**, **/xml/**, **/jsr107/**, **/transactions/**, **/management/**, **/tck/** - -# Set to anything to disable SPI doc and jar generation -spiJavadocDisable = true - -osgi = {"Export-Package" : ["!com.tc*", "!com.terracotta*", "!org.terracotta*"],\ - "Import-Package" : ["!com.tc*", "!com.terracotta*", "!org.terracotta*"]} diff --git a/clustered/clustered-dist/src/assemble/server/conf/tc-config.xml b/clustered/clustered-dist/src/assemble/server/conf/tc-config.xml deleted file mode 100644 index 731ef5efcf..0000000000 --- a/clustered/clustered-dist/src/assemble/server/conf/tc-config.xml +++ /dev/null @@ -1,58 +0,0 @@ - - - - - - - - 512 - - - - - - - - %H/terracotta-logs - - - - - - - - - - - - - - - - 120 - - diff --git a/clustered/common/build.gradle b/clustered/common/build.gradle deleted file mode 100644 index 22b22a1c69..0000000000 --- a/clustered/common/build.gradle +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: EhDeploy - -dependencies { - compile "org.slf4j:slf4j-api:$slf4jVersion" - provided "org.terracotta:entity-common-api:$terracottaApisVersion" - provided "org.terracotta:runnel:$terracottaPlatformVersion" -} - -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} diff --git a/clustered/common/gradle.properties b/clustered/common/gradle.properties deleted file mode 100644 index e3f551d033..0000000000 --- a/clustered/common/gradle.properties +++ /dev/null @@ -1,21 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 Common Clustering module -subPomDesc = The Common Clustering module of Ehcache 3 - -osgi = {"Export-Package" : ["!org.ehcache.clustered.common.internal.*"],\ - "Import-Package" : ["!org.ehcache.clustered.common.*"]} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java deleted file mode 100644 index d99391c882..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.common.internal.store.SequencedElement; -import org.ehcache.clustered.common.internal.store.Util; -import org.terracotta.runnel.Struct; -import org.terracotta.runnel.StructBuilder; -import org.terracotta.runnel.decoding.StructArrayDecoder; -import org.terracotta.runnel.decoding.StructDecoder; -import org.terracotta.runnel.encoding.StructArrayEncoder; -import org.terracotta.runnel.encoding.StructEncoder; -import org.terracotta.runnel.encoding.StructEncoderFunction; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; - -public final class ChainCodec { - - private ChainCodec() { - //no implementations please - } - - private static final Struct ELEMENT_STRUCT = StructBuilder.newStructBuilder() - .int64("sequence", 10) - .byteBuffer("payload", 20) - .build(); - - public static final Struct CHAIN_STRUCT = StructBuilder.newStructBuilder() - .structs("elements", 10, ELEMENT_STRUCT) - .build(); - - public static byte[] encode(Chain chain) { - StructEncoder encoder = CHAIN_STRUCT.encoder(); - - encode(encoder, chain); - - ByteBuffer byteBuffer = encoder.encode(); - return byteBuffer.array(); - } - - public static void encode(StructEncoder encoder, Chain chain) { - StructArrayEncoder> elementsEncoder = encoder.structs("elements"); - for (Element element : chain) { - StructEncoder elementEncoder = elementsEncoder.add(); - if (element instanceof SequencedElement) { - elementEncoder.int64("sequence", ((SequencedElement) element).getSequenceNumber()); - } - elementEncoder.byteBuffer("payload", element.getPayload()); - elementEncoder.end(); - } - elementsEncoder.end(); - } - - public static Chain decode(byte[] payload) { - StructDecoder decoder = CHAIN_STRUCT.decoder(ByteBuffer.wrap(payload)); - return decode(decoder); - } - - public static Chain decode(StructDecoder decoder) { - StructArrayDecoder> elementsDecoder = decoder.structs("elements"); - - final List elements = new ArrayList<>(); - for (int i = 0; i < elementsDecoder.length(); i++) { - StructDecoder elementDecoder = elementsDecoder.next(); - Long sequence = elementDecoder.int64("sequence"); - ByteBuffer byteBuffer = elementDecoder.byteBuffer("payload"); - elementDecoder.end(); - - if (sequence == null) { - elements.add(Util.getElement(byteBuffer)); - } else { - elements.add(Util.getElement(sequence, byteBuffer)); - } - } - - elementsDecoder.end(); - - return Util.getChain(elements); - } -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClusterTierReconnectMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClusterTierReconnectMessage.java deleted file mode 100644 index d1445a4a0b..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClusterTierReconnectMessage.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import java.util.HashSet; -import java.util.Set; - -public class ClusterTierReconnectMessage { - - private final Set hashInvalidationsInProgress; - private boolean clearInProgress = false; - - public ClusterTierReconnectMessage() { - hashInvalidationsInProgress = new HashSet<>(); - } - - public ClusterTierReconnectMessage(Set hashInvalidationsInProgress) { - this.hashInvalidationsInProgress = hashInvalidationsInProgress; - } - - public void addInvalidationsInProgress(Set hashInvalidationsInProgress) { - this.hashInvalidationsInProgress.addAll(hashInvalidationsInProgress); - } - - public Set getInvalidationsInProgress() { - return hashInvalidationsInProgress; - } - - public void clearInProgress() { - clearInProgress = true; - } - - public boolean isClearInProgress() { - return clearInProgress; - } - -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java deleted file mode 100644 index 9e9e20b892..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java +++ /dev/null @@ -1,268 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - - -import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.common.internal.store.Chain; -import org.terracotta.entity.EntityResponse; - -import java.util.Set; - -public abstract class EhcacheEntityResponse implements EntityResponse { - - public abstract EhcacheResponseType getResponseType(); - - public static Success success() { - return Success.INSTANCE; - } - - public static class Success extends EhcacheEntityResponse { - - private static final Success INSTANCE = new Success(); - - private Success() { - //singleton - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.SUCCESS; - } - } - - public static Failure failure(ClusterException cause) { - return new Failure(cause); - } - - public static class Failure extends EhcacheEntityResponse { - - private final ClusterException cause; - - private Failure(ClusterException cause) { - this.cause = cause; - } - - public ClusterException getCause() { - return cause; - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.FAILURE; - } - } - - public static GetResponse getResponse(Chain chain) { - return new GetResponse(chain); - } - - public static class GetResponse extends EhcacheEntityResponse { - - private final Chain chain; - - private GetResponse(Chain chain) { - this.chain = chain; - } - - public Chain getChain() { - return chain; - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.GET_RESPONSE; - } - } - - public static HashInvalidationDone hashInvalidationDone(long key) { - return new HashInvalidationDone(key); - } - - public static class HashInvalidationDone extends EhcacheEntityResponse { - private final long key; - - private HashInvalidationDone(long key) { - this.key = key; - } - - public long getKey() { - return key; - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.HASH_INVALIDATION_DONE; - } - } - - public static AllInvalidationDone allInvalidationDone() { - return new AllInvalidationDone(); - } - - public static class AllInvalidationDone extends EhcacheEntityResponse { - - private AllInvalidationDone() { - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.ALL_INVALIDATION_DONE; - } - } - - public static ServerInvalidateHash serverInvalidateHash(long key) { - return new ServerInvalidateHash(key); - } - - public static class ServerInvalidateHash extends EhcacheEntityResponse { - private final long key; - - private ServerInvalidateHash(long key) { - this.key = key; - } - - public long getKey() { - return key; - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.SERVER_INVALIDATE_HASH; - } - } - - public static ClientInvalidateHash clientInvalidateHash(long key, int invalidationId) { - return new ClientInvalidateHash(key, invalidationId); - } - - public static class ClientInvalidateHash extends EhcacheEntityResponse { - private final long key; - private final int invalidationId; - - private ClientInvalidateHash(long key, int invalidationId) { - this.key = key; - this.invalidationId = invalidationId; - } - - public long getKey() { - return key; - } - - public int getInvalidationId() { - return invalidationId; - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.CLIENT_INVALIDATE_HASH; - } - } - - public static ClientInvalidateAll clientInvalidateAll(int invalidationId) { - return new ClientInvalidateAll(invalidationId); - } - - public static class ClientInvalidateAll extends EhcacheEntityResponse { - private final int invalidationId; - - private ClientInvalidateAll(int invalidationId) { - this.invalidationId = invalidationId; - } - - public int getInvalidationId() { - return invalidationId; - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.CLIENT_INVALIDATE_ALL; - } - } - - public static MapValue mapValue(Object value) { - return new MapValue(value); - } - - public static class MapValue extends EhcacheEntityResponse { - - private final Object value; - - private MapValue(Object value) { - this.value = value; - } - - public Object getValue() { - return this.value; - } - - @Override - public final EhcacheResponseType getResponseType() { - return EhcacheResponseType.MAP_VALUE; - } - } - - public static PrepareForDestroy prepareForDestroy(Set stores) { - return new PrepareForDestroy(stores); - } - - public static class PrepareForDestroy extends EhcacheEntityResponse { - - private final Set stores; - - private PrepareForDestroy(Set stores) { - this.stores = stores; - } - - @Override - public EhcacheResponseType getResponseType() { - return EhcacheResponseType.PREPARE_FOR_DESTROY; - } - - public Set getStores() { - return stores; - } - } - - public static ResolveRequest resolveRequest(long key, Chain chain) { - return new ResolveRequest(key, chain); - } - - public static class ResolveRequest extends EhcacheEntityResponse { - - private final long key; - private final Chain chain; - - ResolveRequest(long key, Chain chain) { - this.key = key; - this.chain = chain; - } - - @Override - public EhcacheResponseType getResponseType() { - return EhcacheResponseType.RESOLVE_REQUEST; - } - - public long getKey() { - return key; - } - - public Chain getChain() { - return chain; - } - } -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java deleted file mode 100644 index e475702143..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.terracotta.runnel.EnumMapping; - -import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; - -/** - * EhcacheResponseType - */ -public enum EhcacheResponseType { - SUCCESS, - FAILURE, - GET_RESPONSE, - HASH_INVALIDATION_DONE, - CLIENT_INVALIDATE_HASH, - CLIENT_INVALIDATE_ALL, - SERVER_INVALIDATE_HASH, - MAP_VALUE, - ALL_INVALIDATION_DONE, - PREPARE_FOR_DESTROY, - RESOLVE_REQUEST; - - - public static final String RESPONSE_TYPE_FIELD_NAME = "opCode"; - public static final int RESPONSE_TYPE_FIELD_INDEX = 10; - public static final EnumMapping EHCACHE_RESPONSE_TYPES_ENUM_MAPPING = newEnumMappingBuilder(EhcacheResponseType.class) - .mapping(EhcacheResponseType.SUCCESS, 80) - .mapping(EhcacheResponseType.FAILURE, 81) - .mapping(EhcacheResponseType.GET_RESPONSE, 82) - .mapping(EhcacheResponseType.HASH_INVALIDATION_DONE, 83) - .mapping(EhcacheResponseType.ALL_INVALIDATION_DONE, 84) - .mapping(EhcacheResponseType.CLIENT_INVALIDATE_HASH, 85) - .mapping(EhcacheResponseType.CLIENT_INVALIDATE_ALL, 86) - .mapping(EhcacheResponseType.SERVER_INVALIDATE_HASH, 87) - .mapping(EhcacheResponseType.MAP_VALUE, 88) - .mapping(EhcacheResponseType.PREPARE_FOR_DESTROY, 89) - .mapping(EhcacheResponseType.RESOLVE_REQUEST, 90) - .build(); -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java deleted file mode 100644 index f9204ff2f0..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.terracotta.runnel.Struct; -import org.terracotta.runnel.StructBuilder; -import org.terracotta.runnel.decoding.StructDecoder; -import org.terracotta.runnel.encoding.StructEncoder; - -import java.nio.ByteBuffer; - -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; -import static org.terracotta.runnel.StructBuilder.newStructBuilder; - -public class LifeCycleMessageCodec { - - private static final String CONFIG_PRESENT_FIELD = "configPresent"; - - private final StructBuilder VALIDATE_MESSAGE_STRUCT_BUILDER_PREFIX = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .bool(CONFIG_PRESENT_FIELD, 30); - private static final int CONFIGURE_MESSAGE_NEXT_INDEX = 40; - - private final StructBuilder VALIDATE_STORE_MESSAGE_STRUCT_BUILDER_PREFIX = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .string(SERVER_STORE_NAME_FIELD, 30); - private static final int VALIDATE_STORE_NEXT_INDEX = 40; - - private final Struct PREPARE_FOR_DESTROY_STRUCT = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .build(); - - private final Struct validateMessageStruct; - private final Struct validateStoreMessageStruct; - - private final MessageCodecUtils messageCodecUtils; - private final ConfigCodec configCodec; - - public LifeCycleMessageCodec(ConfigCodec configCodec) { - this.messageCodecUtils = new MessageCodecUtils(); - this.configCodec = configCodec; - validateMessageStruct = this.configCodec.injectServerSideConfiguration( - VALIDATE_MESSAGE_STRUCT_BUILDER_PREFIX, CONFIGURE_MESSAGE_NEXT_INDEX).getUpdatedBuilder().build(); - - validateStoreMessageStruct = this.configCodec.injectServerStoreConfiguration( - VALIDATE_STORE_MESSAGE_STRUCT_BUILDER_PREFIX, VALIDATE_STORE_NEXT_INDEX).getUpdatedBuilder().build(); - } - - public byte[] encode(LifecycleMessage message) { - switch (message.getMessageType()) { - case VALIDATE: - return encodeTierManagerValidateMessage((LifecycleMessage.ValidateStoreManager) message); - case VALIDATE_SERVER_STORE: - return encodeValidateStoreMessage((LifecycleMessage.ValidateServerStore) message); - case PREPARE_FOR_DESTROY: - return encodePrepareForDestroyMessage(message); - default: - throw new IllegalArgumentException("Unknown lifecycle message: " + message.getClass()); - } - } - - private byte[] encodePrepareForDestroyMessage(LifecycleMessage message) { - return PREPARE_FOR_DESTROY_STRUCT.encoder() - .enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) - .encode().array(); - } - - private byte[] encodeValidateStoreMessage(LifecycleMessage.ValidateServerStore message) { - StructEncoder encoder = validateStoreMessageStruct.encoder(); - messageCodecUtils.encodeMandatoryFields(encoder, message); - - encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); - configCodec.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); - return encoder.encode().array(); - } - - private byte[] encodeTierManagerValidateMessage(LifecycleMessage.ValidateStoreManager message) { - StructEncoder encoder = validateMessageStruct.encoder(); - ServerSideConfiguration config = message.getConfiguration(); - messageCodecUtils.encodeMandatoryFields(encoder, message); - if (config == null) { - encoder.bool(CONFIG_PRESENT_FIELD, false); - } else { - encoder.bool(CONFIG_PRESENT_FIELD, true); - configCodec.encodeServerSideConfiguration(encoder, config); - } - return encoder.encode().array(); - } - - public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { - - switch (messageType) { - case VALIDATE: - return decodeValidateMessage(messageBuffer); - case VALIDATE_SERVER_STORE: - return decodeValidateServerStoreMessage(messageBuffer); - case PREPARE_FOR_DESTROY: - return decodePrepareForDestroyMessage(); - } - throw new IllegalArgumentException("LifeCycleMessage operation not defined for : " + messageType); - } - - private LifecycleMessage.PrepareForDestroy decodePrepareForDestroyMessage() { - return new LifecycleMessage.PrepareForDestroy(); - } - - private LifecycleMessage.ValidateServerStore decodeValidateServerStoreMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = validateStoreMessageStruct.decoder(messageBuffer); - - String storeName = decoder.string(SERVER_STORE_NAME_FIELD); - ServerStoreConfiguration config = configCodec.decodeServerStoreConfiguration(decoder); - - return new LifecycleMessage.ValidateServerStore(storeName, config); - } - - private LifecycleMessage.ValidateStoreManager decodeValidateMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = validateMessageStruct.decoder(messageBuffer); - - boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); - - ServerSideConfiguration config = null; - if (configPresent) { - config = configCodec.decodeServerSideConfiguration(decoder); - } - - return new LifecycleMessage.ValidateStoreManager(config); - } -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java deleted file mode 100644 index 13f0a821e0..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.terracotta.runnel.encoding.StructEncoder; - -import java.util.UUID; - -/** - * MessageCodecUtils - */ -public class MessageCodecUtils { - - public static final String SERVER_STORE_NAME_FIELD = "serverStoreName"; - public static final String KEY_FIELD = "key"; - - public void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationMessage message) { - encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()); - } -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java deleted file mode 100644 index 01b2a73f2b..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.terracotta.runnel.Struct; -import org.terracotta.runnel.decoding.ArrayDecoder; -import org.terracotta.runnel.decoding.StructDecoder; -import org.terracotta.runnel.encoding.ArrayEncoder; -import org.terracotta.runnel.encoding.StructEncoder; - -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; - -import static java.nio.ByteBuffer.wrap; -import static org.terracotta.runnel.StructBuilder.newStructBuilder; - -public class ReconnectMessageCodec { - - private static final String HASH_INVALIDATION_IN_PROGRESS_FIELD = "hashInvalidationInProgress"; - private static final String CLEAR_IN_PROGRESS_FIELD = "clearInProgress"; - - private static final Struct CLUSTER_TIER_RECONNECT_MESSAGE_STRUCT = newStructBuilder() - .int64s(HASH_INVALIDATION_IN_PROGRESS_FIELD, 20) - .bool(CLEAR_IN_PROGRESS_FIELD, 30) - .build(); - - public byte[] encode(ClusterTierReconnectMessage reconnectMessage) { - StructEncoder encoder = CLUSTER_TIER_RECONNECT_MESSAGE_STRUCT.encoder(); - ArrayEncoder> arrayEncoder = encoder.int64s(HASH_INVALIDATION_IN_PROGRESS_FIELD); - for (Long hash : reconnectMessage.getInvalidationsInProgress()) { - arrayEncoder.value(hash); - } - encoder.bool(CLEAR_IN_PROGRESS_FIELD, reconnectMessage.isClearInProgress()); - return encoder.encode().array(); - } - - public ClusterTierReconnectMessage decode(byte[] payload) { - StructDecoder decoder = CLUSTER_TIER_RECONNECT_MESSAGE_STRUCT.decoder(wrap(payload)); - ArrayDecoder> arrayDecoder = decoder.int64s(HASH_INVALIDATION_IN_PROGRESS_FIELD); - - Set hashes; - if (arrayDecoder != null) { - hashes = new HashSet(arrayDecoder.length()); - for (int i = 0; i < arrayDecoder.length(); i++) { - hashes.add(arrayDecoder.value()); - } - } else { - hashes = new HashSet(0); - } - ClusterTierReconnectMessage message = new ClusterTierReconnectMessage(hashes); - - Boolean clearInProgress = decoder.bool(CLEAR_IN_PROGRESS_FIELD); - if (clearInProgress != null && clearInProgress) { - message.clearInProgress(); - } - return message; - } -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java deleted file mode 100644 index 4e3f69b20a..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.PrepareForDestroy; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ResolveRequest; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Util; -import org.terracotta.runnel.Struct; -import org.terracotta.runnel.StructBuilder; -import org.terracotta.runnel.decoding.ArrayDecoder; -import org.terracotta.runnel.decoding.Enm; -import org.terracotta.runnel.decoding.StructDecoder; -import org.terracotta.runnel.encoding.ArrayEncoder; -import org.terracotta.runnel.encoding.StructEncoder; - -import java.nio.ByteBuffer; -import java.util.HashSet; -import java.util.Set; - -import static java.nio.ByteBuffer.wrap; -import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.AllInvalidationDone; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateAll; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateHash; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.HashInvalidationDone; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ServerInvalidateHash; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.MapValue; -import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.EHCACHE_RESPONSE_TYPES_ENUM_MAPPING; -import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.RESPONSE_TYPE_FIELD_INDEX; -import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.RESPONSE_TYPE_FIELD_NAME; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; -import static org.ehcache.clustered.common.internal.messages.StateRepositoryOpCodec.WHITELIST_PREDICATE; -import static org.terracotta.runnel.StructBuilder.newStructBuilder; - -public class ResponseCodec { - - private static final String EXCEPTION_FIELD = "exception"; - private static final String INVALIDATION_ID_FIELD = "invalidationId"; - private static final String CHAIN_FIELD = "chain"; - private static final String MAP_VALUE_FIELD = "mapValue"; - private static final String STORES_FIELD = "stores"; - - private static final Struct SUCCESS_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .build(); - private static final Struct FAILURE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .struct(EXCEPTION_FIELD, 20, ExceptionCodec.EXCEPTION_STRUCT) - .build(); - private static final Struct GET_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .struct(CHAIN_FIELD, 20, CHAIN_STRUCT) - .build(); - private static final Struct HASH_INVALIDATION_DONE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 20) - .build(); - private static final Struct ALL_INVALIDATION_DONE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .build(); - private static final Struct CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 20) - .int32(INVALIDATION_ID_FIELD, 30) - .build(); - private static final Struct CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .int32(INVALIDATION_ID_FIELD, 20) - .build(); - private static final Struct SERVER_INVALIDATE_HASH_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 20) - .build(); - private static final Struct MAP_VALUE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .byteBuffer(MAP_VALUE_FIELD, 20) - .build(); - private static final Struct PREPARE_FOR_DESTROY_RESPONSE_STRUCT = newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .strings(STORES_FIELD, 20) - .build(); - private static final Struct RESOLVE_REQUEST_RESPONSE_STRUCT = newStructBuilder() - .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 20) - .struct(CHAIN_FIELD, 30, CHAIN_STRUCT) - .build(); - - public byte[] encode(EhcacheEntityResponse response) { - switch (response.getResponseType()) { - case FAILURE: - final EhcacheEntityResponse.Failure failure = (EhcacheEntityResponse.Failure)response; - return FAILURE_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, failure.getResponseType()) - .struct(EXCEPTION_FIELD, failure.getCause(), ExceptionCodec::encode) - .encode().array(); - case SUCCESS: - return SUCCESS_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, response.getResponseType()) - .encode().array(); - case GET_RESPONSE: - final EhcacheEntityResponse.GetResponse getResponse = (EhcacheEntityResponse.GetResponse)response; - return GET_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, getResponse.getResponseType()) - .struct(CHAIN_FIELD, getResponse.getChain(), ChainCodec::encode) - .encode().array(); - case HASH_INVALIDATION_DONE: { - HashInvalidationDone hashInvalidationDone = (HashInvalidationDone) response; - return HASH_INVALIDATION_DONE_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, hashInvalidationDone.getResponseType()) - .int64(KEY_FIELD, hashInvalidationDone.getKey()) - .encode().array(); - } - case ALL_INVALIDATION_DONE: { - AllInvalidationDone allInvalidationDone = (AllInvalidationDone) response; - return ALL_INVALIDATION_DONE_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, allInvalidationDone.getResponseType()) - .encode().array(); - } - case CLIENT_INVALIDATE_HASH: { - ClientInvalidateHash clientInvalidateHash = (ClientInvalidateHash) response; - return CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, clientInvalidateHash.getResponseType()) - .int64(KEY_FIELD, clientInvalidateHash.getKey()) - .int32(INVALIDATION_ID_FIELD, clientInvalidateHash.getInvalidationId()) - .encode().array(); - } - case CLIENT_INVALIDATE_ALL: { - ClientInvalidateAll clientInvalidateAll = (ClientInvalidateAll) response; - return CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, clientInvalidateAll.getResponseType()) - .int32(INVALIDATION_ID_FIELD, clientInvalidateAll.getInvalidationId()) - .encode().array(); - } - case SERVER_INVALIDATE_HASH: { - ServerInvalidateHash serverInvalidateHash = (ServerInvalidateHash) response; - return SERVER_INVALIDATE_HASH_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, serverInvalidateHash.getResponseType()) - .int64(KEY_FIELD, serverInvalidateHash.getKey()) - .encode().array(); - } - case MAP_VALUE: { - MapValue mapValue = (MapValue) response; - byte[] encodedMapValue = Util.marshall(mapValue.getValue()); - return MAP_VALUE_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, mapValue.getResponseType()) - .byteBuffer(MAP_VALUE_FIELD, wrap(encodedMapValue)) - .encode().array(); - } - case PREPARE_FOR_DESTROY: { - PrepareForDestroy prepare = (PrepareForDestroy) response; - StructEncoder encoder = PREPARE_FOR_DESTROY_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, prepare.getResponseType()); - ArrayEncoder> storesEncoder = encoder.strings(STORES_FIELD); - for (String storeName : prepare.getStores()) { - storesEncoder.value(storeName); - } - return encoder - .encode().array(); - } - case RESOLVE_REQUEST: { - EhcacheEntityResponse.ResolveRequest resolve = (ResolveRequest) response; - return RESOLVE_REQUEST_RESPONSE_STRUCT.encoder() - .enm(RESPONSE_TYPE_FIELD_NAME, resolve.getResponseType()) - .int64(KEY_FIELD, resolve.getKey()) - .struct(CHAIN_FIELD, resolve.getChain(), ChainCodec::encode) - .encode().array(); - } - default: - throw new UnsupportedOperationException("The operation is not supported : " + response.getResponseType()); - } - } - - public EhcacheEntityResponse decode(byte[] payload) { - ByteBuffer buffer = wrap(payload); - StructDecoder decoder = SUCCESS_RESPONSE_STRUCT.decoder(buffer); - Enm opCodeEnm = decoder.enm(RESPONSE_TYPE_FIELD_NAME); - - if (!opCodeEnm.isFound()) { - throw new AssertionError("Got a response without an opCode"); - } - if (!opCodeEnm.isValid()) { - // Need to ignore the response here as we do not understand its type - coming from the future? - return null; - } - - EhcacheResponseType opCode = opCodeEnm.get(); - buffer.rewind(); - switch (opCode) { - case SUCCESS: - return EhcacheEntityResponse.success(); - case FAILURE: - decoder = FAILURE_RESPONSE_STRUCT.decoder(buffer); - ClusterException exception = ExceptionCodec.decode(decoder.struct(EXCEPTION_FIELD)); - return EhcacheEntityResponse.failure(exception.withClientStackTrace()); - case GET_RESPONSE: - decoder = GET_RESPONSE_STRUCT.decoder(buffer); - return EhcacheEntityResponse.getResponse(ChainCodec.decode(decoder.struct(CHAIN_FIELD))); - case HASH_INVALIDATION_DONE: { - decoder = HASH_INVALIDATION_DONE_RESPONSE_STRUCT.decoder(buffer); - long key = decoder.int64(KEY_FIELD); - return EhcacheEntityResponse.hashInvalidationDone(key); - } - case ALL_INVALIDATION_DONE: { - return EhcacheEntityResponse.allInvalidationDone(); - } - case CLIENT_INVALIDATE_HASH: { - decoder = CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT.decoder(buffer); - long key = decoder.int64(KEY_FIELD); - int invalidationId = decoder.int32(INVALIDATION_ID_FIELD); - return EhcacheEntityResponse.clientInvalidateHash(key, invalidationId); - } - case CLIENT_INVALIDATE_ALL: { - decoder = CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT.decoder(buffer); - int invalidationId = decoder.int32(INVALIDATION_ID_FIELD); - return EhcacheEntityResponse.clientInvalidateAll(invalidationId); - } - case SERVER_INVALIDATE_HASH: { - decoder = SERVER_INVALIDATE_HASH_RESPONSE_STRUCT.decoder(buffer); - long key = decoder.int64(KEY_FIELD); - return EhcacheEntityResponse.serverInvalidateHash(key); - } - case MAP_VALUE: { - decoder = MAP_VALUE_RESPONSE_STRUCT.decoder(buffer); - return EhcacheEntityResponse.mapValue( - Util.unmarshall(decoder.byteBuffer(MAP_VALUE_FIELD), WHITELIST_PREDICATE)); - } - case PREPARE_FOR_DESTROY: { - decoder = PREPARE_FOR_DESTROY_RESPONSE_STRUCT.decoder(buffer); - ArrayDecoder> storesDecoder = decoder.strings(STORES_FIELD); - Set stores = new HashSet<>(); - for (int i = 0; i < storesDecoder.length(); i++) { - stores.add(storesDecoder.value()); - } - return EhcacheEntityResponse.prepareForDestroy(stores); - } - case RESOLVE_REQUEST: { - decoder = RESOLVE_REQUEST_RESPONSE_STRUCT.decoder(buffer); - long key = decoder.int64(KEY_FIELD); - Chain chain = ChainCodec.decode(decoder.struct(CHAIN_FIELD)); - return EhcacheEntityResponse.resolveRequest(key, chain); - } - default: - throw new UnsupportedOperationException("The operation is not supported with opCode : " + opCode); - } - } -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java deleted file mode 100644 index 7b26ae7caf..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClearMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAllAck; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage; -import org.ehcache.clustered.common.internal.store.Chain; -import org.terracotta.runnel.Struct; -import org.terracotta.runnel.decoding.StructDecoder; -import org.terracotta.runnel.encoding.StructEncoder; - -import java.nio.ByteBuffer; - -import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; -import static org.terracotta.runnel.StructBuilder.newStructBuilder; - -public class ServerStoreOpCodec { - - private static final Struct GET_AND_APPEND_MESSAGE_STRUCT = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 30) - .byteBuffer("payload", 40) - .build(); - - private static final Struct APPEND_MESSAGE_STRUCT = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 30) - .byteBuffer("payload", 40) - .build(); - - private static final Struct REPLACE_MESSAGE_STRUCT = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 30) - .struct("expect", 40, CHAIN_STRUCT) - .struct("update", 50, CHAIN_STRUCT) - .build(); - - private static final Struct CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 20) - .int32("invalidationId", 30) - .build(); - - private static final Struct CLIENT_INVALIDATION_ALL_ACK_MESSAGE_STRUCT = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .int32("invalidationId", 40) - .build(); - - private static final Struct CLEAR_MESSAGE_STRUCT = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .build(); - - private static final Struct GET_MESSAGE_STRUCT = newStructBuilder() - .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .int64(KEY_FIELD, 30) - .build(); - - private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); - - public byte[] encode(ServerStoreOpMessage message) { - StructEncoder encoder; - - switch (message.getMessageType()) { - case GET_STORE: - GetMessage getMessage = (GetMessage) message; - encoder = GET_MESSAGE_STRUCT.encoder(); - return encoder - .enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) - .int64(KEY_FIELD, getMessage.getKey()) - .encode() - .array(); - case APPEND: - AppendMessage appendMessage = (AppendMessage) message; - encoder = APPEND_MESSAGE_STRUCT.encoder(); - messageCodecUtils.encodeMandatoryFields(encoder, message); - return encoder - .int64(KEY_FIELD, appendMessage.getKey()) - .byteBuffer("payload", appendMessage.getPayload()) - .encode() - .array(); - case GET_AND_APPEND: - GetAndAppendMessage getAndAppendMessage = (GetAndAppendMessage) message; - encoder = GET_AND_APPEND_MESSAGE_STRUCT.encoder(); - messageCodecUtils.encodeMandatoryFields(encoder, message); - return encoder - .int64(KEY_FIELD, getAndAppendMessage.getKey()) - .byteBuffer("payload", getAndAppendMessage.getPayload()) - .encode() - .array(); - case REPLACE: - final ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage) message; - encoder = REPLACE_MESSAGE_STRUCT.encoder(); - messageCodecUtils.encodeMandatoryFields(encoder, message); - return encoder - .int64(KEY_FIELD, replaceAtHeadMessage.getKey()) - .struct("expect", replaceAtHeadMessage.getExpect(), ChainCodec::encode) - .struct("update", replaceAtHeadMessage.getUpdate(), ChainCodec::encode) - .encode() - .array(); - case CLIENT_INVALIDATION_ACK: - ClientInvalidationAck clientInvalidationAckMessage = (ClientInvalidationAck) message; - encoder = CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT.encoder(); - return encoder - .enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) - .int64(KEY_FIELD, clientInvalidationAckMessage.getKey()) - .int32("invalidationId", clientInvalidationAckMessage.getInvalidationId()) - .encode() - .array(); - case CLIENT_INVALIDATION_ALL_ACK: - ClientInvalidationAllAck clientInvalidationAllAckMessage = (ClientInvalidationAllAck) message; - encoder = CLIENT_INVALIDATION_ALL_ACK_MESSAGE_STRUCT.encoder(); - return encoder - .enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) - .int32("invalidationId", clientInvalidationAllAckMessage.getInvalidationId()) - .encode().array(); - case CLEAR: - encoder = CLEAR_MESSAGE_STRUCT.encoder(); - messageCodecUtils.encodeMandatoryFields(encoder, message); - return encoder - .encode() - .array(); - default: - throw new RuntimeException("Unhandled message operation : " + message.getMessageType()); - } - } - - public EhcacheEntityMessage decode(EhcacheMessageType opCode, ByteBuffer messageBuffer) { - StructDecoder decoder; - switch (opCode) { - case GET_STORE: { - decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); - Long key = decoder.int64(KEY_FIELD); - return new GetMessage(key); - } - case GET_AND_APPEND: { - decoder = GET_AND_APPEND_MESSAGE_STRUCT.decoder(messageBuffer); - Long key = decoder.int64(KEY_FIELD); - ByteBuffer payload = decoder.byteBuffer("payload"); - return new GetAndAppendMessage(key, payload); - } - case APPEND: { - decoder = APPEND_MESSAGE_STRUCT.decoder(messageBuffer); - Long key = decoder.int64(KEY_FIELD); - ByteBuffer payload = decoder.byteBuffer("payload"); - return new AppendMessage(key, payload); - } - case REPLACE: { - decoder = REPLACE_MESSAGE_STRUCT.decoder(messageBuffer); - Long key = decoder.int64(KEY_FIELD); - Chain expect = ChainCodec.decode(decoder.struct("expect")); - Chain update = ChainCodec.decode(decoder.struct("update")); - return new ReplaceAtHeadMessage(key, expect, update); - } - case CLIENT_INVALIDATION_ACK: { - decoder = CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT.decoder(messageBuffer); - Long key = decoder.int64(KEY_FIELD); - Integer invalidationId = decoder.int32("invalidationId"); - return new ClientInvalidationAck(key, invalidationId); - } - case CLIENT_INVALIDATION_ALL_ACK: { - decoder = CLIENT_INVALIDATION_ALL_ACK_MESSAGE_STRUCT - .decoder(messageBuffer); - Integer invalidationId = decoder.int32("invalidationId"); - return new ClientInvalidationAllAck(invalidationId); - } - case CLEAR: { - return new ClearMessage(); - } - default: - throw new RuntimeException("Unhandled message operation : " + opCode); - } - } - -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java deleted file mode 100644 index 6d3db2154f..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.internal.store.Chain; - -import java.nio.ByteBuffer; - -public abstract class ServerStoreOpMessage extends EhcacheOperationMessage { - - private ServerStoreOpMessage() { - } - - public static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage implements ConcurrentEntityMessage { - - private final long key; - - KeyBasedServerStoreOpMessage(final long key) { - this.key = key; - } - - public long getKey() { - return key; - } - - @Override - public long concurrencyKey() { - return key; - } - } - - public static class GetMessage extends KeyBasedServerStoreOpMessage { - - public GetMessage(long key) { - super(key); - } - - @Override - public EhcacheMessageType getMessageType() { - return EhcacheMessageType.GET_STORE; - } - } - - public static class GetAndAppendMessage extends KeyBasedServerStoreOpMessage { - - private final ByteBuffer payload; - - public GetAndAppendMessage(long key, ByteBuffer payload) { - super(key); - this.payload = payload; - } - - @Override - public EhcacheMessageType getMessageType() { - return EhcacheMessageType.GET_AND_APPEND; - } - - public ByteBuffer getPayload() { - return payload; - } - - } - - public static class AppendMessage extends KeyBasedServerStoreOpMessage { - - private final ByteBuffer payload; - - public AppendMessage(long key, ByteBuffer payload) { - super(key); - this.payload = payload; - } - - @Override - public EhcacheMessageType getMessageType() { - return EhcacheMessageType.APPEND; - } - - public ByteBuffer getPayload() { - return payload; - } - - } - - public static class ReplaceAtHeadMessage extends KeyBasedServerStoreOpMessage { - - private final Chain expect; - private final Chain update; - - public ReplaceAtHeadMessage(long key, Chain expect, Chain update) { - super(key); - this.expect = expect; - this.update = update; - } - - @Override - public EhcacheMessageType getMessageType() { - return EhcacheMessageType.REPLACE; - } - - public Chain getExpect() { - return expect; - } - - public Chain getUpdate() { - return update; - } - } - - public static class ClientInvalidationAck extends KeyBasedServerStoreOpMessage { - - private final int invalidationId; - - public ClientInvalidationAck(long key, int invalidationId) { - super(key); - this.invalidationId = invalidationId; - } - - public int getInvalidationId() { - return invalidationId; - } - - @Override - public EhcacheMessageType getMessageType() { - return EhcacheMessageType.CLIENT_INVALIDATION_ACK; - } - } - - public static class ClientInvalidationAllAck extends ServerStoreOpMessage { - - private final int invalidationId; - - public ClientInvalidationAllAck(int invalidationId) { - super(); - this.invalidationId = invalidationId; - } - - @Override - public EhcacheMessageType getMessageType() { - return EhcacheMessageType.CLIENT_INVALIDATION_ALL_ACK; - } - - public int getInvalidationId() { - return invalidationId; - } - } - - public static class ClearMessage extends ServerStoreOpMessage { - - @Override - public EhcacheMessageType getMessageType() { - return EhcacheMessageType.CLEAR; - } - } - -} - diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java deleted file mode 100644 index 9d177130ec..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.store; - -import org.ehcache.clustered.common.internal.util.ByteBufferInputStream; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.ListIterator; -import java.util.function.Predicate; - -public class Util { - - public static final Iterator reverseIterator(List list) { - final ListIterator listIterator = list.listIterator(list.size()); - return new Iterator() { - @Override - public boolean hasNext() { - return listIterator.hasPrevious(); - } - - @Override - public T next() { - return listIterator.previous(); - } - - @Override - public void remove() { - listIterator.remove(); - } - }; - } - - public static long readPayLoad(ByteBuffer byteBuffer) { - return byteBuffer.getLong(); - } - - public static ByteBuffer createPayload(long key) { - ByteBuffer byteBuffer = ByteBuffer.allocate(8).putLong(key); - byteBuffer.flip(); - return byteBuffer.asReadOnlyBuffer(); - } - - public static ByteBuffer createPayload(long key, int payloadSize) { - if (payloadSize < 8) { - throw new IllegalArgumentException("payload must be at least 8 bytes long"); - } - ByteBuffer byteBuffer = ByteBuffer.allocate(payloadSize); - byteBuffer.putLong(key); - for (int i = 0; i < payloadSize - 8; i++) { - byteBuffer.put((byte) 0); - } - byteBuffer.flip(); - return byteBuffer.asReadOnlyBuffer(); - } - - public static boolean chainsEqual(Chain chain1, Chain chain2) { - Iterator it1 = chain1.iterator(); - Iterator it2 = chain2.iterator(); - - while (it1.hasNext() && it2.hasNext()) { - Element next1 = it1.next(); - Element next2 = it2.next(); - - if (!next1.getPayload().equals(next2.getPayload())) { - return false; - } - } - - return !it1.hasNext() && !it2.hasNext(); - } - - public static Element getElement(final ByteBuffer payload) { - return payload::duplicate; - } - - public static Chain getChain(boolean isSequenced, ByteBuffer... buffers) { - List elements = new ArrayList<>(); - long counter = 0; - for (final ByteBuffer buffer : buffers) { - if (isSequenced) { - elements.add(getElement(counter++, buffer)); - } else { - elements.add(getElement(buffer)); - } - - } - return getChain(elements); - } - - public static Chain getChain(final List elements) { - return new Chain() { - private final List list = Collections.unmodifiableList(elements); - @Override - public Iterator reverseIterator() { - return Util.reverseIterator(list); - } - - @Override - public boolean isEmpty() { - return list.isEmpty(); - } - - @Override - public int length() { - return list.size(); - } - - @Override - public Iterator iterator() { - return list.iterator(); - } - }; - } - - public static SequencedElement getElement(final long sequence, final ByteBuffer payload) { - return new SequencedElement() { - @Override - public long getSequenceNumber() { - return sequence; - } - - @Override - public ByteBuffer getPayload() { - return payload.duplicate(); - } - }; - } - - public static Object unmarshall(ByteBuffer payload, Predicate> isClassPermitted) { - try (ObjectInputStream objectInputStream = - new FilteredObjectInputStream(new ByteBufferInputStream(payload), isClassPermitted, null)) { - return objectInputStream.readObject(); - } catch (IOException | ClassNotFoundException ex) { - throw new IllegalArgumentException(ex); - } - } - - public static byte[] marshall(Object message) { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - try(ObjectOutputStream oout = new ObjectOutputStream(out)) { - oout.writeObject(message); - } catch (IOException e) { - throw new IllegalArgumentException(e); - } - return out.toByteArray(); - } -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java deleted file mode 100644 index 0b0c1e60b0..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.util; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.ByteBuffer; - -import static java.lang.Math.max; -import static java.lang.Math.min; - -// TODO remove once it comes with Runnel -public class ByteBufferInputStream extends InputStream { - - private final ByteBuffer buffer; - - public ByteBufferInputStream(ByteBuffer buffer) { - this.buffer = buffer.slice(); - } - - @Override - public int read() throws IOException { - if (buffer.hasRemaining()) { - return 0xff & buffer.get(); - } else { - return -1; - } - } - - @Override - public int read(byte b[], int off, int len) { - len = min(len, buffer.remaining()); - buffer.get(b, off, len); - return len; - } - - @Override - public long skip(long n) { - n = min(buffer.remaining(), max(n, 0)); - buffer.position((int) (buffer.position() + n)); - return n; - } - - @Override - public synchronized int available() { - return buffer.remaining(); - } -} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ChainCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ChainCodecTest.java deleted file mode 100644 index 31786a9b69..0000000000 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ChainCodecTest.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.common.internal.store.SequencedElement; -import org.junit.Test; - -import java.util.Iterator; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.hamcrest.Matchers.is; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.ehcache.clustered.common.internal.store.Util.readPayLoad; -import static org.ehcache.clustered.common.internal.store.Util.getChain; - -public class ChainCodecTest { - - @Test - public void testChainWithSingleElement() { - Chain chain = getChain(false, createPayload(1L)); - - assertThat(chain.isEmpty(), is(false)); - Iterator chainIterator = chain.iterator(); - assertThat(readPayLoad(chainIterator.next().getPayload()), is(1L)); - assertThat(chainIterator.hasNext(), is(false)); - - Chain decoded = ChainCodec.decode(ChainCodec.encode(chain)); - - assertThat(decoded.isEmpty(), is(false)); - chainIterator = decoded.iterator(); - assertThat(readPayLoad(chainIterator.next().getPayload()), is(1L)); - assertThat(chainIterator.hasNext(), is(false)); - } - - @Test - public void testChainWithSingleSequencedElement() { - Chain chain = getChain(true, createPayload(1L)); - - assertThat(chain.isEmpty(), is(false)); - Iterator chainIterator = chain.iterator(); - assertThat(readPayLoad(chainIterator.next().getPayload()), is(1L)); - assertThat(chainIterator.hasNext(), is(false)); - - Chain decoded = ChainCodec.decode(ChainCodec.encode(chain)); - - assertThat(decoded.isEmpty(), is(false)); - chainIterator = decoded.iterator(); - assertThat(readPayLoad(chainIterator.next().getPayload()), is(1L)); - assertThat(chainIterator.hasNext(), is(false)); - - assertSameSequenceChain(chain, decoded); - } - - @Test - public void testChainWithMultipleElements() { - Chain chain = getChain(false, createPayload(1L), createPayload(2L), createPayload(3L)); - - assertThat(chain.isEmpty(), is(false)); - Util.assertChainHas(chain, 1L, 2L, 3L); - - Chain decoded = ChainCodec.decode(ChainCodec.encode(chain)); - - assertThat(decoded.isEmpty(), is(false)); - Util.assertChainHas(decoded, 1L, 2L, 3L); - } - - @Test - public void testChainWithMultipleSequencedElements() { - Chain chain = getChain(true, createPayload(1L), createPayload(2L), createPayload(3L)); - - assertThat(chain.isEmpty(), is(false)); - Util.assertChainHas(chain, 1L, 2L, 3L); - - Chain decoded = ChainCodec.decode(ChainCodec.encode(chain)); - - assertThat(decoded.isEmpty(), is(false)); - Util.assertChainHas(decoded, 1L, 2L, 3L); - - assertSameSequenceChain(chain, decoded); - } - - @Test - public void testEmptyChain() { - Chain decoded = ChainCodec.decode(ChainCodec.encode(getChain(false))); - - assertThat(decoded.isEmpty(), is(true)); - } - - private static void assertSameSequenceChain(Chain original, Chain decoded) { - Iterator decodedIterator = decoded.iterator(); - for (Element element : original) { - assertEquals(((SequencedElement) element).getSequenceNumber(), - ((SequencedElement) decodedIterator.next()).getSequenceNumber()); - } - } -} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java deleted file mode 100644 index d8e1764146..0000000000 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.junit.Test; -import org.terracotta.runnel.Struct; -import org.terracotta.runnel.encoding.StructEncoder; - -import java.nio.ByteBuffer; -import java.util.Collections; - -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; -import static org.terracotta.runnel.StructBuilder.newStructBuilder; - -public class CommonConfigCodecTest { - - private static final CommonConfigCodec CODEC = new CommonConfigCodec(); - - @Test - public void testEncodeDecodeServerSideConfiguration() throws Exception { - ServerSideConfiguration serverSideConfiguration = - new ServerSideConfiguration("foo", Collections.singletonMap("bar", new ServerSideConfiguration.Pool(1))); - Struct serverSideConfigurationStruct = CODEC.injectServerSideConfiguration(newStructBuilder(), 10).getUpdatedBuilder().build(); - StructEncoder encoder = serverSideConfigurationStruct.encoder(); - CODEC.encodeServerSideConfiguration(encoder, serverSideConfiguration); - ByteBuffer byteBuffer = encoder.encode(); - byteBuffer.rewind(); - ServerSideConfiguration decodedServerSideConfiguration = - CODEC.decodeServerSideConfiguration(serverSideConfigurationStruct.decoder(byteBuffer)); - assertThat(decodedServerSideConfiguration.getDefaultServerResource(), is("foo")); - assertThat(decodedServerSideConfiguration.getResourcePools(), hasKey("bar")); - } -} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java deleted file mode 100644 index 3d09962a86..0000000000 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; -import org.ehcache.clustered.common.internal.store.Chain; -import org.hamcrest.Matchers; -import org.junit.Test; - -import java.util.Date; -import java.util.HashSet; -import java.util.Set; - -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.allInvalidationDone; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateAll; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateHash; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.failure; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.getResponse; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.hashInvalidationDone; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.mapValue; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.prepareForDestroy; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverInvalidateHash; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.success; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.ehcache.clustered.common.internal.store.Util.getChain; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -public class ResponseCodecTest { - - private static final ResponseCodec RESPONSE_CODEC = new ResponseCodec(); - private static final long KEY = 42L; - private static final int INVALIDATION_ID = 134; - - @Test - public void testFailureResponseCodec() { - EhcacheEntityResponse failure = failure(new IllegalMessageException("Test Exception")); - - EhcacheEntityResponse decoded = RESPONSE_CODEC.decode(RESPONSE_CODEC.encode(failure)); - - assertThat(((EhcacheEntityResponse.Failure)decoded).getCause().getMessage(), is("Test Exception")); - } - - @Test - public void testGetResponseCodec() { - EhcacheEntityResponse getResponse = getResponse(getChain(false, - createPayload(1L), createPayload(11L), createPayload(111L))); - - EhcacheEntityResponse decoded = RESPONSE_CODEC.decode(RESPONSE_CODEC.encode(getResponse)); - - Chain decodedChain = ((EhcacheEntityResponse.GetResponse) decoded).getChain(); - - Util.assertChainHas(decodedChain, 1L, 11L, 111L); - } - - @Test - public void testMapValueCodec() throws Exception { - Object subject = new Integer(10); - EhcacheEntityResponse mapValue = mapValue(subject); - EhcacheEntityResponse.MapValue decoded = - (EhcacheEntityResponse.MapValue) RESPONSE_CODEC.decode(RESPONSE_CODEC.encode(mapValue)); - assertThat(decoded.getValue(), equalTo(subject)); - } - - @Test - public void testSuccess() throws Exception { - byte[] encoded = RESPONSE_CODEC.encode(success()); - assertThat(RESPONSE_CODEC.decode(encoded), Matchers.sameInstance(success())); - } - - @Test - public void testHashInvalidationDone() throws Exception { - EhcacheEntityResponse.HashInvalidationDone response = hashInvalidationDone(KEY); - byte[] encoded = RESPONSE_CODEC.encode(response); - EhcacheEntityResponse.HashInvalidationDone decodedResponse = (EhcacheEntityResponse.HashInvalidationDone) RESPONSE_CODEC.decode(encoded); - - assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.HASH_INVALIDATION_DONE)); - assertThat(decodedResponse.getKey(), is(KEY)); - } - - @Test - public void testAllInvalidationDone() throws Exception { - EhcacheEntityResponse.AllInvalidationDone response = allInvalidationDone(); - - byte[] encoded = RESPONSE_CODEC.encode(response); - EhcacheEntityResponse.AllInvalidationDone decodedResponse = (EhcacheEntityResponse.AllInvalidationDone) RESPONSE_CODEC.decode(encoded); - - assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.ALL_INVALIDATION_DONE)); - } - - @Test - public void testClientInvalidateHash() throws Exception { - EhcacheEntityResponse.ClientInvalidateHash response = clientInvalidateHash(KEY, INVALIDATION_ID); - byte[] encoded = RESPONSE_CODEC.encode(response); - EhcacheEntityResponse.ClientInvalidateHash decodedResponse = (EhcacheEntityResponse.ClientInvalidateHash) RESPONSE_CODEC.decode(encoded); - - assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.CLIENT_INVALIDATE_HASH)); - assertThat(decodedResponse.getKey(), is(KEY)); - assertThat(decodedResponse.getInvalidationId(), is(INVALIDATION_ID)); - } - - @Test - public void testClientInvalidateAll() throws Exception { - EhcacheEntityResponse.ClientInvalidateAll response = clientInvalidateAll(INVALIDATION_ID); - - byte[] encoded = RESPONSE_CODEC.encode(response); - EhcacheEntityResponse.ClientInvalidateAll decodedResponse = (EhcacheEntityResponse.ClientInvalidateAll) RESPONSE_CODEC.decode(encoded); - - assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.CLIENT_INVALIDATE_ALL)); - assertThat(decodedResponse.getInvalidationId(), is(INVALIDATION_ID)); - } - - @Test - public void testServerInvalidateHash() throws Exception { - EhcacheEntityResponse.ServerInvalidateHash response = serverInvalidateHash(KEY); - - byte[] encoded = RESPONSE_CODEC.encode(response); - EhcacheEntityResponse.ServerInvalidateHash decodedResponse = (EhcacheEntityResponse.ServerInvalidateHash) RESPONSE_CODEC.decode(encoded); - - assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.SERVER_INVALIDATE_HASH)); - assertThat(decodedResponse.getKey(), is(KEY)); - } - - @Test - public void testPrepareForDestroy() throws Exception { - Set storeIdentifiers = new HashSet<>(); - storeIdentifiers.add("store1"); - storeIdentifiers.add("anotherStore"); - EhcacheEntityResponse.PrepareForDestroy response = prepareForDestroy(storeIdentifiers); - - byte[] encoded = RESPONSE_CODEC.encode(response); - EhcacheEntityResponse.PrepareForDestroy decodedResponse = (EhcacheEntityResponse.PrepareForDestroy) RESPONSE_CODEC.decode(encoded); - - assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.PREPARE_FOR_DESTROY)); - assertThat(decodedResponse.getStores(), is(storeIdentifiers)); - } - - @Test - public void testResolveRequest() throws Exception { - long hash = 42L; - EhcacheEntityResponse.ResolveRequest response = new EhcacheEntityResponse.ResolveRequest(hash, getChain(false, - createPayload(1L), createPayload(11L), createPayload(111L))); - - byte[] encoded = RESPONSE_CODEC.encode(response); - EhcacheEntityResponse.ResolveRequest decodedResponse = (EhcacheEntityResponse.ResolveRequest) RESPONSE_CODEC.decode(encoded); - - assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.RESOLVE_REQUEST)); - assertThat(decodedResponse.getKey(), is(42L)); - Util.assertChainHas(decodedResponse.getChain(), 1L, 11L, 111L); - } -} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java deleted file mode 100644 index 07c350d5cd..0000000000 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.junit.Test; - -import static java.nio.ByteBuffer.wrap; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.ehcache.clustered.common.internal.store.Util.getChain; -import static org.ehcache.clustered.common.internal.store.Util.readPayLoad; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -public class ServerStoreOpCodecTest { - - private static final ServerStoreOpCodec STORE_OP_CODEC = new ServerStoreOpCodec(); - - @Test - public void testAppendMessageCodec() { - - ServerStoreOpMessage.AppendMessage appendMessage = new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L)); - - byte[] encoded = STORE_OP_CODEC.encode(appendMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(appendMessage.getMessageType(), wrap(encoded)); - ServerStoreOpMessage.AppendMessage decodedAppendMessage = (ServerStoreOpMessage.AppendMessage) decodedMsg; - - assertThat(decodedAppendMessage.getKey(), is(1L)); - assertThat(readPayLoad(decodedAppendMessage.getPayload()), is(1L)); - assertThat(decodedAppendMessage.getMessageType(), is(EhcacheMessageType.APPEND)); - } - - @Test - public void testGetMessageCodec() { - ServerStoreOpMessage getMessage = new ServerStoreOpMessage.GetMessage(2L); - - byte[] encoded = STORE_OP_CODEC.encode(getMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(getMessage.getMessageType(), wrap(encoded)); - ServerStoreOpMessage.GetMessage decodedGetMessage = (ServerStoreOpMessage.GetMessage) decodedMsg; - - assertThat(decodedGetMessage.getKey(), is(2L)); - assertThat(decodedGetMessage.getMessageType(), is(EhcacheMessageType.GET_STORE)); - } - - @Test - public void testGetAndAppendMessageCodec() { - ServerStoreOpMessage getAndAppendMessage = new ServerStoreOpMessage.GetAndAppendMessage(10L, createPayload(10L)); - - byte[] encoded = STORE_OP_CODEC.encode(getAndAppendMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(getAndAppendMessage.getMessageType(), wrap(encoded)); - ServerStoreOpMessage.GetAndAppendMessage decodedGetAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage) decodedMsg; - - assertThat(decodedGetAndAppendMessage.getKey(), is(10L)); - assertThat(readPayLoad(decodedGetAndAppendMessage.getPayload()), is(10L)); - assertThat(decodedGetAndAppendMessage.getMessageType(), is(EhcacheMessageType.GET_AND_APPEND)); - } - - @Test - public void testReplaceAtHeadMessageCodec() { - ServerStoreOpMessage replaceAtHeadMessage = new ServerStoreOpMessage.ReplaceAtHeadMessage(10L, - getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L)), - getChain(false, createPayload(2000L))); - - byte[] encoded = STORE_OP_CODEC.encode(replaceAtHeadMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(replaceAtHeadMessage.getMessageType(), wrap(encoded)); - ServerStoreOpMessage.ReplaceAtHeadMessage decodedReplaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) decodedMsg; - - assertThat(decodedReplaceAtHeadMessage.getKey(), is(10L)); - Util.assertChainHas(decodedReplaceAtHeadMessage.getExpect(), 10L, 100L, 1000L); - Util.assertChainHas(decodedReplaceAtHeadMessage.getUpdate(), 2000L); - assertThat(decodedReplaceAtHeadMessage.getMessageType(), is(EhcacheMessageType.REPLACE)); - } - - @Test - public void testClearMessageCodec() throws Exception { - ServerStoreOpMessage clearMessage = new ServerStoreOpMessage.ClearMessage(); - - byte[] encoded = STORE_OP_CODEC.encode(clearMessage); - ServerStoreOpMessage decodedMsg = (ServerStoreOpMessage) STORE_OP_CODEC.decode(clearMessage.getMessageType(), wrap(encoded)); - - assertThat(decodedMsg.getMessageType(), is(EhcacheMessageType.CLEAR)); - } - - @Test - public void testClientInvalidationAckMessageCodec() throws Exception { - ServerStoreOpMessage invalidationAckMessage = new ServerStoreOpMessage.ClientInvalidationAck(42L,123); - - byte[] encoded = STORE_OP_CODEC.encode(invalidationAckMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(invalidationAckMessage.getMessageType(), wrap(encoded)); - ServerStoreOpMessage.ClientInvalidationAck decodedInvalidationAckMessage = (ServerStoreOpMessage.ClientInvalidationAck)decodedMsg; - - assertThat(decodedInvalidationAckMessage.getKey(), is(42L)); - assertThat(decodedInvalidationAckMessage.getInvalidationId(), is(123)); - assertThat(decodedInvalidationAckMessage.getMessageType(), is(EhcacheMessageType.CLIENT_INVALIDATION_ACK)); - } -} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/Util.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/Util.java deleted file mode 100644 index 94a4b350c5..0000000000 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/Util.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; - -import java.util.Iterator; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.ehcache.clustered.common.internal.store.Util.readPayLoad; - -/** - * - */ -public final class Util { - - private Util() { - } - - public static void assertChainHas(Chain chain, long... payLoads) { - Iterator elements = chain.iterator(); - for (long payLoad : payLoads) { - assertThat(readPayLoad(elements.next().getPayload()), is(Long.valueOf(payLoad))); - } - assertThat(elements.hasNext(), is(false)); - } -} diff --git a/clustered/ehcache-client/build.gradle b/clustered/ehcache-client/build.gradle new file mode 100644 index 0000000000..bff6116577 --- /dev/null +++ b/clustered/ehcache-client/build.gradle @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.clustered-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Client Side Clustering module' + description = 'The Client Side Clustering module of Ehcache 3' + } +} + +dependencies { + api project(':ehcache-impl') + api project(':ehcache-xml') + + implementation project(':clustered:ehcache-common') + implementation "org.terracotta:entity-client-api:$terracottaApisVersion" + implementation "org.terracotta:runnel:$terracottaPlatformVersion" + implementation "org.terracotta:lease-api:$terracottaPlatformVersion" + implementation "org.terracotta.dynamic-config.entities:dynamic-config-topology-entity-client:$terracottaPlatformVersion" + implementation "org.terracotta:connection-api:$terracottaApisVersion" + + compileOnly 'org.osgi:org.osgi.service.component.annotations:1.3.0' + + testImplementation(project(':ehcache-transactions')) { + capabilities { + requireCapability("org.ehcache:ehcache-transactions-modules") + } + } + testImplementation(project(':clustered:server:ehcache-entity')) { + exclude group: 'org.terracotta.internal', module: 'tc-config-parser' + } + testImplementation project(':clustered:server:ehcache-service') + testImplementation "org.terracotta:client-message-tracker:$terracottaPlatformVersion" + testImplementation project(':clustered:test-utils') + testImplementation "org.terracotta:entity-test-lib:$terracottaPassthroughTestingVersion" + testImplementation "org.terracotta:passthrough-server:$terracottaPassthroughTestingVersion" + testImplementation "org.terracotta.internal:common:$terracottaCoreVersion" + testImplementation "org.terracotta:passthrough-leased-connection-api:$terracottaPlatformVersion" + testImplementation (group: 'org.codehaus.btm', name: 'btm', version: '2.1.4') { + exclude group:'org.slf4j', module:'slf4j-api' + } + testImplementation testFixtures(project(':ehcache-xml')) + testImplementation ("org.terracotta:statistics:$parent.statisticVersion") +} diff --git a/clustered/client/config/checkstyle-suppressions.xml b/clustered/ehcache-client/config/checkstyle-suppressions.xml similarity index 100% rename from clustered/client/config/checkstyle-suppressions.xml rename to clustered/ehcache-client/config/checkstyle-suppressions.xml diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourcePool.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourcePool.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourcePool.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourcePool.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourceType.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourceType.java similarity index 96% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourceType.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourceType.java index ae978d73b7..fdfabe8eef 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourceType.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteredResourceType.java @@ -48,9 +48,8 @@ final class Types { * * @return an array containing the constants of {@code ClusteredResourceType} in the order declared */ - @SuppressWarnings("unchecked") public static ClusteredResourceType[] values() { - return new ClusteredResourceType[] {DEDICATED, SHARED, UNKNOWN}; // unchecked + return new ClusteredResourceType[] {DEDICATED, SHARED, UNKNOWN}; } /** diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java similarity index 86% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java index 7faa760d85..5c39d03ce3 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java @@ -23,7 +23,7 @@ /** * {@link ServiceConfiguration} for the {@link ClusteredStore}. */ -public class ClusteredStoreConfiguration implements ServiceConfiguration { +public class ClusteredStoreConfiguration implements ServiceConfiguration { private final Consistency consistency; @@ -59,4 +59,14 @@ public Class getServiceType() { public Consistency getConsistency() { return consistency; } + + @Override + public Consistency derive() { + return getConsistency(); + } + + @Override + public ClusteredStoreConfiguration build(Consistency representation) { + return new ClusteredStoreConfiguration(representation); + } } diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java new file mode 100644 index 0000000000..d8082c55b6 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java @@ -0,0 +1,463 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.config; + +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.ConnectionSource; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.CacheManagerConfiguration; +import org.ehcache.core.HumanReadable; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +import java.net.InetSocketAddress; +import java.net.URI; +import java.time.Duration; +import java.util.Objects; +import java.util.Properties; + +import org.ehcache.clustered.common.ServerSideConfiguration; + +import static java.util.Objects.requireNonNull; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.seededFrom; + +/** + * Specifies the configuration for a {@link ClusteringService}. + */ +public class ClusteringServiceConfiguration + implements ServiceCreationConfiguration, + CacheManagerConfiguration, + HumanReadable { + + /** + * An enumeration of configurable client to server connection behaviors. + */ + public enum ClientMode { + /** + * Connect to the cluster with no expectations regarding the cluster state. + */ + CONNECT, + /** + * Connect to the cluster and validate the cluster state is compatible with {@link #getServerConfiguration()}. + */ + EXPECTING, + /** + * Connect to the cluster and create or validate the cluster state is compatible with {@link #getServerConfiguration()}. + */ + AUTO_CREATE, + /** + * Auto creates the necessary state on reconnecting to a cluster as well as on initial connection like {@link #AUTO_CREATE}. + */ + AUTO_CREATE_ON_RECONNECT + } + + public static final ClientMode DEFAULT_CLIENT_MODE = ClientMode.CONNECT; + @Deprecated + public static final boolean DEFAULT_AUTOCREATE = DEFAULT_CLIENT_MODE.equals(ClientMode.AUTO_CREATE); + + private final ConnectionSource connectionSource; + private final ClientMode clientMode; + private final ServerSideConfiguration serverConfiguration; + private final Timeouts timeouts; + private final Properties properties; + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param clusterUri the non-{@code null} URI identifying the cluster server + * + * @throws NullPointerException if {@code clusterUri} is {@code null} + * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(URI)} + */ + @Deprecated + public ClusteringServiceConfiguration(URI clusterUri) { + this(clusterUri, Timeouts.DEFAULT); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param servers the non-{@code null} iterable of servers in the cluster + * @param clusterTierManager the non-{@code null} cluster tier manager identifier + * + * @throws NullPointerException if {@code servers} is {@code null} + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(Iterable, String)} + */ + @Deprecated + public ClusteringServiceConfiguration(Iterable servers, String clusterTierManager) { + this(servers, clusterTierManager, Timeouts.DEFAULT); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param clusterUri the non-{@code null} URI identifying the cluster server + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * + * @throws NullPointerException if {@code clusterUri} is {@code null} + * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(URI)} + */ + @Deprecated + public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts) { + this(clusterUri, timeouts, null); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param servers the non-{@code null} iterable of servers in the cluster + * @param clusterTierManager the non-{@code null} cluster tier manager identifier + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * + * @throws NullPointerException if {@code servers} is {@code null} + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(Iterable, String)} + */ + @Deprecated + public ClusteringServiceConfiguration(Iterable servers, String clusterTierManager, Timeouts timeouts) { + this(servers, clusterTierManager, timeouts, null); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param clusterUri the non-{@code null} URI identifying the cluster server + * @param serverConfig the server side entity configuration required + * + * @throws NullPointerException if {@code clusterUri} is {@code null} + * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(URI)} + */ + @Deprecated + public ClusteringServiceConfiguration(URI clusterUri, ServerSideConfiguration serverConfig) { + this(clusterUri, Timeouts.DEFAULT, serverConfig); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param clusterUri the non-{@code null} URI identifying the cluster server + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * @param serverConfig the server side entity configuration required + * + * @throws NullPointerException if {@code clusterUri} is {@code null} + * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(URI)} + */ + @Deprecated + public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts, ServerSideConfiguration serverConfig) { + this(clusterUri, timeouts, DEFAULT_AUTOCREATE, serverConfig); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param servers the non-{@code null} iterable of servers in the cluster + * @param clusterTierManager the non-{@code null} cluster tier manager identifier + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * @param serverConfig the server side entity configuration required + * + * @throws NullPointerException if {@code servers} is {@code null} + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(Iterable, String)} + */ + @Deprecated + public ClusteringServiceConfiguration(Iterable servers, String clusterTierManager, Timeouts timeouts, + ServerSideConfiguration serverConfig) { + this(servers, clusterTierManager, timeouts, DEFAULT_AUTOCREATE, serverConfig); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param clusterUri the non-{@code null} URI identifying the cluster server + * @param autoCreate {@code true} if server components should be auto created + * @param serverConfig the server side entity configuration required + * + * @throws NullPointerException if {@code clusterUri} is {@code null} + * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(URI)} + */ + @Deprecated + public ClusteringServiceConfiguration(URI clusterUri, boolean autoCreate, ServerSideConfiguration serverConfig) { + this(clusterUri, Timeouts.DEFAULT, autoCreate, serverConfig); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param servers the non-{@code null} iterable of servers in the cluster + * @param clusterTierManager the non-{@code null} cluster tier manager identifier + * @param autoCreate {@code true} if server components should be auto created + * @param serverConfig the server side entity configuration required + * + * @throws NullPointerException if {@code servers} is {@code null} + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(Iterable, String)} + */ + @Deprecated + public ClusteringServiceConfiguration(Iterable servers, String clusterTierManager, boolean autoCreate, + ServerSideConfiguration serverConfig) { + this(servers, clusterTierManager, Timeouts.DEFAULT, autoCreate, serverConfig); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param clusterUri the non-{@code null} URI identifying the cluster server + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * @param autoCreate {@code true} if server components should be auto created + * @param serverConfig the server side entity configuration required + * + * @throws NullPointerException if {@code clusterUri} is {@code null} + * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(URI)} + */ + @Deprecated + public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts, boolean autoCreate, ServerSideConfiguration serverConfig) { + this(clusterUri, timeouts, autoCreate, serverConfig, new Properties()); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param servers the non-{@code null} iterable of servers in the cluster + * @param clusterTierManager the non-{@code null} cluster tier manager identifier + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * @param autoCreate {@code true} if server components should be auto created + * @param serverConfig the server side entity configuration required + * + * @throws NullPointerException if {@code servers} is {@code null} + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(Iterable, String)} + */ + @Deprecated + public ClusteringServiceConfiguration(Iterable servers, String clusterTierManager, Timeouts timeouts, + boolean autoCreate, ServerSideConfiguration serverConfig) { + this(servers, clusterTierManager, timeouts, autoCreate, serverConfig, new Properties()); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param clusterUri the non-{@code null} URI identifying the cluster server + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * @param autoCreate {@code true} if server components should be auto created + * @param serverConfig the server side entity configuration required + * @param properties the non-{@code null} connection Properties + * + * @throws NullPointerException if {@code clusterUri} is {@code null} + * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(URI)} + */ + @Deprecated + public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts, boolean autoCreate, ServerSideConfiguration serverConfig, Properties properties) { + this(new ConnectionSource.ClusterUri(clusterUri), timeouts, autoCreate, serverConfig, properties); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param servers the non-{@code null} iterable of servers in the cluster + * @param clusterTierManager the non-{@code null} cluster tier manager identifier + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * @param autoCreate {@code true} if server components should be auto created + * @param serverConfig the server side entity configuration required + * @param properties the non-{@code null} connection Properties + * + * @throws NullPointerException if {@code servers} is {@code null} + * @deprecated In favor of {@link ClusteringServiceConfigurationBuilder#cluster(Iterable, String)} + */ + @Deprecated + public ClusteringServiceConfiguration(Iterable servers, String clusterTierManager, Timeouts timeouts, + boolean autoCreate, ServerSideConfiguration serverConfig, Properties properties) { + this(new ConnectionSource.ServerList(servers, clusterTierManager), timeouts, autoCreate, serverConfig, properties); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param connectionSource the non-{@code null} {@code ConnectionSource} identifying the source of connection to servers in the cluster + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * @param autoCreate {@code true} if server components should be auto created + * @param serverSideConfiguration the server side entity configuration required + * @param properties the non-{@code null} connection Properties + * + * @throws NullPointerException if {@code servers} is {@code null} + * @deprecated In favor of {@link #ClusteringServiceConfiguration(ConnectionSource, Timeouts, ClientMode, ServerSideConfiguration, Properties)} )} + */ + @Deprecated + public ClusteringServiceConfiguration(ConnectionSource connectionSource, Timeouts timeouts, boolean autoCreate, + ServerSideConfiguration serverSideConfiguration, Properties properties) { + this(connectionSource, timeouts, + autoCreate ? ClientMode.AUTO_CREATE : (serverSideConfiguration == null ? ClientMode.CONNECT : ClientMode.EXPECTING), + serverSideConfiguration, properties); + } + + /** + * Creates a {@code ClusteringServiceConfiguration} from the properties provided. + * + * @param connectionSource the non-{@code null} {@code ConnectionSource} identifying the source of connection to servers in the cluster + * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations + * @param clientMode behavioral mode when connecting to the cluster + * @param serverSideConfiguration the server side entity configuration required + * @param properties the non-{@code null} connection Properties + */ + public ClusteringServiceConfiguration(ConnectionSource connectionSource, Timeouts timeouts, ClientMode clientMode, + ServerSideConfiguration serverSideConfiguration, Properties properties) { + this.connectionSource = requireNonNull(connectionSource); + this.clientMode = requireNonNull(clientMode); + this.serverConfiguration = serverSideConfiguration; + this.timeouts = requireNonNull(timeouts, "Operation timeouts cannot be null"); + this.properties = (Properties) requireNonNull(properties, "Properties cannot be null").clone(); + } + + protected ClusteringServiceConfiguration(ClusteringServiceConfiguration baseConfig) { + Objects.requireNonNull(baseConfig, "Base configuration cannot be null"); + this.connectionSource = baseConfig.getConnectionSource(); + this.timeouts = baseConfig.getTimeouts(); + this.clientMode = baseConfig.getClientMode(); + this.serverConfiguration = baseConfig.getServerConfiguration(); + this.properties = baseConfig.getProperties(); + } + + /** + * The {@code URI} of the cluster that will be connected to. + * + * @return the cluster {@code URI} + */ + public URI getClusterUri() { + return connectionSource.getClusterUri(); + } + + /** + * The {@code ConnectionSource} of the cluster, containing either a {@code URI}, or an {@code Iterable} + * of the servers in the cluster. + * + * @return a cluster {@code ConnectionSource} + */ + public ConnectionSource getConnectionSource() { + return connectionSource; + } + + /** + * Returns {@code true} is server side components should be automatically created. + * + * @return {@code true} is auto-create is enabled + * @deprecated Deprecated in favor of {@link #getClientMode()} + */ + @Deprecated + public boolean isAutoCreate() { + final ClientMode clientMode = getClientMode(); + return ClientMode.AUTO_CREATE.equals(clientMode) || ClientMode.AUTO_CREATE_ON_RECONNECT.equals(clientMode); + } + + /** + * Returns the client connection mode. + * + * @return the client mode + */ + public ClientMode getClientMode() { + return clientMode; + } + + /** + * The default server resource to use for caches and pools, or {@code null} if one is not defined. + * + * @return the default server resource + */ + public ServerSideConfiguration getServerConfiguration() { + return serverConfiguration; + } + + /** + * The timeouts for all cache operations + * + * @return the cache timeouts + */ + public Timeouts getTimeouts() { + return timeouts; + } + + /** + * The {@code Properties} for the connection. + * + * @return the connection {@code Properties} + */ + public Properties getProperties() { + return (Properties) properties.clone(); + } + + /** + * The timeout for cache read operations. + * + * @return the cache read operation timeout + * + * @deprecated Use {@link #getTimeouts()} + */ + @Deprecated + public Duration getReadOperationTimeout() { + return timeouts.getReadOperationTimeout(); + } + + @Override + public Class getServiceType() { + return ClusteringService.class; + } + + @SuppressWarnings("unchecked") + @Override + public CacheManagerBuilder builder(CacheManagerBuilder other) { + return (CacheManagerBuilder) other.using(this); // unchecked + } + + @Override + public String readableString() { + return this.getClass().getName() + ":\n " + + getConnectionSource() + "\n " + + "timeouts: " + getTimeouts()+ "\n " + + "clientMode: " + getClientMode() + "\n " + + "defaultServerResource: " + (serverConfiguration == null ? null : serverConfiguration.getDefaultServerResource()) + "\n " + + readablePoolsString(); + } + + private String readablePoolsString() { + StringBuilder pools = new StringBuilder("resourcePools:\n"); + if (serverConfiguration != null) { + serverConfiguration.getResourcePools().forEach((key, value) -> { + pools.append(" "); + pools.append(key); + pools.append(": "); + pools.append(value); + pools.append("\n"); + }); + } else { + pools.append(" None."); + } + return pools.toString(); + } + + @Override + public ClusteringServiceConfigurationBuilder derive() { + return seededFrom(this); + } + + @Override + public ClusteringServiceConfiguration build(ClusteringServiceConfigurationBuilder representation) { + return representation.build(); + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/DedicatedClusteredResourcePool.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/DedicatedClusteredResourcePool.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/DedicatedClusteredResourcePool.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/DedicatedClusteredResourcePool.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/SharedClusteredResourcePool.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/SharedClusteredResourcePool.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/SharedClusteredResourcePool.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/SharedClusteredResourcePool.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/Timeouts.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/Timeouts.java similarity index 95% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/Timeouts.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/Timeouts.java index 44ba00bc09..10d755c5de 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/Timeouts.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/Timeouts.java @@ -20,7 +20,6 @@ import java.time.Duration; -import java.time.temporal.ChronoUnit; import java.util.function.LongSupplier; /** @@ -30,8 +29,9 @@ public final class Timeouts { public static final Duration DEFAULT_OPERATION_TIMEOUT = Duration.ofSeconds(5); + public static final Duration DEFAULT_CONNECTION_TIMEOUT = Duration.ofSeconds(150); public static final Duration INFINITE_TIMEOUT = Duration.ofNanos(Long.MAX_VALUE); - public static final Timeouts DEFAULT = new Timeouts(DEFAULT_OPERATION_TIMEOUT, DEFAULT_OPERATION_TIMEOUT, INFINITE_TIMEOUT); + public static final Timeouts DEFAULT = new Timeouts(DEFAULT_OPERATION_TIMEOUT, DEFAULT_OPERATION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT); private final Duration readOperationTimeout; private final Duration writeOperationTimeout; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilder.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilder.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilder.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilder.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java new file mode 100644 index 0000000000..3c0e3722d1 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java @@ -0,0 +1,275 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.config.builders; + +import java.net.InetSocketAddress; +import java.net.URI; + +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.Objects; +import java.util.Properties; +import java.util.concurrent.TimeUnit; +import java.util.function.UnaryOperator; + +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration.ClientMode; +import org.ehcache.clustered.client.config.Timeouts; +import org.ehcache.clustered.client.internal.ConnectionSource; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.config.Builder; + +import static org.ehcache.clustered.client.config.ClusteringServiceConfiguration.DEFAULT_CLIENT_MODE; + +/** + * A builder of ClusteringService configurations. + */ +public final class ClusteringServiceConfigurationBuilder implements Builder { + + private final ConnectionSource connectionSource; + private final Timeouts timeouts; + private final ClientMode clientMode; + private final ServerSideConfigurationBuilder serverSideConfiguration; + private final Properties properties; + + /** + * Creates a new builder connecting to the given cluster. + * + * @param clusterUri cluster URI + * + * @return a clustering service configuration builder + */ + public static ClusteringServiceConfigurationBuilder cluster(URI clusterUri) { + return new ClusteringServiceConfigurationBuilder(new ConnectionSource.ClusterUri(clusterUri), TimeoutsBuilder.timeouts().build(), DEFAULT_CLIENT_MODE, null, new Properties()); + } + + /** + * Creates a new builder connecting to the given cluster. + * + * @param servers the non-{@code null} iterable of servers in the cluster + * @param clusterTierManager the non-{@code null} cluster tier manager identifier + * + * @return a clustering service configuration builder + */ + public static ClusteringServiceConfigurationBuilder cluster(Iterable servers, String clusterTierManager) { + return new ClusteringServiceConfigurationBuilder(new ConnectionSource.ServerList(servers, clusterTierManager), TimeoutsBuilder.timeouts().build(), DEFAULT_CLIENT_MODE, null, new Properties()); + } + + /** + * Creates a new builder seeded from an existing configuration. + * + * @param configuration existing clustering configuration + * @return a clustering service configuration builder + */ + public static ClusteringServiceConfigurationBuilder seededFrom(ClusteringServiceConfiguration configuration) { + + ServerSideConfiguration serverSideConfiguration = configuration.getServerConfiguration(); + if (serverSideConfiguration == null) { + return new ClusteringServiceConfigurationBuilder(configuration.getConnectionSource(), configuration.getTimeouts(), + configuration.getClientMode(), null, configuration.getProperties()); + } else { + return new ClusteringServiceConfigurationBuilder(configuration.getConnectionSource(), configuration.getTimeouts(), + configuration.getClientMode(), new ServerSideConfigurationBuilder(serverSideConfiguration), configuration.getProperties()); + } + } + + private ClusteringServiceConfigurationBuilder(ConnectionSource connectionSource, Timeouts timeouts, ClientMode clientMode, ServerSideConfigurationBuilder serverSideConfiguration, Properties properties) { + this.connectionSource = connectionSource; + this.timeouts = Objects.requireNonNull(timeouts, "Timeouts can't be null"); + this.clientMode = clientMode; + this.serverSideConfiguration = serverSideConfiguration; + this.properties = properties; + } + + /** + * Reconfigure to connect to a different URI. + * + * @return a clustering service configuration builder + */ + public ClusteringServiceConfigurationBuilder usingUri(URI clusterUri) { + return new ClusteringServiceConfigurationBuilder(new ConnectionSource.ClusterUri(clusterUri), timeouts, clientMode, serverSideConfiguration, properties); + } + + /** + * Reconfigure to connect to a different cluster. + * + * @return a clustering service configuration builder + */ + public ClusteringServiceConfigurationBuilder usingServers(Iterable servers) { + return new ClusteringServiceConfigurationBuilder(new ConnectionSource.ServerList(servers, connectionSource.getClusterTierManager()), timeouts, clientMode, serverSideConfiguration, properties); + } + + /** + * Reconfigure to connect to a different cluster and manager name. + * + * @return a clustering service configuration builder + */ + public ClusteringServiceConfigurationBuilder usingServers(Iterable servers, String clusterTierManager) { + return new ClusteringServiceConfigurationBuilder(new ConnectionSource.ServerList(servers, clusterTierManager), timeouts, clientMode, serverSideConfiguration, properties); + } + + /** + * Support connection to an existing entity or create if the entity if absent. + * + * @return a clustering service configuration builder + * @deprecated in favor of {@link ClusteringServiceConfigurationBuilder#autoCreate(UnaryOperator)} + */ + @Deprecated + public ServerSideConfigurationBuilder autoCreate() { + return new ServerSideConfigurationBuilder(new ClusteringServiceConfigurationBuilder(this.connectionSource, this.timeouts, ClientMode.AUTO_CREATE, serverSideConfiguration, properties)); + } + + /** + * Only support connection to an existing entity. + * + * @return a clustering service configuration builder + * @deprecated in favor of {@link ClusteringServiceConfigurationBuilder#expecting(UnaryOperator)} + */ + @Deprecated + public ServerSideConfigurationBuilder expecting() { + return new ServerSideConfigurationBuilder(new ClusteringServiceConfigurationBuilder(this.connectionSource, this.timeouts, ClientMode.EXPECTING, serverSideConfiguration, properties)); + } + + /** + * Support connection to an existing entity or create if the entity if absent. + *

+ * An empty server-side configuration can be created by performing no operations on the supplied builder: + * {@code builder.autoCreate(b -> b)} + * + * @return a clustering service configuration builder + */ + public ClusteringServiceConfigurationBuilder autoCreate(UnaryOperator serverSideConfig) { + return new ClusteringServiceConfigurationBuilder(this.connectionSource, this.timeouts, ClientMode.AUTO_CREATE, + serverSideConfig.apply(new ServerSideConfigurationBuilder()), properties); + } + + /** + * Support connection to an existing entity or create if the entity if absent on initial connection or any subsequent reconnect attempt. + *

+ * An empty server-side configuration can be created by performing no operations on the supplied builder: + * {@code builder.autoCreateOnReconnect(b -> b)} + * + * @return a clustering service configuration builder + */ + public ClusteringServiceConfigurationBuilder autoCreateOnReconnect(UnaryOperator serverSideConfig) { + return new ClusteringServiceConfigurationBuilder(this.connectionSource, this.timeouts, ClientMode.AUTO_CREATE_ON_RECONNECT, + serverSideConfig.apply(new ServerSideConfigurationBuilder()), properties); + } + + /** + * Only support connection to an existing entity. + *

+ * An empty server-side configuration can be requested by performing no operations on the supplied builder: + * {@code builder.expecting(b -> b)} + * + * @return a clustering service configuration builder + */ + public ClusteringServiceConfigurationBuilder expecting(UnaryOperator serverSideConfig) { + return new ClusteringServiceConfigurationBuilder(this.connectionSource, this.timeouts, ClientMode.EXPECTING, + serverSideConfig.apply(new ServerSideConfigurationBuilder()), properties); + } + + /** + * Adds timeouts. + * Read operations which time out return a result comparable to a cache miss. + * Write operations which time out won't do anything. + * Lifecycle operations which time out will fail with exception + * + * @param timeouts the amount of time permitted for all operations + * + * @return a clustering service configuration builder + * + * @throws NullPointerException if {@code timeouts} is {@code null} + */ + public ClusteringServiceConfigurationBuilder timeouts(Timeouts timeouts) { + return new ClusteringServiceConfigurationBuilder(connectionSource, timeouts, clientMode, serverSideConfiguration, properties); + } + + /** + * Adds timeouts. + * Read operations which time out return a result comparable to a cache miss. + * Write operations which time out won't do anything. + * Lifecycle operations which time out will fail with exception + * + * @param timeoutsBuilder the builder for amount of time permitted for all operations + * + * @return a clustering service configuration builder + * + * @throws NullPointerException if {@code timeouts} is {@code null} + */ + public ClusteringServiceConfigurationBuilder timeouts(Builder timeoutsBuilder) { + return new ClusteringServiceConfigurationBuilder(connectionSource, timeoutsBuilder.build(), clientMode, serverSideConfiguration, properties); + } + + /** + * Adds a read operation timeout. Read operations which time out return a result comparable to + * a cache miss. + * + * @param duration the amount of time permitted for read operations + * @param unit the time units for {@code duration} + * + * @return a clustering service configuration builder + * + * @throws NullPointerException if {@code unit} is {@code null} + * @throws IllegalArgumentException if {@code amount} is negative + * + * @deprecated Use {@link #timeouts(Timeouts)}. Note that calling this method will override any timeouts previously set + * by setting the read operation timeout to the specified value and everything else to its default. + */ + @Deprecated + public ClusteringServiceConfigurationBuilder readOperationTimeout(long duration, TimeUnit unit) { + Duration readTimeout = Duration.of(duration, toChronoUnit(unit)); + return timeouts(TimeoutsBuilder.timeouts().read(readTimeout).build()); + } + + @Override + public ClusteringServiceConfiguration build() { + if (serverSideConfiguration == null) { + return build(null); + } else { + return build(serverSideConfiguration.buildServerSideConfiguration()); + } + } + + /** + * Internal method to build a new {@link ClusteringServiceConfiguration} from the {@link ServerSideConfigurationBuilder}. + * + * @param serverSideConfiguration the {@code ServerSideConfiguration} to use + * + * @return a new {@code ClusteringServiceConfiguration} instance built from {@code this} + * {@code ClusteringServiceConfigurationBuilder} and the {@code serverSideConfiguration} provided + */ + ClusteringServiceConfiguration build(ServerSideConfiguration serverSideConfiguration) { + return new ClusteringServiceConfiguration(connectionSource, timeouts, clientMode, serverSideConfiguration, properties); + } + + private static ChronoUnit toChronoUnit(TimeUnit unit) { + if(unit == null) { + return null; + } + switch (unit) { + case NANOSECONDS: return ChronoUnit.NANOS; + case MICROSECONDS: return ChronoUnit.MICROS; + case MILLISECONDS: return ChronoUnit.MILLIS; + case SECONDS: return ChronoUnit.SECONDS; + case MINUTES: return ChronoUnit.MINUTES; + case HOURS: return ChronoUnit.HOURS; + case DAYS: return ChronoUnit.DAYS; + default: throw new AssertionError("Unknown unit: " + unit); + } + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ServerSideConfigurationBuilder.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ServerSideConfigurationBuilder.java similarity index 90% rename from clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ServerSideConfigurationBuilder.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ServerSideConfigurationBuilder.java index e956aec4fb..5b54784588 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ServerSideConfigurationBuilder.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/config/builders/ServerSideConfigurationBuilder.java @@ -39,6 +39,12 @@ public class ServerSideConfigurationBuilder implements Builder pools; + ServerSideConfigurationBuilder() { + this.clientSideBuilder = null; + this.defaultServerResource = null; + this.pools = emptyMap(); + } + ServerSideConfigurationBuilder(ClusteringServiceConfigurationBuilder clientSideBuilder) { if (clientSideBuilder == null) { throw new NullPointerException("clientSideBuilder can not be null"); @@ -48,6 +54,12 @@ public class ServerSideConfigurationBuilder implements Builder maintenanceHolds = new ConcurrentHashMap<>(); + private final Map fetchHolds = new ConcurrentHashMap<>(); + private final Executor asyncWorker; private final Timeouts entityTimeouts; - public ClusterTierManagerClientEntityFactory(Connection connection) { - this(connection, TimeoutsBuilder.timeouts().build()); + public ClusterTierManagerClientEntityFactory(Connection connection, Executor asyncWorker) { + this(connection, asyncWorker, TimeoutsBuilder.timeouts().build()); } - public ClusterTierManagerClientEntityFactory(Connection connection, Timeouts entityTimeouts) { + public ClusterTierManagerClientEntityFactory(Connection connection, Executor asyncWorker, Timeouts entityTimeouts) { this.connection = connection; + this.asyncWorker = requireNonNull(asyncWorker); this.entityTimeouts = entityTimeouts; } @@ -79,13 +86,30 @@ public boolean acquireLeadership(String entityIdentifier) { } } - public void abandonLeadership(String entityIdentifier) { + public boolean abandonAllHolds(String entityIdentifier, boolean healthyConnection) { + return abandonLeadership(entityIdentifier, healthyConnection) | abandonFetchHolds(entityIdentifier, healthyConnection); + } + + /** + * Proactively abandon leadership before closing connection. + * + * @param entityIdentifier the master entity identifier + * @return true of abandoned false otherwise + */ + public boolean abandonLeadership(String entityIdentifier, boolean healthyConnection) { Hold hold = maintenanceHolds.remove(entityIdentifier); - if (hold == null) { - throw new IllegalMonitorStateException("Leadership was never held"); - } else { - hold.unlock(); - } + return (hold != null) && healthyConnection && silentlyUnlock(hold, entityIdentifier); + } + + /** + * Proactively abandon any READ holds before closing connection. + * + * @param entityIdentifier the master entity identifier + * @return true of abandoned false otherwise + */ + private boolean abandonFetchHolds(String entityIdentifier, boolean healthyConnection) { + Hold hold = fetchHolds.remove(entityIdentifier); + return (hold != null) && healthyConnection && silentlyUnlock(hold, entityIdentifier); } /** @@ -109,7 +133,7 @@ public void create(final String identifier, final ServerSideConfiguration config throw new EntityBusyException("Unable to obtain maintenance lease for " + identifier); } - EntityRef ref = getEntityRef(identifier); + EntityRef ref = getEntityRef(identifier); try { ref.create(new ClusterTierManagerConfiguration(identifier, config)); } catch (EntityConfigurationException e) { @@ -162,6 +186,9 @@ public ClusterTierManagerClientEntity retrieve(String identifier, ServerSideConf if (!validated) { silentlyClose(entity, identifier); silentlyUnlock(fetchHold, identifier); + } else { + // track read holds as well so that we can explicitly abandon + fetchHolds.put(identifier, fetchHold); } } } @@ -174,7 +201,7 @@ public void destroy(final String identifier) throws EntityBusyException { throw new EntityBusyException("Unable to obtain maintenance lease for " + identifier); } - EntityRef ref = getEntityRef(identifier); + EntityRef ref = getEntityRef(identifier); destroyAllClusterTiers(ref, identifier); try { if (!ref.destroy()) { @@ -192,7 +219,8 @@ public void destroy(final String identifier) throws EntityBusyException { } } - private void destroyAllClusterTiers(EntityRef ref, String identifier) { + private void destroyAllClusterTiers(EntityRef ref, String identifier) { ClusterTierManagerClientEntity entity; try { entity = ref.fetchEntity(null); @@ -224,11 +252,13 @@ private void silentlyClose(ClusterTierManagerClientEntity entity, String identif } } - private void silentlyUnlock(Hold localMaintenance, String identifier) { + private boolean silentlyUnlock(Hold localMaintenance, String identifier) { try { localMaintenance.unlock(); + return true; } catch(Exception e) { LOGGER.error("Failed to unlock for id {}", identifier, e); + return false; } } @@ -236,7 +266,7 @@ private VoltronReadWriteLock createAccessLockFor(String entityIdentifier) { return new VoltronReadWriteLock(connection, "ClusterTierManagerClientEntityFactory-AccessLock-" + entityIdentifier); } - private EntityRef getEntityRef(String identifier) { + private EntityRef getEntityRef(String identifier) { try { return connection.getEntityRef(ClusterTierManagerClientEntity.class, ENTITY_VERSION, identifier); } catch (EntityNotProvidedException e) { @@ -247,30 +277,27 @@ private EntityRef entityRef; + ClientMode clientMode, boolean isReconnect) throws EntityNotFoundException, CachePersistenceException { + EntityRef entityRef; try { entityRef = connection.getEntityRef(InternalClusterTierClientEntity.class, ENTITY_VERSION, entityName(clusterTierManagerIdentifier, storeIdentifier)); } catch (EntityNotProvidedException e) { throw new AssertionError(e); } - if (autoCreate) { + if ((ClientMode.AUTO_CREATE.equals(clientMode) && !isReconnect) || ClientMode.AUTO_CREATE_ON_RECONNECT.equals(clientMode)) { while (true) { try { entityRef.create(new ClusterTierEntityConfiguration(clusterTierManagerIdentifier, storeIdentifier, clientStoreConfiguration)); } catch (EntityAlreadyExistsException e) { // Ignore - entity exists } catch (EntityConfigurationException e) { - throw new CachePersistenceException("Unable to create cluster tier", e); + throw new PerpetualCachePersistenceException("Unable to create cluster tier", e); } catch (EntityException e) { throw new AssertionError(e); } try { - InternalClusterTierClientEntity entity = entityRef.fetchEntity(null); - entity.setStoreIdentifier(storeIdentifier); - entity.setTimeouts(entityTimeouts); - return entity; + return entityRef.fetchEntity(new ClusterTierUserData(entityTimeouts, storeIdentifier, asyncWorker)); } catch (EntityNotFoundException e) { // Ignore - will try to create again } catch (EntityException e) { @@ -278,16 +305,19 @@ public ClusterTierClientEntity fetchOrCreateClusteredStoreEntity(String clusterT } } } else { - try { - InternalClusterTierClientEntity entity = entityRef.fetchEntity(null); - entity.setStoreIdentifier(storeIdentifier); - entity.setTimeouts(entityTimeouts); - return entity; - } catch (EntityNotFoundException e) { - throw e; - } catch (EntityException e) { - throw new AssertionError(e); - } + return fetchClusterTierClientEntity(storeIdentifier, entityRef); + } + } + + private ClusterTierClientEntity fetchClusterTierClientEntity(String storeIdentifier, + EntityRef entityRef) + throws EntityNotFoundException { + try { + return entityRef.fetchEntity(new ClusterTierUserData(entityTimeouts, storeIdentifier, asyncWorker)); + } catch (EntityNotFoundException e) { + throw e; + } catch (EntityException e) { + throw new AssertionError(e); } } @@ -306,4 +336,9 @@ public void destroyClusteredStoreEntity(String clusterTierManagerIdentifier, Str private static String entityName(String clusterTierManagerIdentifier, String storeIdentifier) { return clusterTierManagerIdentifier + "$" + storeIdentifier; } + + // For test purposes + public Map getMaintenanceHolds() { + return maintenanceHolds; + } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityService.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityService.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityService.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityService.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerCreationException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerCreationException.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerCreationException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerCreationException.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerNotFoundException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerNotFoundException.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerNotFoundException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerNotFoundException.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerValidationException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerValidationException.java similarity index 85% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerValidationException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerValidationException.java index ab91bb5cc7..e0aac0f71d 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerValidationException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ClusterTierManagerValidationException.java @@ -19,7 +19,9 @@ /** * Thrown to indicate a failure in validating an {@code Entity} supporting clustered operations. */ -public class ClusterTierManagerValidationException extends RuntimeException { +public class ClusterTierManagerValidationException extends PerpetualCachePersistenceException { + + private static final long serialVersionUID = -428725072152588216L; public ClusterTierManagerValidationException(String message) { super(message); @@ -28,8 +30,4 @@ public ClusterTierManagerValidationException(String message) { public ClusterTierManagerValidationException(String message, Throwable cause) { super(message, cause); } - - public ClusterTierManagerValidationException(Throwable cause) { - super(cause); - } } diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ConnectionSource.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ConnectionSource.java new file mode 100644 index 0000000000..1b5bafe547 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/ConnectionSource.java @@ -0,0 +1,190 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal; + +import org.terracotta.connection.ConnectionException; +import org.terracotta.connection.entity.Entity; +import org.terracotta.connection.entity.EntityRef; +import org.terracotta.dynamic_config.api.model.Cluster; +import org.terracotta.dynamic_config.api.model.Node; +import org.terracotta.dynamic_config.api.model.UID; +import org.terracotta.dynamic_config.entity.topology.client.DynamicTopologyEntity; +import org.terracotta.dynamic_config.entity.topology.common.DynamicTopologyEntityConstants; +import org.terracotta.exception.EntityNotFoundException; +import org.terracotta.exception.EntityNotProvidedException; +import org.terracotta.exception.EntityVersionMismatchException; +import org.terracotta.lease.connection.LeasedConnection; +import org.terracotta.lease.connection.LeasedConnectionFactory; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Properties; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +public abstract class ConnectionSource { + + public abstract String getClusterTierManager(); + + public abstract LeasedConnection connect(Properties connectionProperties) throws ConnectionException; + + public abstract URI getClusterUri(); + + public static class ClusterUri extends ConnectionSource { + + private final URI clusterUri; + private final String clusterTierManager; + + public ClusterUri(URI clusterUri) { + this.clusterUri = Objects.requireNonNull(clusterUri, "Cluster URI cannot be null"); + this.clusterTierManager = extractCacheManager(clusterUri); + } + + @Override + public String getClusterTierManager() { + return clusterTierManager; + } + + @Override + public LeasedConnection connect(Properties connectionProperties) throws ConnectionException { + return LeasedConnectionFactory.connect(extractClusterUri(clusterUri), connectionProperties); + } + + @Override + public URI getClusterUri() { + return clusterUri; + } + + @Override + public String toString() { + return "clusterUri: " + clusterUri; + } + + private static String extractCacheManager(URI uri) { + URI baseUri = extractClusterUri(uri); + return baseUri.relativize(uri).getPath(); + } + + private static URI extractClusterUri(URI uri) { + try { + return new URI(uri.getScheme(), uri.getAuthority(), null, null, null); + } catch (URISyntaxException e) { + throw new AssertionError(e); + } + } + } + + public static class ServerList extends ConnectionSource { + + private final CopyOnWriteArraySet servers; + private final String clusterTierManager; + + public ServerList(Iterable servers, String clusterTierManager) { + this.servers = createServerList(servers); + this.clusterTierManager = Objects.requireNonNull(clusterTierManager, "Cluster tier manager identifier cannot be null"); + } + + private CopyOnWriteArraySet createServerList(Iterable servers) { + Objects.requireNonNull(servers, "Servers cannot be null"); + CopyOnWriteArraySet serverList = new CopyOnWriteArraySet<>(); + servers.forEach(server -> serverList.add(server)); + return serverList; + } + + @Override + public String getClusterTierManager() { + return clusterTierManager; + } + + @Override + public LeasedConnection connect(Properties connectionProperties) throws ConnectionException { + LeasedConnection connection = LeasedConnectionFactory.connect(servers, connectionProperties); + try { + EntityRef ref = connection.getEntityRef(DynamicTopologyEntity.class, 1, DynamicTopologyEntityConstants.ENTITY_NAME); + DynamicTopologyEntity dynamicTopologyEntity = ref.fetchEntity(null); + dynamicTopologyEntity.setListener(new DynamicTopologyEntity.Listener() { + @Override + public void onNodeRemoval(Cluster cluster, UID stripeUID, Node removedNode) { + removedNode.getEndpoints().forEach(e -> servers.remove(e.getAddress())); + } + + @Override + public void onNodeAddition(Cluster cluster, UID addedNodeUID) { + servers.add(cluster.determineEndpoint(addedNodeUID, servers).get().getAddress()); + } + }); + return new LeasedConnection() { + @Override + public EntityRef getEntityRef(Class cls, long version, String name) throws EntityNotProvidedException { + return connection.getEntityRef(cls, version, name); + } + + @Override + public void close() throws IOException { + Future close = dynamicTopologyEntity.releaseEntity(); + try { + close.get(10, TimeUnit.SECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (ExecutionException e) { + throw new IOException(e.getCause()); + } catch (TimeoutException e) { + } finally { + connection.close(); + } + } + + @Override + public boolean isValid() { + return connection.isValid(); + } + + + }; + } catch (EntityNotProvidedException | EntityVersionMismatchException | EntityNotFoundException e) { + throw new AssertionError(e); + } + } + + @Override + public URI getClusterUri() { + throw new IllegalStateException("Cannot use getClusterUri() on ConnectionSource.ServerList. Use getServers() instead."); + } + + public Iterable getServers() { + return cloneServers(servers); + } + + @Override + public String toString() { + return "servers: " + getServers() + " [cache-manager: " + getClusterTierManager() + "]"; + } + + private List cloneServers(Iterable servers) { + List socketAddresses = new ArrayList<>(); + servers.forEach(socketAddresses::add); + return socketAddresses; + } + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/PerpetualCachePersistenceException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/PerpetualCachePersistenceException.java new file mode 100644 index 0000000000..fbf83a626c --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/PerpetualCachePersistenceException.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal; + +import org.ehcache.CachePersistenceException; + +/** + * Thrown to indicate a perpetual (non-transient) failure in a persistent cache manager. + *

+ * Receiving this exception indicates that future interactions with the throwing entity will continue to fail without + * corrective action. + * + * @see CachePersistenceException + */ +public class PerpetualCachePersistenceException extends CachePersistenceException { + + private static final long serialVersionUID = -5858875151420107041L; + + /** + * Creates a {@code PerpetualCachePersistenceException} with the provided message. + * + * @param message information about the exception + */ + public PerpetualCachePersistenceException(String message) { + super(message); + } + + /** + * Creates a {@code PerpetualCachePersistenceException} with the provided message and cause. + * + * @param message information about the exception + * @param cause the cause of this exception + */ + public PerpetualCachePersistenceException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/SimpleClusterTierManagerClientEntity.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/SimpleClusterTierManagerClientEntity.java similarity index 87% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/SimpleClusterTierManagerClientEntity.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/SimpleClusterTierManagerClientEntity.java index 9dfadeaef6..ae3f42e1b7 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/SimpleClusterTierManagerClientEntity.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/SimpleClusterTierManagerClientEntity.java @@ -24,7 +24,10 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.PrepareForDestroy; import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.connection.entity.Entity; +import org.terracotta.entity.EndpointDelegate; import org.terracotta.entity.EntityClientEndpoint; import org.terracotta.entity.EntityResponse; import org.terracotta.entity.InvokeFuture; @@ -40,12 +43,30 @@ */ public class SimpleClusterTierManagerClientEntity implements ClusterTierManagerClientEntity { + private static final Logger LOGGER = LoggerFactory.getLogger(SimpleClusterTierManagerClientEntity.class); + private final EntityClientEndpoint endpoint; private final LifeCycleMessageFactory messageFactory; public SimpleClusterTierManagerClientEntity(EntityClientEndpoint endpoint) { this.endpoint = endpoint; this.messageFactory = new LifeCycleMessageFactory(); + endpoint.setDelegate(new EndpointDelegate() { + @Override + public void handleMessage(EhcacheEntityResponse messageFromServer) { + + } + + @Override + public byte[] createExtendedReconnectData() { + return new byte[0]; + } + + @Override + public void didDisconnectUnexpectedly() { + LOGGER.info("CacheManager got disconnected from server"); + } + }); } @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java similarity index 87% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java index 9d1da5223a..35bab3ea89 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java @@ -20,7 +20,7 @@ import org.ehcache.clustered.client.config.ClusteredResourcePool; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.config.ResourcePool; -import org.ehcache.core.config.AbstractResourcePool; +import org.ehcache.impl.config.AbstractResourcePool; /** * Implementation for {@link ClusteredResourcePool}. @@ -38,16 +38,6 @@ public PoolAllocation getPoolAllocation() { return new PoolAllocation.Unknown(); } - @Override - public ClusteredResourceType getType() { - return super.getType(); - } - - @Override - public boolean isPersistent() { - return super.isPersistent(); - } - @Override public void validateUpdate(ResourcePool newPool) { throw new UnsupportedOperationException("Updating CLUSTERED resource is not supported"); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java similarity index 98% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java index dd5eed3d30..b45f234bc6 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java @@ -21,7 +21,7 @@ import org.ehcache.config.ResourcePool; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.HumanReadable; -import org.ehcache.core.config.SizedResourcePoolImpl; +import org.ehcache.impl.config.SizedResourcePoolImpl; import org.ehcache.clustered.client.config.DedicatedClusteredResourcePool; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java similarity index 93% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java index 2b312545b3..338bd2046a 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java @@ -20,7 +20,7 @@ import org.ehcache.clustered.client.config.SharedClusteredResourcePool; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.config.ResourcePool; -import org.ehcache.core.config.AbstractResourcePool; +import org.ehcache.impl.config.AbstractResourcePool; /** * Implementation for {@link SharedClusteredResourcePool}. @@ -46,11 +46,6 @@ public SharedClusteredResourcePoolImpl(final String sharedResourcePool) { this.sharedResourcePool = sharedResourcePool; } - @Override - public ClusteredResourceType getType() { - return super.getType(); - } - @Override public String getSharedResourcePool() { return this.sharedResourcePool; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConstants.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConstants.java similarity index 95% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConstants.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConstants.java index 6a5be3fb4e..881159e4b2 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConstants.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConstants.java @@ -34,4 +34,5 @@ final class ClusteredCacheConstants { * Namespace for cluster configuration elements. Must match {@code targetNamespace} in {@value #XSD}. */ static final URI NAMESPACE = URI.create("http://www.ehcache.org/v3/clustered"); + static final String TC_CLUSTERED_NAMESPACE_PREFIX = "tc:"; } diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java new file mode 100644 index 0000000000..639b05ef58 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java @@ -0,0 +1,156 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.config.xml; + +import org.ehcache.clustered.client.internal.config.ClusteredResourcePoolImpl; +import org.ehcache.clustered.client.internal.config.DedicatedClusteredResourcePoolImpl; +import org.ehcache.clustered.client.internal.config.SharedClusteredResourcePoolImpl; +import org.ehcache.config.ResourcePool; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.xml.BaseConfigParser; +import org.ehcache.xml.CacheResourceConfigurationParser; +import org.ehcache.xml.JaxbParsers; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.osgi.service.component.annotations.Component; +import org.w3c.dom.Attr; +import org.w3c.dom.DOMException; +import org.w3c.dom.Document; +import org.w3c.dom.Element; + +import java.io.IOException; +import java.net.URI; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.NAMESPACE; +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.XML_SCHEMA; +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.TC_CLUSTERED_NAMESPACE_PREFIX; + +/** + * Provides a parser for the {@code /config/cache/resources} extension elements. + */ +@Component +public class ClusteredResourceConfigurationParser extends BaseConfigParser implements CacheResourceConfigurationParser { + + private static final String CLUSTERED_ELEMENT_NAME = "clustered"; + private static final String DEDICATED_ELEMENT_NAME = "clustered-dedicated"; + private static final String SHARED_ELEMENT_NAME = "clustered-shared"; + private static final String FROM_ELEMENT_NAME = "from"; + private static final String UNIT_ELEMENT_NAME = "unit"; + private static final String SHARING_ELEMENT_NAME = "sharing"; + + public ClusteredResourceConfigurationParser() { + super(ResourcePool.class); + } + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public URI getNamespace() { + return NAMESPACE; + } + + protected ResourcePool parseResourceConfig(final Element fragment) { + final String elementName = fragment.getLocalName(); + switch (elementName) { + case SHARED_ELEMENT_NAME: + final String sharing = JaxbParsers.parsePropertyOrString(fragment.getAttribute(SHARING_ELEMENT_NAME)); + return new SharedClusteredResourcePoolImpl(sharing); + + case DEDICATED_ELEMENT_NAME: + // 'from' attribute is optional on 'clustered-dedicated' element + final Attr fromAttr = fragment.getAttributeNode(FROM_ELEMENT_NAME); + final String from = (fromAttr == null ? null : JaxbParsers.parsePropertyOrString(fromAttr.getValue())); + + final String unitValue = fragment.getAttribute(UNIT_ELEMENT_NAME).toUpperCase(); + final MemoryUnit sizeUnits; + try { + sizeUnits = MemoryUnit.valueOf(unitValue); + } catch (IllegalArgumentException e) { + throw new XmlConfigurationException(String.format("XML configuration element <%s> 'unit' attribute '%s' is not valid", elementName, unitValue), e); + } + + final String sizeValue; + try { + sizeValue = fragment.getFirstChild().getNodeValue().trim(); + } catch (DOMException e) { + throw new XmlConfigurationException(String.format("XML configuration element <%s> value is not valid", elementName), e); + } + final long size; + try { + size = JaxbParsers.parsePropertyOrPositiveInteger(sizeValue).longValueExact(); + } catch (NumberFormatException e) { + throw new XmlConfigurationException(String.format("XML configuration element <%s> value '%s' is not valid", elementName, sizeValue), e); + } + + return new DedicatedClusteredResourcePoolImpl(from, size, sizeUnits); + case CLUSTERED_ELEMENT_NAME: + return new ClusteredResourcePoolImpl(); + } + return null; + } + + @Override + public ResourcePool parseResourceConfiguration(final Element fragment) { + ResourcePool resourcePool = parseResourceConfig(fragment); + if (resourcePool != null) { + return resourcePool; + } + throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", + fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); + } + + @Override + public Element unparseResourcePool(ResourcePool resourcePool) { + return unparseConfig(resourcePool); + } + + @Override + protected Element createRootElement(Document doc, ResourcePool resourcePool) { + Element rootElement = null; + if (ClusteredResourcePoolImpl.class == resourcePool.getClass()) { + rootElement = doc.createElementNS(getNamespace().toString(), TC_CLUSTERED_NAMESPACE_PREFIX + CLUSTERED_ELEMENT_NAME); + } else if (DedicatedClusteredResourcePoolImpl.class == resourcePool.getClass()) { + DedicatedClusteredResourcePoolImpl dedicatedClusteredResourcePool = (DedicatedClusteredResourcePoolImpl) resourcePool; + rootElement = doc.createElementNS(getNamespace().toString(), TC_CLUSTERED_NAMESPACE_PREFIX + DEDICATED_ELEMENT_NAME); + if (dedicatedClusteredResourcePool.getFromResource() != null) { + rootElement.setAttribute(FROM_ELEMENT_NAME, dedicatedClusteredResourcePool.getFromResource()); + } + rootElement.setAttribute(UNIT_ELEMENT_NAME, dedicatedClusteredResourcePool.getUnit().toString()); + rootElement.setTextContent(String.valueOf(dedicatedClusteredResourcePool.getSize())); + } else if (SharedClusteredResourcePoolImpl.class == resourcePool.getClass()) { + SharedClusteredResourcePoolImpl sharedClusteredResourcePool = (SharedClusteredResourcePoolImpl) resourcePool; + rootElement = doc.createElementNS(getNamespace().toString(), TC_CLUSTERED_NAMESPACE_PREFIX + SHARED_ELEMENT_NAME); + rootElement.setAttribute(SHARING_ELEMENT_NAME, sharedClusteredResourcePool.getSharedResourcePool()); + } + return rootElement; + } + + @Override + public Set> getResourceTypes() { + return Collections.unmodifiableSet(new HashSet<>(Arrays.asList(ClusteredResourcePoolImpl.class, + DedicatedClusteredResourcePoolImpl.class, SharedClusteredResourcePoolImpl.class))); + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheManagerServiceConfigurationParser.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheManagerServiceConfigurationParser.java new file mode 100644 index 0000000000..6eb41f4a32 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheManagerServiceConfigurationParser.java @@ -0,0 +1,493 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.config.xml; + +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration.ClientMode; +import org.ehcache.clustered.client.config.Timeouts; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.client.internal.ConnectionSource; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.BaseConfigParser; +import org.ehcache.xml.CacheManagerServiceConfigurationParser; +import org.ehcache.xml.JaxbParsers; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.TimeTypeWithPropSubst; +import org.osgi.service.component.annotations.Component; +import org.w3c.dom.Attr; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + + +import java.io.IOException; +import java.math.BigInteger; +import java.net.InetSocketAddress; +import java.net.URI; +import java.net.URISyntaxException; +import java.time.Duration; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Properties; + +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBElement; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Unmarshaller; +import javax.xml.bind.helpers.DefaultValidationEventHandler; +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.NAMESPACE; +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.XML_SCHEMA; +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.TC_CLUSTERED_NAMESPACE_PREFIX; +import static org.ehcache.xml.XmlModel.convertToJavaTimeUnit; + +/** + * Provides parsing support for the {@code } elements representing a {@link ClusteringService ClusteringService}. + * + * @see ClusteredCacheConstants#XSD + */ +@Component +public class ClusteringCacheManagerServiceConfigurationParser extends BaseConfigParser implements CacheManagerServiceConfigurationParser { + + public static final String CLUSTER_ELEMENT_NAME = "cluster"; + public static final String CONNECTION_ELEMENT_NAME = "connection"; + public static final String CLUSTER_CONNECTION_ELEMENT_NAME = "cluster-connection"; + public static final String CLUSTER_TIER_MANAGER_ATTRIBUTE_NAME = "cluster-tier-manager"; + public static final String SERVER_ELEMENT_NAME = "server"; + public static final String HOST_ATTRIBUTE_NAME = "host"; + public static final String PORT_ATTRIBUTE_NAME = "port"; + public static final String READ_TIMEOUT_ELEMENT_NAME = "read-timeout"; + public static final String WRITE_TIMEOUT_ELEMENT_NAME = "write-timeout"; + public static final String CONNECTION_TIMEOUT_ELEMENT_NAME = "connection-timeout"; + public static final String URL_ATTRIBUTE_NAME = "url"; + public static final String DEFAULT_RESOURCE_ELEMENT_NAME = "default-resource"; + public static final String SHARED_POOL_ELEMENT_NAME = "shared-pool"; + public static final String SERVER_SIDE_CONFIG = "server-side-config"; + public static final String AUTO_CREATE_ATTRIBUTE_NAME = "auto-create"; + public static final String CLIENT_MODE_ATTRIBUTE_NAME = "client-mode"; + public static final String UNIT_ATTRIBUTE_NAME = "unit"; + public static final String NAME_ATTRIBUTE_NAME = "name"; + public static final String FROM_ATTRIBUTE_NAME = "from"; + public static final String DEFAULT_UNIT_ATTRIBUTE_VALUE = "seconds"; + + public ClusteringCacheManagerServiceConfigurationParser() { + super(ClusteringServiceConfiguration.class); + } + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public URI getNamespace() { + return NAMESPACE; + } + + /** + * Complete interpretation of the top-level elements defined in {@value ClusteredCacheConstants#XSD}. + * This method is called only for those elements from the namespace set by {@link ClusteredCacheConstants#NAMESPACE}. + *

+ * This method presumes the element presented is valid according to the XSD. + * + * @param fragment the XML fragment to process + * @param classLoader + * @return a {@link org.ehcache.clustered.client.config.ClusteringServiceConfiguration ClusteringServiceConfiguration} + */ + @Override + public ServiceCreationConfiguration parseServiceCreationConfiguration(final Element fragment, ClassLoader classLoader) { + + if (CLUSTER_ELEMENT_NAME.equals(fragment.getLocalName())) { + + ClusteringCacheManagerServiceConfigurationParser.ServerSideConfig serverConfig = null; + URI connectionUri = null; + List serverAddresses = new ArrayList<>(); + String clusterTierManager = null; + Duration getTimeout = null, putTimeout = null, connectionTimeout = null; + final NodeList childNodes = fragment.getChildNodes(); + for (int i = 0; i < childNodes.getLength(); i++) { + final Node item = childNodes.item(i); + if (Node.ELEMENT_NODE == item.getNodeType()) { + switch (item.getLocalName()) { + case CONNECTION_ELEMENT_NAME: + /* + * is a required element in the XSD + */ + final Attr urlAttribute = ((Element)item).getAttributeNode(URL_ATTRIBUTE_NAME); + final String urlValue = JaxbParsers.parseStringWithProperties(urlAttribute.getValue()); + try { + connectionUri = new URI(urlValue); + } catch (URISyntaxException e) { + throw new XmlConfigurationException( + String.format("Value of %s attribute on XML configuration element <%s> in <%s> is not a valid URI - '%s'", + urlAttribute.getName(), item.getNodeName(), fragment.getTagName(), connectionUri), e); + } + + break; + case CLUSTER_CONNECTION_ELEMENT_NAME: + clusterTierManager = JaxbParsers.parsePropertyOrString(((Element)item).getAttribute(CLUSTER_TIER_MANAGER_ATTRIBUTE_NAME)); + final NodeList serverNodes = item.getChildNodes(); + for (int j = 0; j < serverNodes.getLength(); j++) { + final Node serverNode = serverNodes.item(j); + final String host = JaxbParsers.parsePropertyOrString(((Element)serverNode).getAttributeNode(HOST_ATTRIBUTE_NAME).getValue().trim()); + final Attr port = ((Element)serverNode).getAttributeNode(PORT_ATTRIBUTE_NAME); + InetSocketAddress address; + if (port == null) { + address = InetSocketAddress.createUnresolved(host, 0); + } else { + String portString = JaxbParsers.parsePropertyOrString(port.getValue()); + address = InetSocketAddress.createUnresolved(host, Integer.parseInt(portString)); + } + serverAddresses.add(address); + } + + break; + case READ_TIMEOUT_ELEMENT_NAME: + /* + * is an optional element + */ + getTimeout = processTimeout(fragment, item); + + break; + case WRITE_TIMEOUT_ELEMENT_NAME: + /* + * is an optional element + */ + putTimeout = processTimeout(fragment, item); + + break; + case CONNECTION_TIMEOUT_ELEMENT_NAME: + /* + * is an optional element + */ + connectionTimeout = processTimeout(fragment, item); + + break; + case SERVER_SIDE_CONFIG: + /* + * is an optional element + */ + serverConfig = processServerSideConfig((Element) item); + break; + default: + throw new XmlConfigurationException( + String.format("Unknown XML configuration element <%s> in <%s>", + item.getNodeName(), fragment.getTagName())); + } + } + } + + try { + ConnectionSource connectionSource; + if (connectionUri != null) { + connectionSource = new ConnectionSource.ClusterUri(connectionUri); + } else { + connectionSource = new ConnectionSource.ServerList(serverAddresses, clusterTierManager); + } + + Timeouts timeouts = getTimeouts(getTimeout, putTimeout, connectionTimeout); + + if (serverConfig == null) { + return new ClusteringServiceConfiguration(connectionSource, timeouts, ClientMode.CONNECT, null, new Properties()); + } else { + ServerSideConfiguration serverSideConfiguration; + if (serverConfig.defaultServerResource == null) { + serverSideConfiguration = new ServerSideConfiguration(serverConfig.pools); + } else { + serverSideConfiguration = new ServerSideConfiguration(serverConfig.defaultServerResource, serverConfig.pools); + } + return new ClusteringServiceConfiguration(connectionSource, timeouts, serverConfig.clientMode, serverSideConfiguration, new Properties()); + } + } catch (IllegalArgumentException e) { + throw new XmlConfigurationException(e); + } + } + throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", + fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); + } + + @Override + public Class getServiceType() { + return ClusteringService.class; + } + + /** + * Translates a {@link ServiceCreationConfiguration} to an xml element + * + * @param serviceCreationConfiguration + */ + @Override + public Element unparseServiceCreationConfiguration(final ServiceCreationConfiguration serviceCreationConfiguration) { + Element rootElement = unparseConfig(serviceCreationConfiguration); + return rootElement; + } + + private Element createRootUrlElement(Document doc, ClusteringServiceConfiguration clusteringServiceConfiguration) { + Element rootElement = doc.createElementNS(getNamespace().toString(), TC_CLUSTERED_NAMESPACE_PREFIX + CLUSTER_ELEMENT_NAME); + Element urlElement = createUrlElement(doc, clusteringServiceConfiguration); + rootElement.appendChild(urlElement); + return rootElement; + } + + protected Element createUrlElement(Document doc, ClusteringServiceConfiguration clusteringServiceConfiguration) { + Element urlElement = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + CONNECTION_ELEMENT_NAME); + urlElement.setAttribute(URL_ATTRIBUTE_NAME, clusteringServiceConfiguration.getClusterUri().toString()); + return urlElement; + } + + private Element createServerElement(Document doc, ClusteringServiceConfiguration clusteringServiceConfiguration) { + if (!(clusteringServiceConfiguration.getConnectionSource() instanceof ConnectionSource.ServerList)) { + throw new IllegalArgumentException("When connection URL is null, source of connection MUST be of type ConnectionSource.ServerList.class"); + } + ConnectionSource.ServerList servers = (ConnectionSource.ServerList)clusteringServiceConfiguration.getConnectionSource(); + Element rootElement = doc.createElementNS(getNamespace().toString(), TC_CLUSTERED_NAMESPACE_PREFIX + CLUSTER_ELEMENT_NAME); + Element connElement = createConnectionElementWrapper(doc, clusteringServiceConfiguration); + servers.getServers().forEach(server -> { + Element serverElement = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + SERVER_ELEMENT_NAME); + serverElement.setAttribute(HOST_ATTRIBUTE_NAME, server.getHostName()); + /* + If port is greater than 0, set the attribute. Otherwise, do not set. Default value will be taken. + */ + if (server.getPort() > 0) { + serverElement.setAttribute(PORT_ATTRIBUTE_NAME, Integer.toString(server.getPort())); + } + connElement.appendChild(serverElement); + }); + rootElement.appendChild(connElement); + return rootElement; + } + + protected Element createConnectionElementWrapper(Document doc, ClusteringServiceConfiguration clusteringServiceConfiguration) { + Element connElement = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + CLUSTER_CONNECTION_ELEMENT_NAME); + connElement.setAttribute(CLUSTER_TIER_MANAGER_ATTRIBUTE_NAME, clusteringServiceConfiguration.getConnectionSource() + .getClusterTierManager()); + return connElement; + } + + @Override + protected Element createRootElement(Document doc, ClusteringServiceConfiguration clusteringServiceConfiguration) { + Element rootElement; + if (clusteringServiceConfiguration.getConnectionSource() instanceof ConnectionSource.ClusterUri) { + rootElement = createRootUrlElement(doc, clusteringServiceConfiguration); + } else { + rootElement = createServerElement(doc, clusteringServiceConfiguration); + } + + processTimeUnits(doc, rootElement, clusteringServiceConfiguration); + Element serverSideConfigurationElem = processServerSideElements(doc, clusteringServiceConfiguration); + if (serverSideConfigurationElem != null) { + rootElement.appendChild(serverSideConfigurationElem); + } + return rootElement; + } + + private void processTimeUnits(Document doc, Element parent, ClusteringServiceConfiguration clusteringServiceConfiguration) { + if (clusteringServiceConfiguration.getTimeouts() != null) { + Timeouts timeouts = clusteringServiceConfiguration.getTimeouts(); + + Element readTimeoutElem = createTimeoutElement(doc, READ_TIMEOUT_ELEMENT_NAME, timeouts.getReadOperationTimeout()); + Element writeTimeoutElem = createTimeoutElement(doc, WRITE_TIMEOUT_ELEMENT_NAME, timeouts.getWriteOperationTimeout()); + Element connectionTimeoutElem = createTimeoutElement(doc, CONNECTION_TIMEOUT_ELEMENT_NAME, timeouts.getConnectionTimeout()); + /* + Important: do not change the order of following three elements if corresponding change is not done in xsd + */ + parent.appendChild(readTimeoutElem); + parent.appendChild(writeTimeoutElem); + parent.appendChild(connectionTimeoutElem); + } + } + + private Element createTimeoutElement(Document doc, String timeoutName, Duration timeout) { + Element retElement; + if (READ_TIMEOUT_ELEMENT_NAME.equals(timeoutName)) { + retElement = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + READ_TIMEOUT_ELEMENT_NAME); + } else if (WRITE_TIMEOUT_ELEMENT_NAME.equals(timeoutName)) { + retElement = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + WRITE_TIMEOUT_ELEMENT_NAME); + } else { + retElement = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + CONNECTION_TIMEOUT_ELEMENT_NAME); + } + retElement.setAttribute(UNIT_ATTRIBUTE_NAME, DEFAULT_UNIT_ATTRIBUTE_VALUE); + retElement.setTextContent(Long.toString(timeout.getSeconds())); + return retElement; + } + + protected Element processServerSideElements(Document doc, ClusteringServiceConfiguration clusteringServiceConfiguration) { + switch (clusteringServiceConfiguration.getClientMode()) { + case CONNECT: + return null; + case EXPECTING: + case AUTO_CREATE: + case AUTO_CREATE_ON_RECONNECT: + Element serverSideConfigurationElem = createServerSideConfigurationElement(doc, clusteringServiceConfiguration); + ServerSideConfiguration serverSideConfiguration = clusteringServiceConfiguration.getServerConfiguration(); + String defaultServerResource = serverSideConfiguration.getDefaultServerResource(); + if (!(defaultServerResource == null || defaultServerResource.trim().length() == 0)) { + Element defaultResourceElement = createDefaultServerResourceElement(doc, defaultServerResource); + serverSideConfigurationElem.appendChild(defaultResourceElement); + } + Map resourcePools = serverSideConfiguration.getResourcePools(); + if (resourcePools != null) { + resourcePools.forEach( + (key, value) -> { + Element poolElement = createSharedPoolElement(doc, key, value); + serverSideConfigurationElem.appendChild(poolElement); + } + ); + } + return serverSideConfigurationElem; + } + throw new AssertionError(); + } + + private Element createServerSideConfigurationElement(Document doc, ClusteringServiceConfiguration clusteringServiceConfiguration) { + Element serverSideConfigurationElem = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + SERVER_SIDE_CONFIG); + serverSideConfigurationElem.setAttribute(CLIENT_MODE_ATTRIBUTE_NAME, clusteringServiceConfiguration.getClientMode() + .name().toLowerCase(Locale.ROOT).replace('_', '-')); + return serverSideConfigurationElem; + } + + + private Element createSharedPoolElement(Document doc, String poolName, ServerSideConfiguration.Pool pool) { + Element poolElement = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + SHARED_POOL_ELEMENT_NAME); + poolElement.setAttribute(NAME_ATTRIBUTE_NAME, poolName); + String from = pool.getServerResource(); + if (from != null) { + if (from.trim().length() == 0) { + throw new XmlConfigurationException("Resource pool name can not be empty."); + } + poolElement.setAttribute(FROM_ATTRIBUTE_NAME, from); + } + long memoryInBytes = MemoryUnit.B.convert(pool.getSize(), MemoryUnit.B); + poolElement.setAttribute(UNIT_ATTRIBUTE_NAME, MemoryUnit.B.toString()); + poolElement.setTextContent(Long.toString(memoryInBytes)); + return poolElement; + } + + private Element createDefaultServerResourceElement(Document doc, String defaultServerResource) { + Element defaultResourceElement = doc.createElement(TC_CLUSTERED_NAMESPACE_PREFIX + DEFAULT_RESOURCE_ELEMENT_NAME); + defaultResourceElement.setAttribute(FROM_ATTRIBUTE_NAME, defaultServerResource); + return defaultResourceElement; + } + + private ClusteringCacheManagerServiceConfigurationParser.ServerSideConfig processServerSideConfig(Element serverSideConfigElement) { + ClusteringCacheManagerServiceConfigurationParser.ServerSideConfig serverSideConfig = new ClusteringCacheManagerServiceConfigurationParser.ServerSideConfig(); + + String autoCreateAttr = serverSideConfigElement.getAttribute(AUTO_CREATE_ATTRIBUTE_NAME); + String clientModeAttr = serverSideConfigElement.getAttribute(CLIENT_MODE_ATTRIBUTE_NAME); + if (clientModeAttr.isEmpty()) { + if (!autoCreateAttr.isEmpty()) { + serverSideConfig.clientMode = Boolean.parseBoolean(autoCreateAttr) ? ClientMode.AUTO_CREATE : ClientMode.EXPECTING; + } + } else if (autoCreateAttr.isEmpty()) { + clientModeAttr = JaxbParsers.parsePropertyOrString(clientModeAttr); + serverSideConfig.clientMode = ClientMode.valueOf(clientModeAttr.toUpperCase(Locale.ROOT).replace('-', '_')); + } else { + throw new XmlConfigurationException("Cannot define both '" + AUTO_CREATE_ATTRIBUTE_NAME + "' and '" + CLIENT_MODE_ATTRIBUTE_NAME + "' attributes"); + } + + final NodeList serverSideNodes = serverSideConfigElement.getChildNodes(); + for (int i = 0; i < serverSideNodes.getLength(); i++) { + final Node item = serverSideNodes.item(i); + if (Node.ELEMENT_NODE == item.getNodeType()) { + String nodeLocalName = item.getLocalName(); + if (DEFAULT_RESOURCE_ELEMENT_NAME.equals(nodeLocalName)) { + serverSideConfig.defaultServerResource = JaxbParsers.parsePropertyOrString(((Element)item).getAttribute(FROM_ATTRIBUTE_NAME)); + + } else if (SHARED_POOL_ELEMENT_NAME.equals(nodeLocalName)) { + Element sharedPoolElement = (Element)item; + String poolName = sharedPoolElement.getAttribute(NAME_ATTRIBUTE_NAME); // required + Attr fromAttr = sharedPoolElement.getAttributeNode(FROM_ATTRIBUTE_NAME); // optional + String fromResource = (fromAttr == null ? null : fromAttr.getValue()); + Attr unitAttr = sharedPoolElement.getAttributeNode(UNIT_ATTRIBUTE_NAME); // optional - default 'B' + String unit = (unitAttr == null ? "B" : unitAttr.getValue()); + MemoryUnit memoryUnit = MemoryUnit.valueOf(unit.toUpperCase(Locale.ENGLISH)); + + String quantityValue = sharedPoolElement.getFirstChild().getNodeValue(); + long quantity; + try { + quantity = JaxbParsers.parsePropertyOrPositiveInteger(quantityValue).longValueExact(); + } catch (NumberFormatException e) { + throw new XmlConfigurationException("Magnitude of value specified for is too large"); + } + + ServerSideConfiguration.Pool poolDefinition; + if (fromResource == null) { + poolDefinition = new ServerSideConfiguration.Pool(memoryUnit.toBytes(quantity)); + } else { + poolDefinition = new ServerSideConfiguration.Pool(memoryUnit.toBytes(quantity), JaxbParsers.parsePropertyOrString(fromResource)); + } + + if (serverSideConfig.pools.put(poolName, poolDefinition) != null) { + throw new XmlConfigurationException("Duplicate definition for "); + } + } + } + } + return serverSideConfig; + } + + private Duration processTimeout(Element parentElement, Node timeoutNode) { + try { + // are direct subtype of ehcache:time-type; use JAXB to interpret it + JAXBContext context = JAXBContext.newInstance(TimeTypeWithPropSubst.class); + Unmarshaller unmarshaller = context.createUnmarshaller(); + unmarshaller.setEventHandler(new DefaultValidationEventHandler()); + JAXBElement jaxbElement = unmarshaller.unmarshal(timeoutNode, TimeTypeWithPropSubst.class); + + TimeTypeWithPropSubst timeType = jaxbElement.getValue(); + BigInteger amount = timeType.getValue(); + if (amount.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) { + throw new XmlConfigurationException( + String.format("Value of XML configuration element <%s> in <%s> exceeds allowed value - %s", + timeoutNode.getNodeName(), parentElement.getTagName(), amount)); + } + return Duration.of(amount.longValue(), convertToJavaTimeUnit(timeType.getUnit())); + + } catch (JAXBException e) { + throw new XmlConfigurationException(e); + } + } + + private Timeouts getTimeouts(Duration getTimeout, Duration putTimeout, Duration connectionTimeout) { + TimeoutsBuilder builder = TimeoutsBuilder.timeouts(); + if (getTimeout != null) { + builder.read(getTimeout); + } + if (putTimeout != null) { + builder.write(putTimeout); + } + if (connectionTimeout != null) { + builder.connection(connectionTimeout); + } + return builder.build(); + } + + private static final class ServerSideConfig { + private ClientMode clientMode = ClientMode.CONNECT; + private String defaultServerResource = null; + private final Map pools = new HashMap<>(); + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheServiceConfigurationParser.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheServiceConfigurationParser.java new file mode 100644 index 0000000000..67b7c4e7bf --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheServiceConfigurationParser.java @@ -0,0 +1,95 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.config.xml; + +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.internal.store.ClusteredStore; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.xml.BaseConfigParser; +import org.ehcache.xml.CacheServiceConfigurationParser; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.osgi.service.component.annotations.Component; +import org.w3c.dom.Document; +import org.w3c.dom.Element; + +import java.io.IOException; +import java.net.URI; + +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.NAMESPACE; +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.XML_SCHEMA; +import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.TC_CLUSTERED_NAMESPACE_PREFIX; + +/** + * Provides parsing support for the {@code } elements representing a {@link ClusteredStore.Provider ClusteringService}. + * + * @see ClusteredCacheConstants#XSD + */ +@Component +public class ClusteringCacheServiceConfigurationParser extends BaseConfigParser implements CacheServiceConfigurationParser { + + public static final String CLUSTERED_STORE_ELEMENT_NAME = "clustered-store"; + public static final String CONSISTENCY_ATTRIBUTE_NAME = "consistency"; + + public ClusteringCacheServiceConfigurationParser() { + super(ClusteredStoreConfiguration.class); + } + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public URI getNamespace() { + return NAMESPACE; + } + + @Override + public ServiceConfiguration parseServiceConfiguration(Element fragment, ClassLoader classLoader) { + if (CLUSTERED_STORE_ELEMENT_NAME.equals(fragment.getLocalName())) { + if (fragment.hasAttribute(CONSISTENCY_ATTRIBUTE_NAME)) { + return new ClusteredStoreConfiguration(Consistency.valueOf(fragment.getAttribute(CONSISTENCY_ATTRIBUTE_NAME).toUpperCase())); + } else { + return new ClusteredStoreConfiguration(); + } + } + throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", + fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); + } + + @Override + public Class getServiceType() { + return ClusteredStore.Provider.class; + } + + @Override + public Element unparseServiceConfiguration(ServiceConfiguration serviceConfiguration) { + return unparseConfig(serviceConfiguration); + } + + @Override + protected Element createRootElement(Document doc, ClusteredStoreConfiguration clusteredStoreConfiguration) { + Consistency consistency = clusteredStoreConfiguration.getConsistency(); + Element rootElement = doc.createElementNS(getNamespace().toString(), TC_CLUSTERED_NAMESPACE_PREFIX + CLUSTERED_STORE_ELEMENT_NAME); + rootElement.setAttribute(CONSISTENCY_ATTRIBUTE_NAME, consistency.name().toLowerCase()); + return rootElement; + } + +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStore.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStore.java new file mode 100644 index 0000000000..998e481e0c --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStore.java @@ -0,0 +1,357 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter; + +import org.ehcache.clustered.client.internal.store.ClusteredStore; +import org.ehcache.clustered.client.internal.store.ClusteredValueHolder; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.client.internal.store.lock.LockingServerStoreProxy; +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.client.internal.store.operations.EternalChainResolver; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.internal.store.operations.ConditionalRemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.ConditionalReplaceOperation; +import org.ehcache.clustered.common.internal.store.operations.PutIfAbsentOperation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.RemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.ReplaceOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.config.ResourceType; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.exceptions.StorePassThroughException; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.impl.store.DefaultStoreEventDispatcher; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoadingException; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; + +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.Set; +import java.util.concurrent.TimeoutException; + +import static org.ehcache.core.exceptions.ExceptionFactory.newCacheLoadingException; +import static org.ehcache.core.exceptions.StorePassThroughException.handleException; + +public class ClusteredLoaderWriterStore extends ClusteredStore implements AuthoritativeTier { + + private final CacheLoaderWriter cacheLoaderWriter; + private final boolean useLoaderInAtomics; + + public ClusteredLoaderWriterStore(Configuration config, OperationsCodec codec, ChainResolver resolver, TimeSource timeSource, + CacheLoaderWriter loaderWriter, boolean useLoaderInAtomics, StoreEventDispatcher storeEventDispatcher, StatisticsService statisticsService) { + super(config, codec, resolver, timeSource, storeEventDispatcher, statisticsService); + this.cacheLoaderWriter = loaderWriter; + this.useLoaderInAtomics = useLoaderInAtomics; + } + + /** + * For Tests + */ + ClusteredLoaderWriterStore(Configuration config, OperationsCodec codec, EternalChainResolver resolver, + ServerStoreProxy proxy, TimeSource timeSource, CacheLoaderWriter loaderWriter, StatisticsService statisticsService) { + super(config, codec, resolver, proxy, timeSource, null, statisticsService); + this.cacheLoaderWriter = loaderWriter; + this.useLoaderInAtomics = true; + } + + private LockingServerStoreProxy getProxy() { + return (LockingServerStoreProxy) storeProxy; + } + + @Override + protected ValueHolder getInternal(K key) throws StoreAccessException, TimeoutException { + ValueHolder holder = super.getInternal(key); + try { + if (holder == null) { + long hash = extractLongKey(key); + boolean unlocked = false; + getProxy().lock(hash); + try { + V value; + try { + value = cacheLoaderWriter.load(key); + } catch (Exception e) { + throw new StorePassThroughException(new CacheLoadingException(e)); + } + if (value == null) { + return null; + } + append(key, value); + unlocked = true; + return new ClusteredValueHolder<>(value); + } finally { + getProxy().unlock(hash, unlocked); + } + } + } catch (RuntimeException re) { + throw handleException(re); + } + return holder; + } + + @Override + public boolean containsKey(K key) throws StoreAccessException { + try { + return super.getInternal(key) != null; + } catch (TimeoutException e) { + return false; + } + } + + private void append(K key, V value) throws TimeoutException { + PutOperation operation = new PutOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + storeProxy.append(extractedKey, payload); + } + + @Override + protected void silentPut(K key, V value) throws StoreAccessException { + try { + long hash = extractLongKey(key); + boolean unlocked = false; + getProxy().lock(hash); + try { + cacheLoaderWriter.write(key, value); + append(key, value); + unlocked = true; + } finally { + getProxy().unlock(hash, unlocked); + } + } catch (Exception e) { + throw handleException(e); + } + } + + @Override + protected ValueHolder silentGetAndPut(K key, V value) throws StoreAccessException { + try { + long hash = extractLongKey(key); + boolean unlocked = false; + final ServerStoreProxy.ChainEntry chain = getProxy().lock(hash); + try { + cacheLoaderWriter.write(key, value); + append(key, value); + unlocked = true; + return resolver.resolve(chain, key, timeSource.getTimeMillis()); + } finally { + getProxy().unlock(hash, unlocked); + } + } catch (Exception e) { + throw handleException(e); + } + } + + @Override + protected ValueHolder silentRemove(K key) throws StoreAccessException { + try { + long hash = extractLongKey(key); + boolean unlocked = false; + RemoveOperation operation = new RemoveOperation<>(key, timeSource.getTimeMillis()); + ByteBuffer payLoad = codec.encode(operation); + ServerStoreProxy.ChainEntry chain = getProxy().lock(hash); + try { + cacheLoaderWriter.delete(key); + storeProxy.append(hash, payLoad); + unlocked = true; + return resolver.resolve(chain, key, timeSource.getTimeMillis()); + } finally { + getProxy().unlock(hash, unlocked); + } + } catch (Exception e) { + throw handleException(e); + } + } + + @Override + protected ValueHolder silentPutIfAbsent(K key, V value) throws StoreAccessException { + try { + long hash = extractLongKey(key); + boolean unlocked = false; + ServerStoreProxy.ChainEntry existing = getProxy().lock(hash); + try { + ValueHolder existingVal = resolver.resolve(existing, key, timeSource.getTimeMillis()); + if (existingVal != null) { + return existingVal; + } else { + existingVal = loadFromLoaderWriter(key); + if (existingVal == null) { + cacheLoaderWriter.write(key, value); + PutIfAbsentOperation operation = new PutIfAbsentOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + storeProxy.append(hash, payload); + unlocked = true; + } + return existingVal; + } + } finally { + getProxy().unlock(hash, unlocked); + } + } catch (Exception e) { + throw handleException(e); + } + } + + @Override + protected ValueHolder silentReplace(K key, V value) throws StoreAccessException { + try { + long hash = extractLongKey(key); + boolean unlocked = false; + ServerStoreProxy.ChainEntry existing = getProxy().lock(hash); + try { + ValueHolder existingVal = resolver.resolve(existing, key, timeSource.getTimeMillis()); + if (existingVal != null) { + cacheLoaderWriter.write(key, value); + ReplaceOperation operation = new ReplaceOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + storeProxy.append(hash, payload); + unlocked = true; + return existingVal; + } else { + ValueHolder inCache = loadFromLoaderWriter(key); + if (inCache != null) { + cacheLoaderWriter.write(key, value); + ReplaceOperation operation = new ReplaceOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + storeProxy.append(hash, payload); + unlocked = true; + return inCache; + } else { + return null; + } + } + } finally { + getProxy().unlock(hash, unlocked); + } + } catch (Exception e) { + throw handleException(e); + } + } + + @Override + protected ValueHolder silentRemove(K key, V value) throws StoreAccessException { + try { + long hash = extractLongKey(key); + boolean unlocked = false; + ServerStoreProxy.ChainEntry existing = getProxy().lock(hash); + try { + ValueHolder existingVal = resolver.resolve(existing, key, timeSource.getTimeMillis()); + if (existingVal == null) { + existingVal = loadFromLoaderWriter(key); + } + if (existingVal != null && value.equals(existingVal.get())) { + cacheLoaderWriter.delete(key); + ConditionalRemoveOperation operation = new ConditionalRemoveOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payLoad = codec.encode(operation); + storeProxy.append(hash, payLoad); + unlocked = true; + } + return existingVal; + } finally { + getProxy().unlock(hash, unlocked); + } + } catch (Exception e) { + throw handleException(e); + } + } + + @Override + protected ValueHolder silentReplace(K key, V oldValue, V newValue) throws StoreAccessException { + try { + long hash = extractLongKey(key); + boolean unlocked = false; + ServerStoreProxy.ChainEntry existing = getProxy().lock(hash); + try { + ValueHolder existingVal = resolver.resolve(existing, key, timeSource.getTimeMillis()); + if (existingVal == null) { + existingVal = loadFromLoaderWriter(key); + } + if (existingVal != null && oldValue.equals(existingVal.get())) { + cacheLoaderWriter.write(key, newValue); + ConditionalReplaceOperation operation = new ConditionalReplaceOperation<>(key, oldValue, newValue, timeSource.getTimeMillis()); + ByteBuffer payLoad = codec.encode(operation); + storeProxy.append(hash, payLoad); + unlocked = true; + } + return existingVal; + } finally { + getProxy().unlock(hash, unlocked); + } + } catch (Exception e) { + throw handleException(e); + } + } + + private ValueHolder loadFromLoaderWriter(K key) { + if (useLoaderInAtomics) { + try { + V loaded = cacheLoaderWriter.load(key); + if (loaded == null) { + return null; + } else { + return new ClusteredValueHolder<>(loaded); + } + } catch (Exception e) { + throw new StorePassThroughException(newCacheLoadingException(e)); + } + } + return null; + } + + /** + * Provider of {@link ClusteredLoaderWriterStore} instances. + */ + @ServiceDependencies({ TimeSourceService.class, ClusteringService.class}) + public static class Provider extends ClusteredStore.Provider { + @Override + protected ClusteredStore createStore(Configuration storeConfig, + OperationsCodec codec, + ChainResolver resolver, + TimeSource timeSource, + boolean useLoaderInAtomics, + Object[] serviceConfigs) { + StoreEventDispatcher storeEventDispatcher = new DefaultStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()); + return new ClusteredLoaderWriterStore<>(storeConfig, codec, resolver, timeSource, + storeConfig.getCacheLoaderWriter(), useLoaderInAtomics, storeEventDispatcher, getServiceProvider().getService(StatisticsService.class)); + } + + @Override + public int rank(Set> resourceTypes, Collection> serviceConfigs) { + int parentRank = super.rank(resourceTypes, serviceConfigs); + if (parentRank == 0 || serviceConfigs.stream().noneMatch(CacheLoaderWriterConfiguration.class::isInstance)) { + return 0; + } + return parentRank + 1; + } + + @Override + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + int parentRank = super.rankAuthority(authorityResource, serviceConfigs); + if (parentRank == 0 || serviceConfigs.stream().noneMatch(CacheLoaderWriterConfiguration.class::isInstance)) { + return 0; + } + return parentRank + 1; + } + } + +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreProviderFactory.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreProviderFactory.java new file mode 100644 index 0000000000..98a26168d8 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreProviderFactory.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; + +@Component +public class ClusteredLoaderWriterStoreProviderFactory implements ServiceFactory { + + @Override + public ClusteredLoaderWriterStore.Provider create(ServiceCreationConfiguration configuration) { + return new ClusteredLoaderWriterStore.Provider(); + } + + @Override + public Class getServiceType() { + return ClusteredLoaderWriterStore.Provider.class; + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStore.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStore.java new file mode 100644 index 0000000000..b9cb0c854e --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStore.java @@ -0,0 +1,143 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter; + +import org.ehcache.Cache; +import org.ehcache.core.CacheConfigurationChangeListener; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.WrapperStore; +import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.spi.resilience.StoreAccessException; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +public class DelegatingLoaderWriterStore implements WrapperStore { + + private final Store delegate; + + public DelegatingLoaderWriterStore(Store store) { + this.delegate = store; + } + + @Override + public ValueHolder get(K key) throws StoreAccessException { + return delegate.get(key); + } + + @Override + public boolean containsKey(K key) throws StoreAccessException { + return delegate.containsKey(key); + } + + @Override + public PutStatus put(K key, V value) throws StoreAccessException { + return delegate.put(key, value); + } + + @Override + public ValueHolder getAndPut(K key, V value) throws StoreAccessException { + return delegate.getAndPut(key, value); + } + + @Override + public ValueHolder putIfAbsent(K key, V value, Consumer put) throws StoreAccessException { + return delegate.putIfAbsent(key, value, put); + } + + @Override + public boolean remove(K key) throws StoreAccessException { + return delegate.remove(key); + } + + @Override + public ValueHolder getAndRemove(K key) throws StoreAccessException { + return delegate.getAndRemove(key); + } + + @Override + public RemoveStatus remove(K key, V value) throws StoreAccessException { + return delegate.remove(key, value); + } + + @Override + public ValueHolder replace(K key, V value) throws StoreAccessException { + return delegate.replace(key, value); + } + + @Override + public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { + return delegate.replace(key, oldValue, newValue); + } + + @Override + public void clear() throws StoreAccessException { + delegate.clear(); + } + + @Override + public StoreEventSource getStoreEventSource() { + return new NullStoreEventDispatcher<>(); + } + + @Override + public Iterator>> iterator() { + return delegate.iterator(); + } + + @Override + public ValueHolder getAndCompute(K key, BiFunction mappingFunction) throws StoreAccessException { + throw new UnsupportedOperationException("Implement me"); + } + + @Override + public ValueHolder computeAndGet(K key, BiFunction mappingFunction, Supplier replaceEqual, Supplier invokeWriter) throws StoreAccessException { + throw new UnsupportedOperationException("Implement me"); + } + + @Override + public ValueHolder computeIfAbsent(K key, Function mappingFunction) throws StoreAccessException { + throw new UnsupportedOperationException("Implement me"); + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { + return delegate.bulkCompute(keys, remappingFunction); + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { + throw new UnsupportedOperationException("Implement me"); + } + + @Override + public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { + return delegate.bulkComputeIfAbsent(keys, mappingFunction); + } + + @Override + public List getConfigurationChangeListeners() { + return Collections.emptyList(); + } + +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStoreProvider.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStoreProvider.java new file mode 100644 index 0000000000..7a5bb4846d --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStoreProvider.java @@ -0,0 +1,55 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter; + +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.client.service.ClusteringService.ClusteredCacheIdentifier; +import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.store.AbstractWrapperStoreProvider; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; +import java.util.Collection; +import java.util.Set; + +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +@ServiceDependencies({CacheLoaderWriterProvider.class, ClusteringService.class}) +public class DelegatingLoaderWriterStoreProvider extends AbstractWrapperStoreProvider { + + @Override + protected Store wrap(Store store, Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + DelegatingLoaderWriterStore loaderWriterStore = new DelegatingLoaderWriterStore<>(store); + return loaderWriterStore; + } + + @Override + public int rank(Set> resourceTypes, Collection> serviceConfigs) { + throw new UnsupportedOperationException("Its a Wrapper store provider, does not support regular ranking"); + } + + @Override + public int wrapperStoreRank(Collection> serviceConfigs) { + CacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(CacheLoaderWriterConfiguration.class, serviceConfigs); + ClusteredCacheIdentifier clusteredCacheIdentifier = findSingletonAmongst(ClusteredCacheIdentifier.class, serviceConfigs); + if (clusteredCacheIdentifier != null && loaderWriterConfiguration != null) { + return 3; + } + return 0; + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStoreProviderFactory.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStoreProviderFactory.java new file mode 100644 index 0000000000..0ddca5007d --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/DelegatingLoaderWriterStoreProviderFactory.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; + +@Component +public class DelegatingLoaderWriterStoreProviderFactory implements ServiceFactory { + + @Override + public DelegatingLoaderWriterStoreProvider create(ServiceCreationConfiguration configuration) { + return new DelegatingLoaderWriterStoreProvider(); + } + + @Override + public Class getServiceType() { + return DelegatingLoaderWriterStoreProvider.class; + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehind.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehind.java new file mode 100644 index 0000000000..852f0ff3e1 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehind.java @@ -0,0 +1,101 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter.writebehind; + +import org.ehcache.clustered.common.internal.util.ChainBuilder; +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.common.internal.store.operations.ConditionalRemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.Operation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.RemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeoutException; + +class ClusteredWriteBehind { + private final ClusteredWriteBehindStore clusteredWriteBehindStore; + private final ExecutorService executorService; + private final CacheLoaderWriter cacheLoaderWriter; + private final OperationsCodec codec; + private final ChainResolver resolver; + + ClusteredWriteBehind(ClusteredWriteBehindStore clusteredWriteBehindStore, + ExecutorService executorService, + ChainResolver resolver, + CacheLoaderWriter cacheLoaderWriter, + OperationsCodec codec) { + this.clusteredWriteBehindStore = clusteredWriteBehindStore; + this.executorService = executorService; + this.resolver = resolver; + this.cacheLoaderWriter = cacheLoaderWriter; + this.codec = codec; + } + + void flushWriteBehindQueue(Chain ignored, long hash) { + executorService.submit(() -> { + try { + Chain chain = clusteredWriteBehindStore.lock(hash); + try { + if (!chain.isEmpty()) { + Map> currentState = new HashMap<>(); + for (Element element : chain) { + ByteBuffer payload = element.getPayload(); + Operation operation = codec.decode(payload); + K key = operation.getKey(); + PutOperation result = resolver.applyOperation(key, + currentState.get(key), + operation); + try { + if (result != null) { + if (result != currentState.get(key) && !(operation instanceof PutOperation)) { + cacheLoaderWriter.write(result.getKey(), result.getValue()); + } + currentState.put(key, result.asOperationExpiringAt(result.expirationTime())); + } else { + if (currentState.get(key) != null && (operation instanceof RemoveOperation + || operation instanceof ConditionalRemoveOperation)) { + cacheLoaderWriter.delete(key); + } + currentState.remove(key); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + ChainBuilder builder = new ChainBuilder(); + for (PutOperation operation : currentState.values()) { + builder = builder.add(codec.encode(operation)); + } + + clusteredWriteBehindStore.replaceAtHead(hash, chain, builder.build()); + } + } finally { + clusteredWriteBehindStore.unlock(hash, false); + } + } catch (TimeoutException e) { + throw new RuntimeException(e); + } + }); + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStore.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStore.java new file mode 100644 index 0000000000..1f94719e50 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStore.java @@ -0,0 +1,315 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter.writebehind; + +import org.ehcache.clustered.client.internal.loaderwriter.ClusteredLoaderWriterStore; +import org.ehcache.clustered.client.internal.store.ClusteredStore; +import org.ehcache.clustered.client.internal.store.ClusteredValueHolder; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.client.internal.store.lock.LockingServerStoreProxy; +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.common.internal.store.operations.ConditionalRemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.ConditionalReplaceOperation; +import org.ehcache.clustered.common.internal.store.operations.PutIfAbsentOperation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.PutWithWriterOperation; +import org.ehcache.clustered.common.internal.store.operations.RemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.ReplaceOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.config.ResourceType; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.impl.store.DefaultStoreEventDispatcher; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; + +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeoutException; + +import static org.ehcache.core.exceptions.StorePassThroughException.handleException; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class ClusteredWriteBehindStore extends ClusteredStore implements AuthoritativeTier { + + private final CacheLoaderWriter cacheLoaderWriter; + private final ClusteredWriteBehind clusteredWriteBehind; + + private ClusteredWriteBehindStore(Configuration config, + OperationsCodec codec, + ChainResolver resolver, + TimeSource timeSource, + CacheLoaderWriter loaderWriter, + ExecutorService executorService, + StoreEventDispatcher storeEventDispatcher, StatisticsService statisticsService) { + super(config, codec, resolver, timeSource, storeEventDispatcher, statisticsService); + this.cacheLoaderWriter = loaderWriter; + this.clusteredWriteBehind = new ClusteredWriteBehind<>(this, executorService, + resolver, + this.cacheLoaderWriter, + codec); + } + + + ServerStoreProxy.ChainEntry lock(long hash) throws TimeoutException { + return ((LockingServerStoreProxy) storeProxy).lock(hash); + } + + void unlock(long hash, boolean localOnly) throws TimeoutException { + ((LockingServerStoreProxy) storeProxy).unlock(hash, localOnly); + } + + void replaceAtHead(long key, Chain expected, Chain replacement) { + storeProxy.replaceAtHead(key, expected, replacement); + } + + @Override + protected ValueHolder getInternal(K key) throws StoreAccessException, TimeoutException { + try { + ServerStoreProxy.ChainEntry chain = storeProxy.get(extractLongKey(key)); + /* + * XXX : This condition is wrong... it should be "are there any entries for this key in the chain" + * Most sensible fix I can think of right now would be to push the cacheLoaderWriter access in to the chain + * resolver. + */ + if (!chain.isEmpty()) { + return resolver.resolve(chain, key, timeSource.getTimeMillis(), Integer.MAX_VALUE); + } else { + long hash = extractLongKey(key); + lock(hash); + try { + V value; + try { + value = cacheLoaderWriter.load(key); + } catch (Exception e) { + throw new RuntimeException(e); + } + if (value == null) { + return null; + } + append(key, value); + return new ClusteredValueHolder<>(value); + } finally { + unlock(hash, false); + } + } + } catch (RuntimeException re) { + throw handleException(re); + } + } + + private void append(K key, V value) throws TimeoutException { + PutOperation operation = new PutOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + storeProxy.append(extractedKey, payload); + } + + @Override + protected void silentPut(final K key, final V value) throws StoreAccessException { + try { + PutWithWriterOperation operation = new PutWithWriterOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + storeProxy.append(extractedKey, payload); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + protected ValueHolder silentGetAndPut(K key, V value) throws StoreAccessException { + try { + PutWithWriterOperation operation = new PutWithWriterOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + final ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), Integer.MAX_VALUE); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + protected ValueHolder silentPutIfAbsent(K key, V value) throws StoreAccessException { + try { + PutIfAbsentOperation operation = new PutIfAbsentOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), Integer.MAX_VALUE); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + protected ValueHolder silentRemove(K key) throws StoreAccessException { + try { + RemoveOperation operation = new RemoveOperation<>(key, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), Integer.MAX_VALUE); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + protected ValueHolder silentRemove(K key, V value) throws StoreAccessException { + try { + ConditionalRemoveOperation operation = new ConditionalRemoveOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), Integer.MAX_VALUE); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + protected ValueHolder silentReplace(K key, V value) throws StoreAccessException { + try { + ReplaceOperation operation = new ReplaceOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), Integer.MAX_VALUE); + } catch (Exception re) { + throw handleException(re); + } + } + + protected ValueHolder silentReplace(K key, V oldValue, V newValue) throws StoreAccessException { + try { + ConditionalReplaceOperation operation = new ConditionalReplaceOperation<>(key, oldValue, newValue, timeSource + .getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), Integer.MAX_VALUE); + } catch (Exception re) { + throw handleException(re); + } + } + + public class WriteBehindServerCallback implements ServerStoreProxy.ServerCallback { + + private final ServerStoreProxy.ServerCallback delegate; + + WriteBehindServerCallback(ServerStoreProxy.ServerCallback delegate) { + this.delegate = delegate; + } + + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + this.delegate.onInvalidateHash(hash, evictedChain); + } + + @Override + public void onInvalidateAll() { + this.delegate.onInvalidateAll(); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + this.delegate.onAppend(beforeAppend, appended); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + this.delegate.compact(chain); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain, long hash) { + clusteredWriteBehind.flushWriteBehindQueue(chain, hash); + } + } + + private ServerStoreProxy.ServerCallback getWriteBehindServerCallback(ServerStoreProxy.ServerCallback delegate) { + return new WriteBehindServerCallback(delegate); + } + + /** + * Provider of {@link ClusteredWriteBehindStore} instances. + */ + @ServiceDependencies({ TimeSourceService.class, ClusteringService.class}) + public static class Provider extends ClusteredLoaderWriterStore.Provider { + @Override + protected ClusteredStore createStore(Configuration storeConfig, + OperationsCodec codec, + ChainResolver resolver, + TimeSource timeSource, + boolean useLoaderInAtomics, + Object[] serviceConfigs) { + WriteBehindConfiguration writeBehindConfiguration = findSingletonAmongst(WriteBehindConfiguration.class, serviceConfigs); + if (writeBehindConfiguration != null) { + ExecutorService executorService = + executionService.getOrderedExecutor(writeBehindConfiguration.getThreadPoolAlias(), + new LinkedBlockingQueue<>()); + StoreEventDispatcher storeEventDispatcher = new DefaultStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()); + return new ClusteredWriteBehindStore<>(storeConfig, + codec, + resolver, + timeSource, + storeConfig.getCacheLoaderWriter(), + executorService, + storeEventDispatcher, getServiceProvider().getService(StatisticsService.class)); + } + throw new AssertionError(); + } + + @Override + protected ServerStoreProxy.ServerCallback getServerCallback(ClusteredStore clusteredStore) { + if (clusteredStore instanceof ClusteredWriteBehindStore) { + return ((ClusteredWriteBehindStore)clusteredStore).getWriteBehindServerCallback(super.getServerCallback(clusteredStore)); + } + throw new AssertionError(); + } + + @Override + public int rank(Set> resourceTypes, Collection> serviceConfigs) { + int parentRank = super.rank(resourceTypes, serviceConfigs); + if (parentRank == 0 || serviceConfigs.stream().noneMatch(WriteBehindConfiguration.class::isInstance)) { + return 0; + } + return parentRank + 1; + } + + @Override + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + int parentRank = super.rankAuthority(authorityResource, serviceConfigs); + if (parentRank == 0 || serviceConfigs.stream().noneMatch(WriteBehindConfiguration.class::isInstance)) { + return 0; + } + return parentRank + 1; + } + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStoreProviderFactory.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStoreProviderFactory.java new file mode 100644 index 0000000000..434468a148 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStoreProviderFactory.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter.writebehind; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; + +@Component +public class ClusteredWriteBehindStoreProviderFactory implements ServiceFactory { + + @Override + public ClusteredWriteBehindStore.Provider create(ServiceCreationConfiguration configuration) { + return new ClusteredWriteBehindStore.Provider(); + } + + @Override + public Class getServiceType() { + return ClusteredWriteBehindStore.Provider.class; + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClient.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClient.java similarity index 98% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClient.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClient.java index 24c9cbe7a8..1c9c6ab212 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClient.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClient.java @@ -24,7 +24,6 @@ import org.terracotta.connection.entity.Entity; import org.terracotta.entity.EndpointDelegate; import org.terracotta.entity.EntityClientEndpoint; -import org.terracotta.entity.EntityResponse; import org.terracotta.entity.InvokeFuture; import org.terracotta.entity.MessageCodecException; import org.terracotta.exception.EntityException; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockEntityClientService.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockEntityClientService.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockEntityClientService.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockEntityClientService.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterStateRepository.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterStateRepository.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterStateRepository.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterStateRepository.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierCreationException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierCreationException.java similarity index 92% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierCreationException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierCreationException.java index 8b7b057912..a66b9d606f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierCreationException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierCreationException.java @@ -21,6 +21,8 @@ */ public class ClusterTierCreationException extends ClusterTierException { + private static final long serialVersionUID = 6048350791384030212L; + public ClusterTierCreationException(String message, Throwable cause) { super(message, cause); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierDestructionException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierDestructionException.java similarity index 92% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierDestructionException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierDestructionException.java index 5d023d2da6..eb16b351e6 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierDestructionException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierDestructionException.java @@ -21,6 +21,8 @@ */ public class ClusterTierDestructionException extends ClusterTierException { + private static final long serialVersionUID = -7314374512451335435L; + public ClusterTierDestructionException(String message, Throwable cause) { super(message, cause); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierException.java similarity index 92% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierException.java index e61027fe22..458d1da3d7 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierException.java @@ -21,6 +21,8 @@ */ public abstract class ClusterTierException extends Exception { + private static final long serialVersionUID = -4057331870606799775L; + public ClusterTierException(String message, Throwable cause) { super(message, cause); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerConfigurationException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerConfigurationException.java similarity index 93% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerConfigurationException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerConfigurationException.java index 18a3396a30..0c3b012d04 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerConfigurationException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerConfigurationException.java @@ -21,6 +21,8 @@ */ public class ClusterTierManagerConfigurationException extends ClusterTierException { + private static final long serialVersionUID = 6540327268333174996L; + public ClusterTierManagerConfigurationException(String message, Throwable cause) { super(message, cause); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierReleaseException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierReleaseException.java similarity index 92% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierReleaseException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierReleaseException.java index d5a28a2bce..a76e707f92 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierReleaseException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierReleaseException.java @@ -21,6 +21,8 @@ */ public class ClusterTierReleaseException extends ClusterTierException { + private static final long serialVersionUID = -1595496769881016663L; + public ClusterTierReleaseException(String message, Throwable cause) { super(message, cause); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierValidationException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierValidationException.java similarity index 92% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierValidationException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierValidationException.java index e74145cef5..f6a138ba56 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierValidationException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusterTierValidationException.java @@ -21,6 +21,8 @@ */ public class ClusterTierValidationException extends ClusterTierException { + private static final long serialVersionUID = 7363986054006535780L; + public ClusterTierValidationException(String message, Throwable cause) { super(message, cause); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredMapException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredMapException.java similarity index 93% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredMapException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredMapException.java index f4c11c4d4c..02edee8743 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredMapException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredMapException.java @@ -18,6 +18,8 @@ public class ClusteredMapException extends RuntimeException { + private static final long serialVersionUID = -7486556137969177116L; + public ClusteredMapException(final String message) { super(message); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java similarity index 85% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java index d835ae6407..c02c4bdc99 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java @@ -17,26 +17,27 @@ package org.ehcache.clustered.client.internal.service; import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; -import org.ehcache.clustered.client.internal.service.DefaultClusteringService; import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; /** * A factory for creating a {@link ClusteringService} instance. * * @author Clifford W. Johnson */ +@Component @ServiceFactory.RequiresConfiguration public class ClusteringServiceFactory implements ServiceFactory { @Override - public ClusteringService create(final ServiceCreationConfiguration configuration) { + public ClusteringService create(final ServiceCreationConfiguration configuration) { return new DefaultClusteringService((ClusteringServiceConfiguration) configuration); } @Override - public Class getServiceType() { - return ClusteringService.class; + public Class getServiceType() { + return DefaultClusteringService.class; } } diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ConnectionState.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ConnectionState.java new file mode 100644 index 0000000000..d6d92a04f4 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ConnectionState.java @@ -0,0 +1,363 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.service; + +import org.ehcache.CachePersistenceException; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration.ClientMode; +import org.ehcache.clustered.client.config.Timeouts; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntity; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory; +import org.ehcache.clustered.client.internal.ClusterTierManagerCreationException; +import org.ehcache.clustered.client.internal.ClusterTierManagerValidationException; +import org.ehcache.clustered.client.internal.ConnectionSource; +import org.ehcache.clustered.client.internal.PerpetualCachePersistenceException; +import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity; +import org.ehcache.clustered.client.service.EntityBusyException; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.exceptions.DestroyInProgressException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.connection.Connection; +import org.terracotta.connection.ConnectionException; +import org.terracotta.connection.ConnectionPropertyNames; +import org.terracotta.exception.ConnectionClosedException; +import org.terracotta.exception.ConnectionShutdownException; +import org.terracotta.exception.EntityAlreadyExistsException; +import org.terracotta.exception.EntityNotFoundException; + +import java.io.IOException; +import java.util.Properties; +import java.util.Random; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.Executor; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicInteger; + +import static java.util.Objects.requireNonNull; + +class ConnectionState { + + private static final Logger LOGGER = LoggerFactory.getLogger(ConnectionState.class); + + private static final String CONNECTION_PREFIX = "Ehcache:"; + + private volatile Executor asyncWorker; + private volatile Connection clusterConnection = null; + private volatile ClusterTierManagerClientEntityFactory entityFactory = null; + private volatile ClusterTierManagerClientEntity entity = null; + + private final AtomicInteger reconnectCounter = new AtomicInteger(); + private final ConcurrentMap clusterTierEntities = new ConcurrentHashMap<>(); + private final Timeouts timeouts; + private final ConnectionSource connectionSource; + private final String entityIdentifier; + private final Properties connectionProperties; + private final ClusteringServiceConfiguration serviceConfiguration; + + private Runnable connectionRecoveryListener = () -> {}; + + ConnectionState(Timeouts timeouts, Properties connectionProperties, ClusteringServiceConfiguration serviceConfiguration) { + this.timeouts = timeouts; + this.connectionSource = serviceConfiguration.getConnectionSource(); + this.entityIdentifier = connectionSource.getClusterTierManager(); + this.connectionProperties = connectionProperties; + connectionProperties.put(ConnectionPropertyNames.CONNECTION_NAME, CONNECTION_PREFIX + entityIdentifier); + connectionProperties.put(ConnectionPropertyNames.CONNECTION_TIMEOUT, Long.toString(timeouts.getConnectionTimeout().toMillis())); + this.serviceConfiguration = serviceConfiguration; + } + + public void setConnectionRecoveryListener(Runnable connectionRecoveryListener) { + this.connectionRecoveryListener = connectionRecoveryListener; + } + + public Connection getConnection() { + return clusterConnection; + } + + public ClusterTierClientEntity getClusterTierClientEntity(String cacheId) { + return clusterTierEntities.get(cacheId); + } + + public ClusterTierManagerClientEntityFactory getEntityFactory() { + return entityFactory; + } + + public ClusterTierManagerClientEntity getEntity() { + return entity; + } + + public ClusterTierClientEntity createClusterTierClientEntity(String cacheId, + ServerStoreConfiguration clientStoreConfiguration, boolean isReconnect) + throws CachePersistenceException { + ClusterTierClientEntity storeClientEntity; + while (true) { + try { + storeClientEntity = entityFactory.fetchOrCreateClusteredStoreEntity(entityIdentifier, cacheId, + clientStoreConfiguration, serviceConfiguration.getClientMode(), isReconnect); + clusterTierEntities.put(cacheId, storeClientEntity); + break; + } catch (EntityNotFoundException e) { + throw new PerpetualCachePersistenceException("Cluster tier proxy '" + cacheId + "' for entity '" + entityIdentifier + "' does not exist.", e); + } catch (ConnectionClosedException | ConnectionShutdownException e) { + LOGGER.info("Disconnected from the server", e); + handleConnectionClosedException(true); + } + } + + return storeClientEntity; + } + + public void removeClusterTierClientEntity(String cacheId) { + clusterTierEntities.remove(cacheId); + } + + public void initClusterConnection(Executor asyncWorker) { + this.asyncWorker = requireNonNull(asyncWorker); + try { + connect(); + } catch (ConnectionClosedException | ConnectionException ex) { + LOGGER.error("Initial connection failed due to", ex); + throw new RuntimeException(ex); + } + } + + private void reconnect() { + while (true) { + try { + try { + //Ensure full closure of existing connection + clusterConnection.close(); + } catch (IOException | ConnectionClosedException | IllegalStateException e) { + LOGGER.debug("Exception closing previous cluster connection", e); + } + connect(); + LOGGER.info("New connection to server is established, reconnect count is {}", reconnectCounter.incrementAndGet()); + break; + } catch (ConnectionClosedException | ConnectionException e) { + LOGGER.error("Re-connection to server failed, trying again", e); + } + } + } + + private void connect() throws ConnectionException { + clusterConnection = connectionSource.connect(connectionProperties); + entityFactory = new ClusterTierManagerClientEntityFactory(clusterConnection, asyncWorker, timeouts); + } + + public void closeConnection() { + Connection conn = clusterConnection; + clusterConnection = null; + if(conn != null) { + try { + conn.close(); + } catch (IOException | ConnectionShutdownException e) { + LOGGER.warn("Error closing cluster connection: " + e); + } + } + } + + private boolean silentDestroyUtil() { + try { + silentDestroy(); + return true; + } catch (ConnectionClosedException | ConnectionShutdownException e) { + LOGGER.info("Disconnected from the server", e); + reconnect(); + return false; + } + } + + private void silentDestroy() { + LOGGER.debug("Found a broken ClusterTierManager - trying to clean it up"); + try { + // Random sleep to enable racing clients to have a window to do the cleanup + Thread.sleep(ThreadLocalRandom.current().nextInt(1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + try { + entityFactory.destroy(entityIdentifier); + } catch (EntityBusyException e) { + // Ignore - we have a racy client + LOGGER.debug("ClusterTierManager {} marked busy when trying to clean it up", entityIdentifier); + } + } + + public void acquireLeadership() { + if (!entityFactory.acquireLeadership(entityIdentifier)) { + entityFactory = null; + closeConnection(); + throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease"); + } + } + + public void initializeState() throws ClusterTierManagerValidationException { + try { + switch (serviceConfiguration.getClientMode()) { + case CONNECT: + case EXPECTING: + retrieveEntity(); + break; + case AUTO_CREATE: + case AUTO_CREATE_ON_RECONNECT: + autoCreateEntity(); + break; + default: + throw new AssertionError(serviceConfiguration.getClientMode()); + } + } catch (Throwable t) { + entityFactory = null; + closeConnection(); + throw t; + } + } + + private void retrieveEntity() throws ClusterTierManagerValidationException { + try { + entity = entityFactory.retrieve(entityIdentifier, serviceConfiguration.getServerConfiguration()); + } catch (DestroyInProgressException | EntityNotFoundException e) { + throw new IllegalStateException("The cluster tier manager '" + entityIdentifier + "' does not exist." + + " Please review your configuration.", e); + } catch (TimeoutException e) { + throw new RuntimeException("Could not connect to the cluster tier manager '" + entityIdentifier + + "'; retrieve operation timed out", e); + } + } + + public void destroyState(boolean healthyConnection) { + if (entityFactory != null) { + // proactively abandon any acquired read or write locks on a healthy connection + entityFactory.abandonAllHolds(entityIdentifier, healthyConnection); + } + entityFactory = null; + + clusterTierEntities.clear(); + entity = null; + } + + public void destroyAll() throws CachePersistenceException { + LOGGER.info("destroyAll called for cluster tiers on {}", connectionSource); + + while (true) { + try { + entityFactory.destroy(entityIdentifier); + break; + } catch (EntityBusyException e) { + throw new CachePersistenceException("Cannot delete cluster tiers on " + connectionSource, e); + } catch (ConnectionClosedException | ConnectionShutdownException e) { + handleConnectionClosedException(false); + } + } + } + + public void destroy(String name) throws CachePersistenceException { + // will happen when in maintenance mode + while (true) { + if (entity == null) { + try { + entity = entityFactory.retrieve(entityIdentifier, serviceConfiguration.getServerConfiguration()); + } catch (EntityNotFoundException e) { + // No entity on the server, so no need to destroy anything + break; + } catch (TimeoutException e) { + throw new CachePersistenceException("Could not connect to the cluster tier manager '" + entityIdentifier + + "'; retrieve operation timed out", e); + } catch (DestroyInProgressException e) { + if (silentDestroyUtil()) { + // Nothing left to do + break; + } + } catch (ConnectionClosedException | ConnectionShutdownException e) { + reconnect(); + } + } + + try { + if (entity != null) { + entityFactory.destroyClusteredStoreEntity(entityIdentifier, name); + break; + } + } catch (EntityNotFoundException e) { + // Ignore - does not exist, nothing to destroy + LOGGER.debug("Destruction of cluster tier {} failed as it does not exist", name); + break; + } catch (ConnectionClosedException | ConnectionShutdownException e) { + handleConnectionClosedException(false); + } + } + } + + private void autoCreateEntity() throws ClusterTierManagerValidationException, IllegalStateException { + while (true) { + try { + entityFactory.create(entityIdentifier, serviceConfiguration.getServerConfiguration()); + } catch (ClusterTierManagerCreationException e) { + throw new IllegalStateException("Could not create the cluster tier manager '" + entityIdentifier + "'.", e); + } catch (EntityAlreadyExistsException | EntityBusyException e) { + //ignore - entity already exists - try to retrieve + } catch (ConnectionClosedException | ConnectionShutdownException e) { + LOGGER.info("Disconnected from the server", e); + reconnect(); + continue; + } + + try { + entity = entityFactory.retrieve(entityIdentifier, serviceConfiguration.getServerConfiguration()); + break; + } catch (DestroyInProgressException e) { + silentDestroyUtil(); + } catch (EntityNotFoundException e) { + //ignore - loop and try to create + } catch (TimeoutException e) { + throw new RuntimeException("Could not connect to the cluster tier manager '" + entityIdentifier + + "'; retrieve operation timed out", e); + } catch (ConnectionClosedException | ConnectionShutdownException e) { + LOGGER.info("Disconnected from the server", e); + reconnect(); + } + } + + } + + private void handleConnectionClosedException(boolean retrieve) throws ClusterTierManagerValidationException { + while (true) { + try { + destroyState(false); + reconnect(); + if (retrieve) { + if (serviceConfiguration.getClientMode().equals(ClientMode.AUTO_CREATE_ON_RECONNECT)) { + autoCreateEntity(); + } else { + retrieveEntity(); + } + } + connectionRecoveryListener.run(); + break; + } catch (ConnectionClosedException | ConnectionShutdownException e) { + LOGGER.info("Disconnected from the server", e); + } + } + } + + //Only for test + int getReconnectCount() { + return reconnectCounter.get(); + } + +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java new file mode 100644 index 0000000000..eeff2f39c5 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -0,0 +1,386 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.service; + +import org.ehcache.CachePersistenceException; +import org.ehcache.clustered.client.internal.ClusterTierManagerValidationException; +import org.ehcache.clustered.client.internal.PerpetualCachePersistenceException; +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.ClusteredResourceType; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.internal.loaderwriter.writebehind.ClusteredWriteBehindStore; +import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity; +import org.ehcache.clustered.client.internal.store.EventualServerStoreProxy; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.client.internal.store.StrongServerStoreProxy; +import org.ehcache.clustered.client.internal.store.lock.LockManager; +import org.ehcache.clustered.client.internal.store.lock.LockingServerStoreProxyImpl; +import org.ehcache.clustered.client.service.ClientEntityFactory; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.client.service.EntityService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.service.MaintainableService; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.connection.Connection; +import org.terracotta.connection.entity.Entity; + +import java.util.Collection; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeoutException; +import java.util.stream.Stream; + +/** + * Provides support for accessing server-based cluster services. + */ +public class DefaultClusteringService implements ClusteringService, EntityService { + + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultClusteringService.class); + + static final String CONNECTION_PREFIX = "Ehcache:"; + + private final ClusteringServiceConfiguration configuration; + private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap<>(); + private final ConnectionState connectionState; + + private final Set reconnectSet = ConcurrentHashMap.newKeySet(); + private final Collection connectionRecoveryListeners = new CopyOnWriteArrayList<>(); + + private volatile boolean inMaintenance = false; + private ExecutorService asyncExecutor; + + DefaultClusteringService(ClusteringServiceConfiguration configuration) { + this.configuration = configuration; + Properties properties = configuration.getProperties(); + this.connectionState = new ConnectionState(configuration.getTimeouts(), properties, configuration); + this.connectionState.setConnectionRecoveryListener(() -> connectionRecoveryListeners.forEach(Runnable::run)); + } + + @Override + public void addConnectionRecoveryListener(Runnable runnable) { + connectionRecoveryListeners.add(runnable); + } + + @Override + public void removeConnectionRecoveryListener(Runnable runnable) { + connectionRecoveryListeners.remove(runnable); + } + + @Override + public ClusteringServiceConfiguration getConfiguration() { + return this.configuration; + } + + @Override + public ClientEntityFactory newClientEntityFactory(String entityIdentifier, Class entityType, long entityVersion, C configuration) { + return new AbstractClientEntityFactory(entityIdentifier, entityType, entityVersion, configuration) { + @Override + protected Connection getConnection() { + if (!isConnected()) { + throw new IllegalStateException(getClass().getSimpleName() + " not started."); + } + return connectionState.getConnection(); + } + }; + } + + @Override + public boolean isConnected() { + return connectionState.getConnection() != null; + } + + @Override + public void start(final ServiceProvider serviceProvider) { + try { + asyncExecutor = createAsyncWorker(); + connectionState.initClusterConnection(asyncExecutor); + connectionState.initializeState(); + } catch (ClusterTierManagerValidationException e) { + throw new RuntimeException(e); + } + } + + @Override + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + asyncExecutor = createAsyncWorker(); + connectionState.initClusterConnection(asyncExecutor); + if(maintenanceScope == MaintenanceScope.CACHE_MANAGER) { + connectionState.acquireLeadership(); + } + inMaintenance = true; + } + + @Override + public void stop() { + LOGGER.info("Closing connection to cluster {}", configuration.getConnectionSource()); + + /* + * Entity close() operations must *not* be called; if the server connection is disconnected, the entity + * close operations will stall attempting to communicate with the server. (EntityClientEndpointImpl.close() + * calls a "closeHook" method provided by ClientEntityManagerImpl which ultimately winds up in + * InFlightMessage.waitForAcks -- a method that can wait forever.) Theoretically, the connection close will + * take care of server-side cleanup in the event the server is connected. + */ + connectionState.destroyState(true); + inMaintenance = false; + asyncExecutor.shutdown(); + connectionState.closeConnection(); + } + + @Override + public void destroyAll() throws CachePersistenceException { + if (!inMaintenance) { + throw new IllegalStateException("Maintenance mode required"); + } + connectionState.destroyAll(); + } + + @Override + public boolean handlesResourceType(ResourceType resourceType) { + return Stream.of(ClusteredResourceType.Types.values()).anyMatch(t -> t.equals(resourceType)); + } + + @Override + public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) { + ClusteredSpace clusteredSpace = knownPersistenceSpaces.get(name); + if(clusteredSpace != null) { + return clusteredSpace.identifier; + } else { + ClusteredCacheIdentifier cacheIdentifier = new DefaultClusterCacheIdentifier(name); + clusteredSpace = knownPersistenceSpaces.putIfAbsent(name, new ClusteredSpace(cacheIdentifier)); + if(clusteredSpace == null) { + return cacheIdentifier; + } else { + return clusteredSpace.identifier; + } + } + } + + @Override + public void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { + ClusteredCacheIdentifier clusterCacheIdentifier = (ClusteredCacheIdentifier) identifier; + if (knownPersistenceSpaces.remove(clusterCacheIdentifier.getId()) == null) { + throw new PerpetualCachePersistenceException("Unknown identifier: " + clusterCacheIdentifier); + } + } + + @Override + public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { + ClusteredCacheIdentifier clusterCacheIdentifier = (ClusteredCacheIdentifier) identifier; + ClusteredSpace clusteredSpace = knownPersistenceSpaces.get(clusterCacheIdentifier.getId()); + if (clusteredSpace == null) { + throw new PerpetualCachePersistenceException("Clustered space not found for identifier: " + clusterCacheIdentifier); + } + ConcurrentMap stateRepositories = clusteredSpace.stateRepositories; + ClusterStateRepository currentRepo = stateRepositories.get(name); + if(currentRepo != null) { + return currentRepo; + } else { + ClusterStateRepository newRepo = new ClusterStateRepository(clusterCacheIdentifier, name, + connectionState.getClusterTierClientEntity(clusterCacheIdentifier.getId())); + currentRepo = stateRepositories.putIfAbsent(name, newRepo); + if (currentRepo == null) { + return newRepo; + } else { + return currentRepo; + } + } + } + + private void checkStarted() { + if(!isStarted()) { + throw new IllegalStateException(getClass().getName() + " should be started to call destroy"); + } + } + + @Override + public void destroy(String name) throws CachePersistenceException { + checkStarted(); + connectionState.destroy(name); + } + + private boolean isStarted() { + return connectionState.getEntityFactory() != null; + } + + @Override + public ServerStoreProxy getServerStoreProxy(ClusteredCacheIdentifier cacheIdentifier, + Store.Configuration storeConfig, + Consistency configuredConsistency, + ServerCallback invalidation) throws CachePersistenceException { + final String cacheId = cacheIdentifier.getId(); + + if (configuredConsistency == null) { + throw new NullPointerException("Consistency cannot be null"); + } + + /* + * This method is expected to be called with exactly ONE ClusteredResourcePool specified. + */ + ClusteredResourcePool clusteredResourcePool = null; + for (ClusteredResourceType type : ClusteredResourceType.Types.values()) { + ClusteredResourcePool pool = storeConfig.getResourcePools().getPoolForResource(type); + if (pool != null) { + if (clusteredResourcePool != null) { + throw new IllegalStateException("At most one clustered resource supported for a cache"); + } + clusteredResourcePool = pool; + } + } + if (clusteredResourcePool == null) { + throw new IllegalStateException("A clustered resource is required for a clustered cache"); + } + + ServerStoreConfiguration clientStoreConfiguration = new ServerStoreConfiguration( + clusteredResourcePool.getPoolAllocation(), + storeConfig.getKeyType().getName(), + storeConfig.getValueType().getName(), + (storeConfig.getKeySerializer() == null ? null : storeConfig.getKeySerializer().getClass().getName()), + (storeConfig.getValueSerializer() == null ? null : storeConfig.getValueSerializer().getClass().getName()), + configuredConsistency, storeConfig.getCacheLoaderWriter() != null, + invalidation instanceof ClusteredWriteBehindStore.WriteBehindServerCallback); + + ClusterTierClientEntity storeClientEntity = connectionState.createClusterTierClientEntity(cacheId, clientStoreConfiguration, reconnectSet.remove(cacheId)); + + ServerStoreProxy serverStoreProxy; + switch (configuredConsistency) { + case STRONG: + serverStoreProxy = new StrongServerStoreProxy(cacheId, storeClientEntity, invalidation); + break; + case EVENTUAL: + serverStoreProxy = new EventualServerStoreProxy(cacheId, storeClientEntity, invalidation); + break; + default: + throw new AssertionError("Unknown consistency : " + configuredConsistency); + } + + try { + try { + storeClientEntity.validate(clientStoreConfiguration); + } catch (ClusterTierValidationException e) { + throw new PerpetualCachePersistenceException("Unable to create cluster tier proxy '" + cacheIdentifier.getId() + "' for entity '" + + configuration.getConnectionSource().getClusterTierManager() + "'", e); + } catch (ClusterTierException e) { + throw new CachePersistenceException("Unable to create cluster tier proxy '" + cacheIdentifier.getId() + "' for entity '" + + configuration.getConnectionSource().getClusterTierManager() + "'", e); + } catch (TimeoutException e) { + throw new CachePersistenceException("Unable to create cluster tier proxy '" + cacheIdentifier.getId() + "' for entity '" + + configuration.getConnectionSource().getClusterTierManager() + "'; validate operation timed out", e); + } + } catch (Throwable t) { + try { + serverStoreProxy.close(); + } catch (Throwable u) { + t.addSuppressed(u); + } + throw t; + } + + if (storeConfig.getCacheLoaderWriter() != null) { + LockManager lockManager = new LockManager(storeClientEntity); + serverStoreProxy = new LockingServerStoreProxyImpl(serverStoreProxy, lockManager); + } + + return serverStoreProxy; + } + + @Override + public void releaseServerStoreProxy(ServerStoreProxy storeProxy, boolean isReconnect) { + connectionState.removeClusterTierClientEntity(storeProxy.getCacheId()); + if (!isReconnect) { + storeProxy.close(); + } else { + reconnectSet.add(storeProxy.getCacheId()); + } + } + + /** + * Supplies the identifier to use for identifying a client-side cache to its server counterparts. + */ + private static class DefaultClusterCacheIdentifier implements ClusteredCacheIdentifier { + + private final String id; + + DefaultClusterCacheIdentifier(final String id) { + this.id = id; + } + + @Override + public String getId() { + return this.id; + } + + @Override + public Class getServiceType() { + return ClusteringService.class; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "@" + id; + } + } + + private static class ClusteredSpace { + + private final ClusteredCacheIdentifier identifier; + private final ConcurrentMap stateRepositories; + + ClusteredSpace(final ClusteredCacheIdentifier identifier) { + this.identifier = identifier; + this.stateRepositories = new ConcurrentHashMap<>(); + } + } + + // for test purposes + public ConnectionState getConnectionState() { + return connectionState; + } + + private static ExecutorService createAsyncWorker() { + SecurityManager s = System.getSecurityManager(); + ThreadGroup initialGroup = (s != null) ? s.getThreadGroup() : Thread.currentThread().getThreadGroup(); + return Executors.newSingleThreadExecutor(r -> { + ThreadGroup group = initialGroup; + while (group != null && group.isDestroyed()) { + ThreadGroup parent = group.getParent(); + if (parent == null) { + break; + } else { + group = parent; + } + } + Thread t = new Thread(group, r, "Async DefaultClusteringService Worker"); + t.setDaemon(true); + return t; + }); + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ValueCodec.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ValueCodec.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ValueCodec.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ValueCodec.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ValueCodecFactory.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ValueCodecFactory.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ValueCodecFactory.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/service/ValueCodecFactory.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntity.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntity.java similarity index 90% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntity.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntity.java index b335f247b9..9d4516c6d0 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntity.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntity.java @@ -51,12 +51,14 @@ public interface ClusterTierClientEntity extends Entity { void addResponseListener(Class responseType, ResponseListener responseListener); - void setDisconnectionListener(DisconnectionListener disconnectionListener); + void addDisconnectionListener(DisconnectionListener disconnectionListener); - void setReconnectListener(ReconnectListener reconnectListener); + void addReconnectListener(ReconnectListener reconnectListener); + + void enableEvents(boolean enable) throws ClusterException, TimeoutException; interface ResponseListener { - void onResponse(T response); + void onResponse(T response) throws TimeoutException; } interface DisconnectionListener { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntityService.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntityService.java similarity index 88% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntityService.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntityService.java index 02ca058014..2829d1dd7f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntityService.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierClientEntityService.java @@ -33,7 +33,7 @@ /** * ClusterTierClientEntityService */ -public class ClusterTierClientEntityService implements EntityClientService { +public class ClusterTierClientEntityService implements EntityClientService { private final EntityConfigurationCodec configCodec = new EntityConfigurationCodec(new CommonConfigCodec()); @@ -53,8 +53,9 @@ public ClusterTierEntityConfiguration deserializeConfiguration(byte[] configurat } @Override - public ClusterTierClientEntity create(EntityClientEndpoint endpoint, Void userData) { - return new SimpleClusterTierClientEntity(endpoint); + public ClusterTierClientEntity create(EntityClientEndpoint endpoint, + ClusterTierUserData userData) { + return new SimpleClusterTierClientEntity(endpoint, userData.getTimeouts(), userData.getStoreIdentifier(), userData.getAsyncWorker()); } @Override diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierUserData.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierUserData.java new file mode 100644 index 0000000000..cf5e014c2e --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusterTierUserData.java @@ -0,0 +1,49 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.client.config.Timeouts; + +import java.util.concurrent.Executor; + +/** + * ClusterTierUserData + * + * Additional information passed to client side cluster tier entity. + */ +public class ClusterTierUserData { + private final Timeouts timeouts; + private final String storeIdentifier; + private final Executor asyncWorker; + + public ClusterTierUserData(Timeouts timeouts, String storeIdentifier, Executor asyncWorker) { + this.timeouts = timeouts; + this.storeIdentifier = storeIdentifier; + this.asyncWorker = asyncWorker; + } + + public Timeouts getTimeouts() { + return timeouts; + } + + public String getStoreIdentifier() { + return storeIdentifier; + } + + public Executor getAsyncWorker() { + return asyncWorker; + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java new file mode 100644 index 0000000000..e722a4edcb --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -0,0 +1,1043 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.internal.PerpetualCachePersistenceException; +import org.ehcache.clustered.client.config.ClusteredResourceType; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.client.internal.store.operations.EternalChainResolver; +import org.ehcache.clustered.client.internal.store.operations.ExpiryChainResolver; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.client.service.ClusteringService.ClusteredCacheIdentifier; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.operations.ConditionalRemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.ConditionalReplaceOperation; +import org.ehcache.clustered.common.internal.store.operations.Operation; +import org.ehcache.clustered.common.internal.store.operations.PutIfAbsentOperation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.RemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.ReplaceOperation; +import org.ehcache.clustered.common.internal.store.operations.TimestampOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.config.ResourceType; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.CacheConfigurationChangeListener; +import org.ehcache.core.Ehcache; +import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.events.StoreEventSink; +import org.ehcache.core.spi.service.ExecutionService; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.events.StoreEventFilter; +import org.ehcache.core.spi.store.events.StoreEventListener; +import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.core.statistics.OperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.ehcache.impl.store.BaseStore; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.core.statistics.StoreOperationOutcomes.EvictionOutcome; +import org.ehcache.core.statistics.TierOperationOutcomes; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.store.DefaultStoreEventDispatcher; +import org.ehcache.impl.store.HashUtils; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.ehcache.spi.service.OptionalServiceDependencies; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import static java.util.Collections.emptyIterator; +import static org.ehcache.core.exceptions.StorePassThroughException.handleException; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +/** + * Supports a {@link Store} in a clustered environment. + */ +public class ClusteredStore extends BaseStore implements AuthoritativeTier { + + static final String CHAIN_COMPACTION_THRESHOLD_PROP = "ehcache.client.chain.compaction.threshold"; + static final int DEFAULT_CHAIN_COMPACTION_THRESHOLD = 4; + + private final int chainCompactionLimit; + protected final OperationsCodec codec; + protected final ChainResolver resolver; + + protected final TimeSource timeSource; + private final DelegatingStoreEventDispatcher storeEventDispatcher; + + protected volatile ServerStoreProxy storeProxy; + private volatile InvalidationValve invalidationValve; + + private final OperationObserver getObserver; + private final OperationObserver putObserver; + private final OperationObserver removeObserver; + private final OperationObserver putIfAbsentObserver; + private final OperationObserver conditionalRemoveObserver; + private final OperationObserver replaceObserver; + private final OperationObserver conditionalReplaceObserver; + // Needed for JSR-107 compatibility even if unused + private final OperationObserver evictionObserver; + private final OperationObserver getAndFaultObserver; + + + protected ClusteredStore(Configuration config, OperationsCodec codec, ChainResolver resolver, TimeSource timeSource, StoreEventDispatcher storeEventDispatcher, StatisticsService statisticsService) { + super(config, statisticsService); + + this.chainCompactionLimit = Integer.getInteger(CHAIN_COMPACTION_THRESHOLD_PROP, DEFAULT_CHAIN_COMPACTION_THRESHOLD); + this.codec = codec; + this.resolver = resolver; + this.timeSource = timeSource; + this.storeEventDispatcher = new DelegatingStoreEventDispatcher<>(storeEventDispatcher); + + this.getObserver = createObserver("get", StoreOperationOutcomes.GetOutcome.class, true); + this.putObserver = createObserver("put", StoreOperationOutcomes.PutOutcome.class, true); + this.removeObserver = createObserver("remove", StoreOperationOutcomes.RemoveOutcome.class, true); + this.putIfAbsentObserver = createObserver("putIfAbsent", StoreOperationOutcomes.PutIfAbsentOutcome.class, true); + this.conditionalRemoveObserver = createObserver("conditionalRemove", StoreOperationOutcomes.ConditionalRemoveOutcome.class, true); + this.replaceObserver = createObserver("replace", StoreOperationOutcomes.ReplaceOutcome.class, true); + this.conditionalReplaceObserver = createObserver("conditionalReplace", StoreOperationOutcomes.ConditionalReplaceOutcome.class, true); + this.getAndFaultObserver = createObserver("getAndFault", AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, true); + this.evictionObserver = createObserver("eviction", StoreOperationOutcomes.EvictionOutcome.class, false); + } + + /** + * For tests + */ + protected ClusteredStore(Configuration config, OperationsCodec codec, ChainResolver resolver, ServerStoreProxy proxy, TimeSource timeSource, StoreEventDispatcher storeEventDispatcher, StatisticsService statisticsService) { + this(config, codec, resolver, timeSource, storeEventDispatcher, statisticsService); + this.storeProxy = proxy; + } + + @Override + protected String getStatisticsTag() { + return "Clustered"; + } + + @Override + public ValueHolder get(final K key) throws StoreAccessException { + getObserver.begin(); + ValueHolder value; + try { + value = getInternal(key); + } catch (TimeoutException e) { + getObserver.end(StoreOperationOutcomes.GetOutcome.TIMEOUT); + return null; + } + if(value == null) { + getObserver.end(StoreOperationOutcomes.GetOutcome.MISS); + return null; + } else { + getObserver.end(StoreOperationOutcomes.GetOutcome.HIT); + return value; + } + } + + protected ValueHolder getInternal(K key) throws StoreAccessException, TimeoutException { + try { + ServerStoreProxy.ChainEntry entry = storeProxy.get(extractLongKey(key)); + return resolver.resolve(entry, key, timeSource.getTimeMillis()); + } catch (RuntimeException re) { + throw handleException(re); + } + } + + protected long extractLongKey(K key) { + return HashUtils.intHashToLong(key.hashCode()); + } + + @Override + public boolean containsKey(final K key) throws StoreAccessException { + try { + return getInternal(key) != null; + } catch (TimeoutException e) { + return false; + } + } + + @Override + public PutStatus put(final K key, final V value) throws StoreAccessException { + putObserver.begin(); + silentPut(key, value); + putObserver.end(StoreOperationOutcomes.PutOutcome.PUT); + return PutStatus.PUT; + } + + protected void silentPut(final K key, final V value) throws StoreAccessException { + try { + PutOperation operation = new PutOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + storeProxy.append(extractedKey, payload); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + public ValueHolder getAndPut(K key, V value) throws StoreAccessException { + putObserver.begin(); + ValueHolder oldValue = silentGetAndPut(key, value); + putObserver.end(StoreOperationOutcomes.PutOutcome.PUT); + return oldValue; + } + + protected ValueHolder silentGetAndPut(final K key, final V value) throws StoreAccessException { + try { + PutOperation operation = new PutOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis()); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + public ValueHolder putIfAbsent(final K key, final V value, Consumer put) throws StoreAccessException { + putIfAbsentObserver.begin(); + ValueHolder result = silentPutIfAbsent(key, value); + if(result == null) { + putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.PUT); + return null; + } else { + putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.HIT); + return result; + } + } + + protected ValueHolder silentPutIfAbsent(K key, V value) throws StoreAccessException { + try { + PutIfAbsentOperation operation = new PutIfAbsentOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), chainCompactionLimit); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + public boolean remove(final K key) throws StoreAccessException { + removeObserver.begin(); + if(silentRemove(key) != null) { + removeObserver.end(StoreOperationOutcomes.RemoveOutcome.REMOVED); + return true; + } else { + removeObserver.end(StoreOperationOutcomes.RemoveOutcome.MISS); + return false; + } + } + + public ValueHolder getAndRemove(K key) throws StoreAccessException { + removeObserver.begin(); + ValueHolder value = silentRemove(key); + if(value != null) { + removeObserver.end(StoreOperationOutcomes.RemoveOutcome.REMOVED); + } else { + removeObserver.end(StoreOperationOutcomes.RemoveOutcome.MISS); + } + return value; + } + + protected ValueHolder silentRemove(final K key) throws StoreAccessException { + try { + RemoveOperation operation = new RemoveOperation<>(key, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis()); + } catch (Exception re) { + throw handleException(re); + } + } + + protected ValueHolder silentRemove(K key, V value) throws StoreAccessException { + try { + ConditionalRemoveOperation operation = new ConditionalRemoveOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis()); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + public RemoveStatus remove(final K key, final V value) throws StoreAccessException { + conditionalRemoveObserver.begin(); + ValueHolder result = silentRemove(key, value); + if(result != null) { + if(value.equals(result.get())) { + conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED); + return RemoveStatus.REMOVED; + } else { + conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS); + return RemoveStatus.KEY_PRESENT; + } + } else { + conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS); + return RemoveStatus.KEY_MISSING; + } + } + + @Override + public ValueHolder replace(final K key, final V value) throws StoreAccessException { + replaceObserver.begin(); + + ValueHolder result = silentReplace(key, value); + if(result == null) { + replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.MISS); + return null; + } else { + replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.REPLACED); + return result; + } + } + + protected ValueHolder silentReplace(K key, V value) throws StoreAccessException { + try { + ReplaceOperation operation = new ReplaceOperation<>(key, value, timeSource.getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), chainCompactionLimit); + } catch (Exception re) { + throw handleException(re); + } + } + + protected ValueHolder silentReplace(K key, V oldValue, V newValue) throws StoreAccessException { + try { + ConditionalReplaceOperation operation = new ConditionalReplaceOperation<>(key, oldValue, newValue, timeSource + .getTimeMillis()); + ByteBuffer payload = codec.encode(operation); + long extractedKey = extractLongKey(key); + ServerStoreProxy.ChainEntry chain = storeProxy.getAndAppend(extractedKey, payload); + return resolver.resolve(chain, key, timeSource.getTimeMillis(), chainCompactionLimit); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + public ReplaceStatus replace(final K key, final V oldValue, final V newValue) throws StoreAccessException { + conditionalReplaceObserver.begin(); + ValueHolder result = silentReplace(key, oldValue, newValue); + if(result != null) { + if(oldValue.equals(result.get())) { + conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED); + return ReplaceStatus.HIT; + } else { + conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS); + return ReplaceStatus.MISS_PRESENT; + } + } else { + conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS); + return ReplaceStatus.MISS_NOT_PRESENT; + } + } + + @Override + public void clear() throws StoreAccessException { + try { + storeProxy.clear(); + } catch (Exception re) { + throw handleException(re); + } + } + + @Override + public StoreEventSource getStoreEventSource() { + return storeEventDispatcher; + } + + @Override + public Iterator>> iterator() { + try { + java.util.Iterator> chainIterator = storeProxy.iterator(); + + return new Iterator>>() { + + private java.util.Iterator>> chain = nextChain(); + + @Override + public boolean hasNext() { + return chain.hasNext() || (chain = nextChain()).hasNext(); + } + + @Override + public Cache.Entry> next() { + try { + return chain.next(); + } catch (NoSuchElementException e) { + return (chain = nextChain()).next(); + } + } + + private java.util.Iterator>> nextChain() { + while (chainIterator.hasNext()) { + Map> chainContents = resolver.resolveAll(chainIterator.next().getValue(), timeSource.getTimeMillis()); + if (!chainContents.isEmpty()) { + return chainContents.entrySet().stream().map(entry -> { + K key = entry.getKey(); + + ValueHolder valueHolder = entry.getValue(); + return new Cache.Entry>() { + + @Override + public K getKey() { + return key; + } + + @Override + public ValueHolder getValue() { + return valueHolder; + } + + @Override + public String toString() { + return getKey() + "=" + getValue(); + } + }; + }).iterator(); + } + } + return emptyIterator(); + } + }; + } catch (Exception e) { + return new Iterator>>() { + + private boolean accessed; + + @Override + public boolean hasNext() { + return !accessed; + } + + @Override + public Cache.Entry> next() throws StoreAccessException { + accessed = true; + throw handleException(e); + } + }; + } + } + + @Override + public ValueHolder getAndCompute(final K key, final BiFunction mappingFunction) { + // TODO: Make appropriate ServerStoreProxy call + throw new UnsupportedOperationException("Implement me"); + } + + @Override + public ValueHolder computeAndGet(final K key, final BiFunction mappingFunction, final Supplier replaceEqual, Supplier invokeWriter) { + // TODO: Make appropriate ServerStoreProxy call + throw new UnsupportedOperationException("Implement me"); + } + + @Override + public ValueHolder computeIfAbsent(final K key, final Function mappingFunction) { + // TODO: Make appropriate ServerStoreProxy call + throw new UnsupportedOperationException("Implement me"); + } + + /** + * The assumption is that this method will be invoked only by cache.putAll and cache.removeAll methods. + */ + @Override + public Map> bulkCompute(final Set keys, final Function>, Iterable>> remappingFunction) + throws StoreAccessException { + Map> valueHolderMap = new HashMap<>(); + if(remappingFunction instanceof Ehcache.PutAllFunction) { + Ehcache.PutAllFunction putAllFunction = (Ehcache.PutAllFunction)remappingFunction; + Map entriesToRemap = putAllFunction.getEntriesToRemap(); + for(Map.Entry entry: entriesToRemap.entrySet()) { + silentPut(entry.getKey(), entry.getValue()); + putAllFunction.getActualPutCount().incrementAndGet(); + valueHolderMap.put(entry.getKey(), new ClusteredValueHolder<>(entry.getValue())); + } + } else if(remappingFunction instanceof Ehcache.RemoveAllFunction) { + Ehcache.RemoveAllFunction removeAllFunction = (Ehcache.RemoveAllFunction)remappingFunction; + for (K key : keys) { + boolean removed = silentRemove(key) != null; + if(removed) { + removeAllFunction.getActualRemoveCount().incrementAndGet(); + } + } + } else { + throw new UnsupportedOperationException("This bulkCompute method is not yet capable of handling generic computation functions"); + } + return valueHolderMap; + } + + @Override + public Map> bulkCompute(final Set keys, final Function>, Iterable>> remappingFunction, final Supplier replaceEqual) { + // TODO: Make appropriate ServerStoreProxy call + throw new UnsupportedOperationException("Implement me"); + } + + /** + * The assumption is that this method will be invoked only by cache.getAll method. + */ + @Override + public Map> bulkComputeIfAbsent(final Set keys, final Function, Iterable>> mappingFunction) + throws StoreAccessException { + if(mappingFunction instanceof Ehcache.GetAllFunction) { + Map> map = new HashMap<>(); + for (K key : keys) { + ValueHolder value; + try { + value = getInternal(key); + } catch (TimeoutException e) { + // This timeout handling is safe **only** in the context of a get/read operation! + value = null; + } + map.put(key, value); + } + return map; + } else { + throw new UnsupportedOperationException("This bulkComputeIfAbsent method is not yet capable of handling generic computation functions"); + } + } + + @Override + public List getConfigurationChangeListeners() { + // TODO: Make appropriate ServerStoreProxy call + return Collections.emptyList(); + } + + @Override + public ValueHolder getAndFault(K key) throws StoreAccessException { + getAndFaultObserver.begin(); + ValueHolder value; + try { + value = getInternal(key); + } catch (TimeoutException e) { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT); + return null; + } + if(value == null) { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + return null; + } else { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT); + return value; + } + } + + @Override + public ValueHolder computeIfAbsentAndFault(K key, Function mappingFunction) throws StoreAccessException { + return computeIfAbsent(key, mappingFunction); + } + + @Override + public boolean flush(K key, ValueHolder valueHolder) { + // TODO wire this once metadata are maintained + return true; + } + + @Override + public void setInvalidationValve(InvalidationValve valve) { + this.invalidationValve = valve; + } + + /** + * Provider of {@link ClusteredStore} instances. + */ + @ServiceDependencies({TimeSourceService.class, ClusteringService.class}) + @OptionalServiceDependencies("org.ehcache.core.spi.service.StatisticsService") + public static class Provider extends BaseStoreProvider implements AuthoritativeTier.Provider { + + private static final Logger LOGGER = LoggerFactory.getLogger(Provider.class); + + private static final Set> CLUSTER_RESOURCES; + static { + Set> resourceTypes = new HashSet<>(); + Collections.addAll(resourceTypes, ClusteredResourceType.Types.values()); + CLUSTER_RESOURCES = Collections.unmodifiableSet(resourceTypes); + } + + private volatile ClusteringService clusteringService; + protected volatile ExecutionService executionService; + + private final Lock connectLock = new ReentrantLock(); + private final Map, StoreConfig> createdStores = new ConcurrentWeakIdentityHashMap<>(); + private final Map, OperationStatistic[]> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>(); + + @Override + @SuppressWarnings("unchecked") + protected ClusteredResourceType getResourceType() { + return ClusteredResourceType.Types.UNKNOWN; + } + + @Override + public ClusteredStore createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + ClusteredStore store = createStoreInternal(storeConfig, serviceConfigs); + + tierOperationStatistics.put(store, new OperationStatistic[] { + createTranslatedStatistic(store, "get", TierOperationOutcomes.GET_TRANSLATION, "get"), + createTranslatedStatistic(store, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); + + return store; + } + + private ClusteredStore createStoreInternal(Configuration storeConfig, Object[] serviceConfigs) { + connectLock.lock(); + try { + if (clusteringService == null) { + throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore called without ClusteringServiceConfiguration"); + } + + HashSet> clusteredResourceTypes = + new HashSet<>(storeConfig.getResourcePools().getResourceTypeSet()); + clusteredResourceTypes.retainAll(CLUSTER_RESOURCES); + + if (clusteredResourceTypes.isEmpty()) { + throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore called without ClusteredResourcePools"); + } + if (clusteredResourceTypes.size() != 1) { + throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore can not create clustered tier with multiple clustered resources"); + } + + ClusteredStoreConfiguration clusteredStoreConfiguration = findSingletonAmongst(ClusteredStoreConfiguration.class, serviceConfigs); + if (clusteredStoreConfiguration == null) { + clusteredStoreConfiguration = new ClusteredStoreConfiguration(); + } + ClusteredCacheIdentifier cacheId = findSingletonAmongst(ClusteredCacheIdentifier.class, serviceConfigs); + + TimeSource timeSource = getServiceProvider().getService(TimeSourceService.class).getTimeSource(); + + OperationsCodec codec = new OperationsCodec<>(storeConfig.getKeySerializer(), storeConfig.getValueSerializer()); + + ChainResolver resolver; + ExpiryPolicy expiry = storeConfig.getExpiry(); + if (ExpiryPolicyBuilder.noExpiration().equals(expiry)) { + resolver = new EternalChainResolver<>(codec); + } else { + resolver = new ExpiryChainResolver<>(codec, expiry); + } + + ClusteredStore store = createStore(storeConfig, codec, resolver, timeSource, storeConfig.useLoaderInAtomics(), serviceConfigs); + + createdStores.put(store, new StoreConfig(cacheId, storeConfig, clusteredStoreConfiguration.getConsistency())); + return store; + } finally { + connectLock.unlock(); + } + } + + protected ClusteredStore createStore(Configuration storeConfig, + OperationsCodec codec, + ChainResolver resolver, + TimeSource timeSource, + boolean useLoaderInAtomics, + Object[] serviceConfigs) { + StoreEventDispatcher storeEventDispatcher = new DefaultStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()); + return new ClusteredStore<>(storeConfig, codec, resolver, timeSource, storeEventDispatcher, getServiceProvider().getService(StatisticsService.class)); + } + + @Override + public void releaseStore(Store resource) { + connectLock.lock(); + try { + if (createdStores.remove(resource) == null) { + throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource); + } + ClusteredStore clusteredStore = (ClusteredStore) resource; + this.clusteringService.releaseServerStoreProxy(clusteredStore.storeProxy, false); + getStatisticsService().ifPresent(s -> s.cleanForNode(clusteredStore)); + tierOperationStatistics.remove(clusteredStore); + } finally { + connectLock.unlock(); + } + } + + @Override + public void initStore(Store resource) { + try { + initStoreInternal(resource); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + } + + private void initStoreInternal(Store resource) throws CachePersistenceException { + connectLock.lock(); + try { + StoreConfig storeConfig = createdStores.get(resource); + if (storeConfig == null) { + throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource); + } + ClusteredStore clusteredStore = (ClusteredStore) resource; + ClusteredCacheIdentifier cacheIdentifier = storeConfig.getCacheIdentifier(); + ServerStoreProxy storeProxy = clusteringService.getServerStoreProxy(cacheIdentifier, storeConfig.getStoreConfig(), storeConfig.getConsistency(), + getServerCallback(clusteredStore)); + ReconnectingServerStoreProxy reconnectingServerStoreProxy = new ReconnectingServerStoreProxy(storeProxy, () -> { + Runnable reconnectTask = () -> { + String cacheId = cacheIdentifier.getId(); + connectLock.lock(); + try { + try { + //TODO: handle race between disconnect event and connection closed exception being thrown + // this guy should wait till disconnect event processing is complete. + LOGGER.info("Cache {} got disconnected from cluster, reconnecting", cacheId); + clusteringService.releaseServerStoreProxy(clusteredStore.storeProxy, true); + initStoreInternal(clusteredStore); + LOGGER.info("Cache {} got reconnected to cluster", cacheId); + } catch (PerpetualCachePersistenceException t) { + LOGGER.error("Cache {} failed reconnecting to cluster (failure is perpetual)", cacheId, t); + clusteredStore.setStoreProxy(new FailedReconnectStoreProxy(cacheId, t)); + } + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } finally { + connectLock.unlock(); + } + }; + CompletableFuture.runAsync(reconnectTask, executionService.getUnorderedExecutor(null, new LinkedBlockingQueue<>())); + }); + clusteredStore.setStoreProxy(reconnectingServerStoreProxy); + + Serializer keySerializer = clusteredStore.codec.getKeySerializer(); + if (keySerializer instanceof StatefulSerializer) { + StateRepository stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Key"); + ((StatefulSerializer) keySerializer).init(stateRepository); + } + Serializer valueSerializer = clusteredStore.codec.getValueSerializer(); + if (valueSerializer instanceof StatefulSerializer) { + StateRepository stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Value"); + ((StatefulSerializer) valueSerializer).init(stateRepository); + } + } finally { + connectLock.unlock(); + } + } + + protected ServerCallback getServerCallback(ClusteredStore clusteredStore) { + return new ServerCallback() { + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + StoreEventSink sink = clusteredStore.storeEventDispatcher.eventSink(); + try { + Operation operation = clusteredStore.codec.decode(appended); + K key = operation.getKey(); + + PutOperation resolvedBefore = clusteredStore.resolver.resolve(beforeAppend, key); + PutOperation resolvedNow = clusteredStore.resolver.applyOperation(key, resolvedBefore, + new TimestampOperation<>(key, operation.timeStamp())); + PutOperation resolvedAfter = clusteredStore.resolver.applyOperation(key, resolvedNow, operation); + + /* + * If the old value was expired then we *must* fire expiry before the other event + */ + if (resolvedBefore != null && resolvedNow == null) { + sink.expired(key, resolvedBefore::getValue); + } + + if (resolvedNow == null && resolvedAfter != null) { + sink.created(key, resolvedAfter.getValue()); + } else if (resolvedNow != null && resolvedAfter == null) { + sink.removed(key, resolvedNow::getValue); + } else if (resolvedAfter != resolvedNow) { + sink.updated(key, resolvedNow::getValue, resolvedAfter.getValue()); + } + clusteredStore.storeEventDispatcher.releaseEventSink(sink); + } catch (Exception e) { + clusteredStore.storeEventDispatcher.releaseEventSinkAfterFailure(sink, e); + LOGGER.warn("Error processing server append event", e); + } + } + + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + EvictionOutcome result = EvictionOutcome.SUCCESS; + clusteredStore.evictionObserver.begin(); + if (clusteredStore.invalidationValve != null) { + try { + LOGGER.debug("CLIENT: calling invalidation valve for hash {}", hash); + clusteredStore.invalidationValve.invalidateAllWithHash(hash); + } catch (StoreAccessException sae) { + //TODO: what should be done here? delegate to resilience strategy? + LOGGER.error("Error invalidating hash {}", hash, sae); + result = EvictionOutcome.FAILURE; + } + } + if (evictedChain != null) { + StoreEventSink sink = clusteredStore.storeEventDispatcher.eventSink(); + Map> operationMap = clusteredStore.resolver.resolveAll(evictedChain); + long now = clusteredStore.timeSource.getTimeMillis(); + for (Map.Entry> entry : operationMap.entrySet()) { + K key = entry.getKey(); + ValueHolder valueHolder = entry.getValue(); + if (valueHolder.isExpired(now)) { + sink.expired(key, valueHolder); + } else { + sink.evicted(key, valueHolder); + } + } + clusteredStore.storeEventDispatcher.releaseEventSink(sink); + } + clusteredStore.evictionObserver.end(result); + } + + @Override + public void onInvalidateAll() { + if (clusteredStore.invalidationValve != null) { + try { + LOGGER.debug("CLIENT: calling invalidation valve for all"); + clusteredStore.invalidationValve.invalidateAll(); + } catch (StoreAccessException sae) { + //TODO: what should be done here? delegate to resilience strategy? + LOGGER.error("Error invalidating all", sae); + } + } + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + clusteredStore.resolver.compact(chain); + } + }; + } + + @Override + public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { + if (clusteringService == null || resourceTypes.size() > 1 || Collections.disjoint(resourceTypes, CLUSTER_RESOURCES)) { + // A ClusteredStore requires a ClusteringService *and* ClusteredResourcePool instances + return 0; + } + return 1; + } + + @Override + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + if (clusteringService == null) { + return 0; + } else { + return CLUSTER_RESOURCES.contains(authorityResource) ? 1 : 0; + } + } + + @Override + public void start(final ServiceProvider serviceProvider) { + connectLock.lock(); + try { + super.start(serviceProvider); + this.clusteringService = getServiceProvider().getService(ClusteringService.class); + this.executionService = getServiceProvider().getService(ExecutionService.class); + } finally { + connectLock.unlock(); + } + } + + @Override + public void stop() { + connectLock.lock(); + try { + createdStores.clear(); + } finally { + connectLock.unlock(); + super.stop(); + } + } + + @Override + public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + ClusteredStore authoritativeTier = createStoreInternal(storeConfig, serviceConfigs); + + tierOperationStatistics.put(authoritativeTier, new OperationStatistic[] { + createTranslatedStatistic(authoritativeTier, "get", TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "getAndFault"), + createTranslatedStatistic(authoritativeTier, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); + + return authoritativeTier; + } + + @Override + public void releaseAuthoritativeTier(AuthoritativeTier resource) { + releaseStore(resource); + } + + @Override + public void initAuthoritativeTier(AuthoritativeTier resource) { + initStore(resource); + } + + } + + private void setStoreProxy(ServerStoreProxy storeProxy) throws CachePersistenceException { + // don't change the order of the following two lines or you'll create a race condition that can + // make the server drop some event notifications during a client reconnection + this.storeEventDispatcher.setStoreProxy(storeProxy); + this.storeProxy = storeProxy; + } + + private static class StoreConfig { + + private final ClusteredCacheIdentifier cacheIdentifier; + private final Store.Configuration storeConfig; + private final Consistency consistency; + + StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency) { + this.cacheIdentifier = cacheIdentifier; + this.storeConfig = storeConfig; + this.consistency = consistency; + } + + public Configuration getStoreConfig() { + return this.storeConfig; + } + + public ClusteredCacheIdentifier getCacheIdentifier() { + return this.cacheIdentifier; + } + + public Consistency getConsistency() { + return consistency; + } + } + + static class DelegatingStoreEventDispatcher implements StoreEventDispatcher { + private int listenerCounter; + private ServerStoreProxy storeProxy; // protected by synchronized blocks + private final StoreEventDispatcher delegate; + + DelegatingStoreEventDispatcher(StoreEventDispatcher delegate) { + this.delegate = delegate; + } + + synchronized void setStoreProxy(ServerStoreProxy storeProxy) throws CachePersistenceException { + if (storeProxy != null && listenerCounter > 0) { + try { + storeProxy.enableEvents(true); + } catch (TimeoutException te) { + throw new CachePersistenceException("Error enabling events", te); + } + } + this.storeProxy = storeProxy; + } + + @Override + public StoreEventSink eventSink() { + return delegate.eventSink(); + } + @Override + public void releaseEventSink(StoreEventSink eventSink) { + delegate.releaseEventSink(eventSink); + } + @Override + public void releaseEventSinkAfterFailure(StoreEventSink eventSink, Throwable throwable) { + delegate.releaseEventSinkAfterFailure(eventSink, throwable); + } + @Override + public void reset(StoreEventSink eventSink) { + delegate.reset(eventSink); + } + @Override + public synchronized void addEventListener(StoreEventListener eventListener) { + if (listenerCounter == 0 && storeProxy != null) { + try { + storeProxy.enableEvents(true); + } catch (TimeoutException te) { + throw new RuntimeException("Error enabling events", te); + } + } + if (listenerCounter < Integer.MAX_VALUE) { + listenerCounter++; + } + delegate.addEventListener(eventListener); + } + @Override + public synchronized void removeEventListener(StoreEventListener eventListener) { + if (listenerCounter == 1 && storeProxy != null) { + try { + storeProxy.enableEvents(false); + } catch (TimeoutException te) { + throw new RuntimeException("Error disabling events", te); + } + } + if (listenerCounter > 0) { + listenerCounter--; + } + delegate.removeEventListener(eventListener); + } + @Override + public void addEventFilter(StoreEventFilter eventFilter) { + delegate.addEventFilter(eventFilter); + } + @Override + public void setEventOrdering(boolean ordering) throws IllegalArgumentException { + delegate.setEventOrdering(ordering); + } + + @Override + public void setSynchronous(boolean synchronous) throws IllegalArgumentException { + if (synchronous) { + throw new IllegalArgumentException("Synchronous CacheEventListener is not supported with clustered tiers"); + } else { + delegate.setSynchronous(synchronous); + } + } + + @Override + public boolean isEventOrdering() { + return delegate.isEventOrdering(); + } + } + +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderFactory.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderFactory.java similarity index 85% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderFactory.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderFactory.java index 2f7733ae4d..a141224543 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderFactory.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderFactory.java @@ -18,18 +18,20 @@ import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; /** * Factory to create instances of {@link ClusteredStore.Provider}. */ +@Component public class ClusteredStoreProviderFactory implements ServiceFactory { @Override - public ClusteredStore.Provider create(final ServiceCreationConfiguration configuration) { + public ClusteredStore.Provider create(final ServiceCreationConfiguration configuration) { return new ClusteredStore.Provider(); } @Override - public Class getServiceType() { + public Class getServiceType() { return ClusteredStore.Provider.class; } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredValueHolder.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredValueHolder.java similarity index 87% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredValueHolder.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredValueHolder.java index f11db85e8b..9b6c21d411 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredValueHolder.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredValueHolder.java @@ -22,8 +22,6 @@ public class ClusteredValueHolder extends AbstractValueHolder { - public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; - private final V value; public ClusteredValueHolder(V value) { @@ -39,12 +37,7 @@ public ClusteredValueHolder(V value, long expirationTime) { } @Override - protected TimeUnit nativeTimeUnit() { - return TIME_UNIT; - } - - @Override - public V value() { + public V get() { return value; } } diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java new file mode 100644 index 0000000000..81ef2c294c --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java @@ -0,0 +1,304 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateAll; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateHash; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ServerAppend; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ServerInvalidateHash; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAllAck; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.config.units.MemoryUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.TimeoutException; + +import static java.lang.Math.toIntExact; +import static java.util.Objects.requireNonNull; + +/** + * Provides client-side access to the services of a {@code ServerStore}. + */ +class CommonServerStoreProxy implements ServerStoreProxy { + + private static final int ITERATOR_BATCH_SIZE = toIntExact(MemoryUnit.KB.toBytes(100)); + + private static final Logger LOGGER = LoggerFactory.getLogger(CommonServerStoreProxy.class); + + private final String cacheId; + private final ClusterTierClientEntity entity; + + CommonServerStoreProxy(final String cacheId, final ClusterTierClientEntity entity, final ServerCallback invalidation) { + this.cacheId = requireNonNull(cacheId, "Cache-ID must be non-null"); + this.entity = requireNonNull(entity, "ClusterTierClientEntity must be non-null"); + requireNonNull(invalidation, "ServerCallback must be non-null"); + + entity.addDisconnectionListener(invalidation::onInvalidateAll); + + entity.addResponseListener(ServerAppend.class, response -> { + LOGGER.debug("CLIENT: on cache {}, server append notification", cacheId); + invalidation.onAppend(response.getBeforeAppend(), response.getAppended()); + }); + entity.addResponseListener(ServerInvalidateHash.class, response -> { + long key = response.getKey(); + Chain evictedChain = response.getEvictedChain(); + LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated (evicted chain : {})", cacheId, key, evictedChain); + invalidation.onInvalidateHash(key, evictedChain); + }); + entity.addResponseListener(ClientInvalidateHash.class, response -> { + long key = response.getKey(); + int invalidationId = response.getInvalidationId(); + + LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); + // evicted chain is always null: ClientInvalidateHash is fired when another client did an append, not when the server evicted + invalidation.onInvalidateHash(key, null); + + try { + LOGGER.debug("CLIENT: ack'ing invalidation of hash {} from cache {} (ID {})", key, cacheId, invalidationId); + entity.invokeAndWaitForSend(new ClientInvalidationAck(key, invalidationId), false); + } catch (ClusterException e) { + LOGGER.error("error acking client invalidation of hash {} on cache {}", key, cacheId, e); + } + }); + entity.addResponseListener(ClientInvalidateAll.class, response -> { + int invalidationId = response.getInvalidationId(); + + LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); + invalidation.onInvalidateAll(); + + try { + LOGGER.debug("CLIENT: ack'ing invalidation of all from cache {} (ID {})", cacheId, invalidationId); + entity.invokeAndWaitForSend(new ClientInvalidationAllAck(invalidationId), false); + } catch (ClusterException e) { + LOGGER.error("error acking client invalidation of all on cache {}", cacheId, e); + } + }); + entity.addResponseListener(EhcacheEntityResponse.ResolveRequest.class, response -> { + invalidation.compact(new SimpleEntry(response.getKey(), response.getChain()), response.getKey()); + }); + } + + @Override + public String getCacheId() { + return cacheId; + } + + void addResponseListener(Class listenerClass, SimpleClusterTierClientEntity.ResponseListener listener) { + entity.addResponseListener(listenerClass, listener); + } + + @SuppressWarnings("unchecked") + @Override + public void close() { + entity.close(); + } + + @Override + public ChainEntry get(long key) throws TimeoutException { + EhcacheEntityResponse response; + try { + response = entity.invokeAndWaitForComplete(new GetMessage(key), false); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + if (response != null && response.getResponseType() == EhcacheResponseType.GET_RESPONSE) { + return new SimpleEntry(key, ((EhcacheEntityResponse.GetResponse)response).getChain()); + } else { + throw new ServerStoreProxyException("Response for get operation was invalid : " + + (response != null ? response.getResponseType() : "null message")); + } + } + + @Override + public void append(long key, ByteBuffer payLoad) { + try { + entity.invokeAndWaitForReceive(new AppendMessage(key, payLoad), true); + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } + + @Override + public ChainEntry getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { + EhcacheEntityResponse response; + try { + response = entity.invokeAndWaitForRetired(new GetAndAppendMessage(key, payLoad), true); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + if (response != null && response.getResponseType() == EhcacheResponseType.GET_RESPONSE) { + return new SimpleEntry(key, ((EhcacheEntityResponse.GetResponse)response).getChain()); + } else { + throw new ServerStoreProxyException("Response for getAndAppend operation was invalid : " + + (response != null ? response.getResponseType() : "null message")); + } + } + + @Override + public void enableEvents(boolean enable) { + try { + entity.enableEvents(enable); + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + // TODO: Optimize this method to just send sequences for expect Chain + try { + entity.invokeAndWaitForSend(new ReplaceAtHeadMessage(key, expect, update), false); + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } + + @Override + public void clear() throws TimeoutException { + try { + entity.invokeAndWaitForRetired(new ServerStoreOpMessage.ClearMessage(), true); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } + + @Override + public Iterator> iterator() throws TimeoutException { + EhcacheEntityResponse.IteratorBatch iteratorBatch = openIterator(); + if (iteratorBatch.isLast()) { + return iteratorBatch.getChains().iterator(); + } else { + UUID iteratorId = iteratorBatch.getIdentity(); + return new Iterator>() { + + private boolean lastBatch = false; + private Iterator> batch = iteratorBatch.getChains().iterator(); + + @Override + public boolean hasNext() { + return !lastBatch || batch.hasNext(); + } + + @Override + public Map.Entry next() { + if (lastBatch || batch.hasNext()) { + return batch.next(); + } else { + try { + EhcacheEntityResponse.IteratorBatch batchResponse = fetchBatch(iteratorId); + batch = batchResponse.getChains().iterator(); + lastBatch = batchResponse.isLast(); + return batch.next(); + } catch (TimeoutException e) { + throw new RuntimeException(e); + } + } + } + + @Override + protected void finalize() throws Throwable { + if (!lastBatch) { + entity.invokeAndWaitForReceive(new ServerStoreOpMessage.IteratorCloseMessage(iteratorId), false); + } + } + }; + } + } + + private EhcacheEntityResponse.IteratorBatch openIterator() throws TimeoutException { + return fetchBatch(new ServerStoreOpMessage.IteratorOpenMessage(ITERATOR_BATCH_SIZE)); + } + + private EhcacheEntityResponse.IteratorBatch fetchBatch(UUID id) throws TimeoutException { + return fetchBatch(new ServerStoreOpMessage.IteratorAdvanceMessage(id, ITERATOR_BATCH_SIZE)); + } + + private EhcacheEntityResponse.IteratorBatch fetchBatch(EhcacheOperationMessage message) throws TimeoutException { + EhcacheEntityResponse response; + try { + response = entity.invokeAndWaitForComplete(message, false); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + if (response != null && response.getResponseType() == EhcacheResponseType.ITERATOR_BATCH) { + return (EhcacheEntityResponse.IteratorBatch) response; + } else { + throw new ServerStoreProxyException("Response for iterator operation was invalid : " + + (response != null ? response.getResponseType() : "null message")); + } + } + + private class SimpleEntry implements ChainEntry { + + private final long key; + private final Chain chain; + + public SimpleEntry(long key, Chain chain) { + this.key = key; + this.chain = chain; + } + + @Override + public void append(ByteBuffer payLoad) throws TimeoutException { + CommonServerStoreProxy.this.append(key, payLoad); + } + + @Override + public void replaceAtHead(Chain equivalent) { + CommonServerStoreProxy.this.replaceAtHead(key, chain, equivalent); + } + + @Override + public boolean isEmpty() { + return chain.isEmpty(); + } + + @Override + public int length() { + return chain.length(); + } + + @Override + public Iterator iterator() { + return chain.iterator(); + } + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java similarity index 78% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java index f9f35ab1f3..f5c46b05e3 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java @@ -18,6 +18,8 @@ import org.ehcache.clustered.common.internal.store.Chain; import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.Map; import java.util.concurrent.TimeoutException; public class EventualServerStoreProxy implements ServerStoreProxy { @@ -39,7 +41,7 @@ public void close() { } @Override - public Chain get(long key) throws TimeoutException { + public ChainEntry get(long key) throws TimeoutException { return delegate.get(key); } @@ -49,10 +51,15 @@ public void append(final long key, final ByteBuffer payLoad) throws TimeoutExcep } @Override - public Chain getAndAppend(final long key, final ByteBuffer payLoad) throws TimeoutException { + public ChainEntry getAndAppend(final long key, final ByteBuffer payLoad) throws TimeoutException { return delegate.getAndAppend(key, payLoad); } + @Override + public void enableEvents(boolean enable) throws TimeoutException { + delegate.enableEvents(enable); + } + @Override public void replaceAtHead(long key, Chain expect, Chain update) { delegate.replaceAtHead(key, expect, update); @@ -62,4 +69,9 @@ public void replaceAtHead(long key, Chain expect, Chain update) { public void clear() throws TimeoutException { delegate.clear(); } + + @Override + public Iterator> iterator() throws TimeoutException { + return delegate.iterator(); + } } diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/FailedReconnectStoreProxy.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/FailedReconnectStoreProxy.java new file mode 100644 index 0000000000..430ef8a71b --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/FailedReconnectStoreProxy.java @@ -0,0 +1,89 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.client.internal.store.lock.LockingServerStoreProxy; +import org.ehcache.clustered.common.internal.store.Chain; + +import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.Map; + +public class FailedReconnectStoreProxy implements LockingServerStoreProxy { + private final Throwable failure; + private final String cacheId; + + public FailedReconnectStoreProxy(String cacheId, Throwable failure) { + this.cacheId = cacheId; + this.failure = failure; + } + + @Override + public ChainEntry get(long key) { + throw new RuntimeException("Cache " + getCacheId() + " failed reconnecting to cluster", failure); + } + + @Override + public void append(long key, ByteBuffer payLoad) { + throw new RuntimeException("Cache " + getCacheId() + " failed reconnecting to cluster", failure); + } + + @Override + public ChainEntry getAndAppend(long key, ByteBuffer payLoad) { + throw new RuntimeException("Cache " + getCacheId() + " failed reconnecting to cluster", failure); + } + + @Override + public void enableEvents(boolean enable) { + //do nothing + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + throw new RuntimeException("Cache " + getCacheId() + " failed reconnecting to cluster", failure); + } + + @Override + public void clear() { + throw new RuntimeException("Cache " + getCacheId() + " failed reconnecting to cluster", failure); + } + + @Override + public Iterator> iterator() { + throw new RuntimeException("Cache " + getCacheId() + " failed reconnecting to cluster", failure); + } + + @Override + public String getCacheId() { + return cacheId; + } + + @Override + public void close() { + //ignore + } + + @Override + public ChainEntry lock(long hash) { + throw new RuntimeException("Cache " + getCacheId() + " failed reconnecting to cluster", failure); + } + + @Override + public void unlock(long hash, boolean localonly) { + throw new RuntimeException("Cache " + getCacheId() + " failed reconnecting to cluster", failure); + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/InternalClusterTierClientEntity.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/InternalClusterTierClientEntity.java new file mode 100644 index 0000000000..6ffd28401c --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/InternalClusterTierClientEntity.java @@ -0,0 +1,27 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store; + +/* + * Since this interface has been used historically as the client-side interface that + * identifies a cluster-tier entity it must remain as **the** interface even though + * it is empty. We could remove it and hack up the server entity service to accept + * both variants but this seems like a cleaner and more future proof decision. This + * way if we need to introduce any 'internal' methods we can. + */ +public interface InternalClusterTierClientEntity extends ClusterTierClientEntity { +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ReconnectInProgressException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ReconnectInProgressException.java new file mode 100644 index 0000000000..940edf1f03 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ReconnectInProgressException.java @@ -0,0 +1,26 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +public class ReconnectInProgressException extends RuntimeException { + + private static final long serialVersionUID = 2561046982957750120L; + + public ReconnectInProgressException() { + super("Connection lost to server. Client is trying to reconnect to server"); + } + +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ReconnectingServerStoreProxy.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ReconnectingServerStoreProxy.java new file mode 100644 index 0000000000..c28a8d55f5 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ReconnectingServerStoreProxy.java @@ -0,0 +1,273 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.client.internal.store.lock.LockingServerStoreProxy; +import org.ehcache.clustered.client.internal.store.lock.LockingServerStoreProxyImpl; +import org.ehcache.clustered.common.internal.store.Chain; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.exception.ConnectionClosedException; +import org.terracotta.exception.ConnectionShutdownException; + +import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicReference; + +public class ReconnectingServerStoreProxy implements LockingServerStoreProxy { + + private static final Logger LOGGER = LoggerFactory.getLogger(ReconnectingServerStoreProxy.class); + + private final AtomicReference delegateRef; + private final Runnable onReconnect; + + public ReconnectingServerStoreProxy(ServerStoreProxy serverStoreProxy, Runnable onReconnect) { + if (serverStoreProxy instanceof LockingServerStoreProxy) { + this.delegateRef = new AtomicReference<>((LockingServerStoreProxy) serverStoreProxy); + } else { + this.delegateRef = new AtomicReference<>(unsupportedLocking(serverStoreProxy)); + } + this.onReconnect = onReconnect; + } + + @Override + public String getCacheId() { + return proxy().getCacheId(); + } + + @Override + public void close() { + try { + proxy().close(); + } catch (ConnectionClosedException | ConnectionShutdownException e) { + LOGGER.debug("Store was already closed, since connection was closed"); + } + } + + @Override + public ChainEntry get(long key) throws TimeoutException { + return onStoreProxy(serverStoreProxy -> serverStoreProxy.get(key)); + } + + @Override + public void append(long key, ByteBuffer payLoad) throws TimeoutException { + onStoreProxy(serverStoreProxy -> { + serverStoreProxy.append(key, payLoad); + return null; + }); + } + + @Override + public ChainEntry getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { + return onStoreProxy(serverStoreProxy -> serverStoreProxy.getAndAppend(key, payLoad)); + } + + @Override + public void enableEvents(boolean enable) throws TimeoutException { + onStoreProxy(serverStoreProxy -> { + serverStoreProxy.enableEvents(enable); + return null; + }); + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + try { + onStoreProxy(serverStoreProxy -> { + serverStoreProxy.replaceAtHead(key, expect, update); + return null; + }); + } catch (TimeoutException e) { + throw new RuntimeException(e); + } + } + + @Override + public void clear() throws TimeoutException { + onStoreProxy(serverStoreProxy -> { + serverStoreProxy.clear(); + return null; + }); + } + + @Override + public Iterator> iterator() throws TimeoutException { + return onStoreProxy(LockingServerStoreProxy::iterator); + } + + private LockingServerStoreProxy proxy() { + return delegateRef.get(); + } + + private T onStoreProxy(TimeoutExceptionFunction function) throws TimeoutException { + LockingServerStoreProxy storeProxy = proxy(); + try { + return function.apply(storeProxy); + } catch (ServerStoreProxyException sspe) { + if (sspe.getCause() instanceof ConnectionClosedException) { + if (delegateRef.compareAndSet(storeProxy, new ReconnectInProgressProxy(storeProxy.getCacheId()))) { + onReconnect.run(); + } + return onStoreProxy(function); + } else { + throw sspe; + } + } + } + + @Override + public ChainEntry lock(long key) throws TimeoutException { + return onStoreProxy(lockingServerStoreProxy -> lockingServerStoreProxy.lock(key)); + } + + @Override + public void unlock(long key, boolean localonly) throws TimeoutException { + onStoreProxy(lockingServerStoreProxy -> { + lockingServerStoreProxy.unlock(key, localonly); + return null; + }); + } + + @FunctionalInterface + private interface TimeoutExceptionFunction { + V apply(U u) throws TimeoutException; + } + + private static class ReconnectInProgressProxy implements LockingServerStoreProxy { + + private final String cacheId; + + ReconnectInProgressProxy(String cacheId) { + this.cacheId = cacheId; + } + + @Override + public String getCacheId() { + return this.cacheId; + } + + @Override + public void close() { + throw new ReconnectInProgressException(); + } + + @Override + public ChainEntry get(long key) { + throw new ReconnectInProgressException(); + } + + @Override + public void append(long key, ByteBuffer payLoad) { + throw new ReconnectInProgressException(); + } + + @Override + public ChainEntry getAndAppend(long key, ByteBuffer payLoad) { + throw new ReconnectInProgressException(); + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + throw new ReconnectInProgressException(); + } + + @Override + public void clear() { + throw new ReconnectInProgressException(); + } + + @Override + public Iterator> iterator() { + throw new ReconnectInProgressException(); + } + + @Override + public ChainEntry lock(long key) { + throw new ReconnectInProgressException(); + } + + @Override + public void unlock(long key, boolean localonly) { + throw new ReconnectInProgressException(); + } + + @Override + public void enableEvents(boolean enable) { + throw new ReconnectInProgressException(); + } + } + + private LockingServerStoreProxy unsupportedLocking(ServerStoreProxy serverStoreProxy) { + return new LockingServerStoreProxy() { + @Override + public ChainEntry lock(long hash) { + throw new UnsupportedOperationException("Lock ops are not supported"); + } + + @Override + public void unlock(long hash, boolean localonly) { + throw new UnsupportedOperationException("Lock ops are not supported"); + } + + @Override + public ChainEntry get(long key) throws TimeoutException { + return serverStoreProxy.get(key); + } + + @Override + public ChainEntry getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { + return serverStoreProxy.getAndAppend(key, payLoad); + } + + @Override + public void enableEvents(boolean enable) throws TimeoutException { + serverStoreProxy.enableEvents(enable); + } + + @Override + public String getCacheId() { + return serverStoreProxy.getCacheId(); + } + + @Override + public void close() { + serverStoreProxy.close(); + } + + @Override + public void append(long key, ByteBuffer payLoad) throws TimeoutException { + serverStoreProxy.append(key, payLoad); + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + serverStoreProxy.replaceAtHead(key, expect, update); + } + + @Override + public void clear() throws TimeoutException { + serverStoreProxy.clear(); + } + + @Override + public Iterator> iterator() throws TimeoutException { + return serverStoreProxy.iterator(); + } + }; + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java new file mode 100644 index 0000000000..2eacd7a92a --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java @@ -0,0 +1,150 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.ServerStore; + +import java.nio.ByteBuffer; +import java.util.concurrent.TimeoutException; + +/** + * @author Ludovic Orban + */ +public interface ServerStoreProxy extends ServerStore { + + /** + * {@inheritDoc} + *

+ * {@code ServerStoreProxy} instances return {@link ChainEntry} instances that support mutation of the associated store. + * + * @return the associated chain entry + */ + @Override + ChainEntry get(long key) throws TimeoutException; + + /** + * {@inheritDoc} + *

+ * {@code ServerStoreProxy} instances return {@link ChainEntry} instances that support mutation of the associated store. + * + * @return the associated chain entry + */ + @Override + ChainEntry getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException; + + /** + * The invalidation listener + */ + interface ServerCallback { + /** + * Callback for invalidation of hash requests + * + * @param hash the hash of the keys to invalidate + * @param evictedChain the evicted chain, or null if it wasn't an eviction that triggered the invalidation but + * a change on a different client or when events are disabled. + */ + void onInvalidateHash(long hash, Chain evictedChain); + + /** + * Callback for invalidation of all requests + */ + void onInvalidateAll(); + + /** + * Callback append events + */ + void onAppend(Chain beforeAppend, ByteBuffer appended); + + void compact(ChainEntry chain); + + default void compact(ChainEntry chain, long hash) { + compact(chain); + } + + } + + /** + * Enable or disable event firing from the server + * @param enable {@code true} to enable, {@code false} to disable + */ + void enableEvents(boolean enable) throws TimeoutException; + + + /** + * Gets the identifier linking a client-side cache to a {@code ServerStore} instance. + * + * @return the cache identifier + */ + String getCacheId(); + + /** + * Closes this proxy. + */ + void close(); + + interface ChainEntry extends Chain { + + /** + * Appends the provided binary to this Chain + * While appending, the payLoad is stored in {@link Element}. + * Note that the {@code payLoad}'s position and limit are left untouched. + * + * @param payLoad to be appended + * + * @throws TimeoutException if the append exceeds the timeout configured for write operations + */ + void append(ByteBuffer payLoad) throws TimeoutException; + + + /** + * Replaces the provided Chain with the equivalent Chain present at the head. + * This operation is not guaranteed to succeed. + * The replaceAtHead is successful iff the Chain associated with the key has + * a sub-sequence of elements present as expected.. + * + * If below mapping is present: + * + * hash -> |payLoadA| - |payLoadB| - |payLoadC| + * + * And replaceAtHead(hash, |payLoadA| - |payLoadB| - |payLoadC|, |payLoadC'|) is invoked + * then this operation will succeed & the final mapping would be: + * + * hash -> |payLoadC'| + * + * The same operation will also succeed if the mapping was modified by the time replace was invoked to + * + * hash -> |payLoadA| - |payLoadB| - |payLoadC| - |payLoadD| + * + * Though the final mapping would be: + * + * hash -> |payLoadC'| - |payLoadD| + * + * Failure case: + * + * But before replaceAtHead if it was modified to : + * + * hash -> |payLoadC"| - |payLoadD| + * + * then replaceAtHead(hash, |payLoadA| - |payLoadB| - |payLoadC|, |payLoadC'|) will be ignored. + * Note that the payload's position and limit of all elements of both chains are left untouched. + * + * @param equivalent the new Chain to be replaced + */ + void replaceAtHead(Chain equivalent); + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxyException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxyException.java similarity index 94% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxyException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxyException.java index e2ac646f13..7b406d8271 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxyException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxyException.java @@ -15,10 +15,10 @@ */ package org.ehcache.clustered.client.internal.store; -/** - */ public class ServerStoreProxyException extends RuntimeException { + private static final long serialVersionUID = -3451273597124838171L; + /** * Creates a new exception wrapping the {@link Throwable cause} passed in. * diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/SimpleClusterTierClientEntity.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/SimpleClusterTierClientEntity.java similarity index 78% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/SimpleClusterTierClientEntity.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/SimpleClusterTierClientEntity.java index 3cfbabd1e6..a2aa60dbe4 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/SimpleClusterTierClientEntity.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/SimpleClusterTierClientEntity.java @@ -17,8 +17,6 @@ package org.ehcache.clustered.client.internal.store; import org.ehcache.clustered.client.config.Timeouts; -import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; -import org.ehcache.clustered.client.internal.service.ClusterTierException; import org.ehcache.clustered.client.internal.service.ClusterTierValidationException; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.ClusterException; @@ -31,6 +29,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,9 +48,12 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executor; +import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeoutException; import java.util.function.LongSupplier; +import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.NANOSECONDS; import static org.ehcache.clustered.client.config.Timeouts.nanosStartingFromNow; @@ -61,7 +63,11 @@ public class SimpleClusterTierClientEntity implements InternalClusterTierClientEntity { private static final Logger LOGGER = LoggerFactory.getLogger(SimpleClusterTierClientEntity.class); - private static final Set GET_STORE_OPS = EnumSet.of(EhcacheMessageType.GET_STORE); + private static final Set GET_STORE_OPS = EnumSet.of( + EhcacheMessageType.GET_STORE, + EhcacheMessageType.ITERATOR_ADVANCE, + EhcacheMessageType.ITERATOR_OPEN, + EhcacheMessageType.ITERATOR_CLOSE); private final EntityClientEndpoint endpoint; private final LifeCycleMessageFactory messageFactory; @@ -69,20 +75,23 @@ public class SimpleClusterTierClientEntity implements InternalClusterTierClientE private final ReconnectMessageCodec reconnectMessageCodec = new ReconnectMessageCodec(); private final Map, List>> responseListeners = new ConcurrentHashMap<>(); + private final List disconnectionListeners = new CopyOnWriteArrayList<>(); + private final Timeouts timeouts; + private final String storeIdentifier; + + private final List reconnectListeners = new CopyOnWriteArrayList<>(); - private ReconnectListener reconnectListener = reconnectMessage -> { - // No op - }; - private DisconnectionListener disconnectionListener = () -> { - // No op - }; - private Timeouts timeouts = TimeoutsBuilder.timeouts().build(); - private String storeIdentifier; private volatile boolean connected = true; + private volatile boolean eventsEnabled; + private final Executor asyncWorker; - public SimpleClusterTierClientEntity(EntityClientEndpoint endpoint) { + public SimpleClusterTierClientEntity(EntityClientEndpoint endpoint, + Timeouts timeouts, String storeIdentifier, Executor asyncWorker) { this.endpoint = endpoint; + this.timeouts = timeouts; + this.storeIdentifier = storeIdentifier; + this.asyncWorker = requireNonNull(asyncWorker); this.messageFactory = new LifeCycleMessageFactory(); endpoint.setDelegate(new EndpointDelegate() { @Override @@ -94,27 +103,23 @@ public void handleMessage(EhcacheEntityResponse messageFromServer) { @Override public byte[] createExtendedReconnectData() { synchronized (lock) { - ClusterTierReconnectMessage reconnectMessage = new ClusterTierReconnectMessage(); - reconnectListener.onHandleReconnect(reconnectMessage); + ClusterTierReconnectMessage reconnectMessage = new ClusterTierReconnectMessage(eventsEnabled); + reconnectListeners.forEach(reconnectListener -> reconnectListener.onHandleReconnect(reconnectMessage)); return reconnectMessageCodec.encode(reconnectMessage); } } @Override public void didDisconnectUnexpectedly() { + LOGGER.info("Cluster tier for cache {} disconnected", storeIdentifier); fireDisconnectionEvent(); } }); } - @Override - public void setTimeouts(Timeouts timeouts) { - this.timeouts = timeouts; - } - void fireDisconnectionEvent() { connected = false; - disconnectionListener.onDisconnection(); + disconnectionListeners.forEach(DisconnectionListener::onDisconnection); } private void fireResponseEvent(T response) { @@ -126,15 +131,30 @@ private void fireResponseEvent(T response) { } LOGGER.debug("{} registered response listener(s) for {}", responseListeners.size(), response.getClass()); for (ResponseListener responseListener : responseListeners) { - responseListener.onResponse(response); + Runnable responseProcessing = () -> { + try { + responseListener.onResponse(response); + } catch (TimeoutException e) { + LOGGER.debug("Timeout exception processing: {} - resubmitting", response, e); + fireResponseEvent(response); + } catch (Exception e) { + LOGGER.warn("Unhandled failure processing: {}", response, e); + } + }; + try { + asyncWorker.execute(responseProcessing); + } catch (RejectedExecutionException f) { + LOGGER.warn("Response task execution rejected using inline execution: {}", response, f); + responseProcessing.run(); + } } } @Override public void close() { endpoint.close(); - reconnectListener = null; - disconnectionListener = null; + reconnectListeners.clear(); + disconnectionListeners.clear(); } @Override @@ -143,13 +163,23 @@ public Timeouts getTimeouts() { } @Override - public void setReconnectListener(ReconnectListener reconnectListener) { - this.reconnectListener = reconnectListener; + public void addReconnectListener(ReconnectListener reconnectListener) { + this.reconnectListeners.add(reconnectListener); + } + + @Override + public void enableEvents(boolean enable) throws ClusterException, TimeoutException { + if (enable == this.eventsEnabled) { + return; + } + // make sure the server received and processed the message before returning + this.invokeAndWaitForComplete(new ServerStoreOpMessage.EnableEventListenerMessage(enable), true); + this.eventsEnabled = enable; } @Override - public void setDisconnectionListener(DisconnectionListener disconnectionListener) { - this.disconnectionListener = disconnectionListener; + public void addDisconnectionListener(DisconnectionListener disconnectionListener) { + this.disconnectionListeners.add(disconnectionListener); } @Override @@ -168,7 +198,7 @@ public void addResponseListener(Class respo } @Override - public void validate(ServerStoreConfiguration clientStoreConfiguration) throws ClusterTierException, TimeoutException { + public void validate(ServerStoreConfiguration clientStoreConfiguration) throws ClusterTierValidationException, TimeoutException { try { invokeInternalAndWait(endpoint.beginInvoke(), timeouts.getConnectionTimeout(), messageFactory.validateServerStore(storeIdentifier , clientStoreConfiguration), false); } catch (ClusterException e) { @@ -176,29 +206,20 @@ public void validate(ServerStoreConfiguration clientStoreConfiguration) throws C } } - @Override - public void setStoreIdentifier(String storeIdentifier) { - this.storeIdentifier = storeIdentifier; - } - - void setConnected(boolean connected) { - this.connected = connected; - } - @Override public EhcacheEntityResponse invokeStateRepositoryOperation(StateRepositoryOpMessage message, boolean track) throws ClusterException, TimeoutException { return invokeAndWaitForRetired(message, track); } @Override - public void invokeAndWaitForSend(EhcacheOperationMessage message, boolean track) throws ClusterException, TimeoutException { + public void invokeAndWaitForSend(EhcacheOperationMessage message, boolean track) throws TimeoutException { invokeInternal(endpoint.beginInvoke().ackSent(), getTimeoutDuration(message), message, track); } @Override public void invokeAndWaitForReceive(EhcacheOperationMessage message, boolean track) - throws ClusterException, TimeoutException { - invokeInternal(endpoint.beginInvoke().ackReceived(), getTimeoutDuration(message), message, track); + throws ClusterException, TimeoutException { + invokeInternalAndWait(endpoint.beginInvoke().ackReceived(), message, track); } @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java similarity index 90% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index 8a5b576e39..571ce56c6f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -23,6 +23,8 @@ import java.nio.ByteBuffer; import java.time.Duration; +import java.util.Iterator; +import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; @@ -50,8 +52,8 @@ public StrongServerStoreProxy(final String cacheId, final ClusterTierClientEntit delegate.addResponseListener(EhcacheEntityResponse.HashInvalidationDone.class, this::hashInvalidationDoneResponseListener); delegate.addResponseListener(EhcacheEntityResponse.AllInvalidationDone.class, this::allInvalidationDoneResponseListener); - entity.setReconnectListener(this::reconnectListener); - entity.setDisconnectionListener(this::disconnectionListener); + entity.addReconnectListener(this::reconnectListener); + entity.addDisconnectionListener(this::disconnectionListener); } private void disconnectionListener() { @@ -121,6 +123,9 @@ private T performWaitingForHashInvalidation(long key, Callable c, Duratio if (ex instanceof TimeoutException) { throw (TimeoutException)ex; } + if (ex instanceof ServerStoreProxyException) { + throw (ServerStoreProxyException)ex; + } throw new RuntimeException(ex); } } @@ -156,6 +161,9 @@ private T performWaitingForAllInvalidation(Callable c, Duration timeout) if (ex instanceof TimeoutException) { throw (TimeoutException)ex; } + if (ex instanceof ServerStoreProxyException) { + throw (ServerStoreProxyException)ex; + } throw new RuntimeException(ex); } } @@ -197,7 +205,7 @@ public void close() { } @Override - public Chain get(long key) throws TimeoutException { + public ChainEntry get(long key) throws TimeoutException { return delegate.get(key); } @@ -210,10 +218,15 @@ public void append(final long key, final ByteBuffer payLoad) throws TimeoutExcep } @Override - public Chain getAndAppend(final long key, final ByteBuffer payLoad) throws TimeoutException { + public ChainEntry getAndAppend(final long key, final ByteBuffer payLoad) throws TimeoutException { return performWaitingForHashInvalidation(key, () -> delegate.getAndAppend(key, payLoad), entity.getTimeouts().getWriteOperationTimeout()); } + @Override + public void enableEvents(boolean enable) { + delegate.enableEvents(enable); + } + @Override public void replaceAtHead(long key, Chain expect, Chain update) { delegate.replaceAtHead(key, expect, update); @@ -226,4 +239,9 @@ public void clear() throws TimeoutException { return null; }, entity.getTimeouts().getWriteOperationTimeout()); } + + @Override + public Iterator> iterator() throws TimeoutException { + return delegate.iterator(); + } } diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockManager.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockManager.java new file mode 100644 index 0000000000..220ac6ee66 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockManager.java @@ -0,0 +1,83 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store.lock; + +import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity; +import org.ehcache.clustered.client.internal.store.ServerStoreProxyException; +import org.ehcache.clustered.common.internal.messages.ClusterTierReconnectMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.LockSuccess; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.LockMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.UnlockMessage; +import org.ehcache.clustered.common.internal.store.Chain; + +import java.util.Collections; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeoutException; + +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.LOCK_FAILURE; + +public class LockManager { + + private final ClusterTierClientEntity clientEntity; + private final Set locksHeld = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + public LockManager(ClusterTierClientEntity clientEntity) { + this.clientEntity = clientEntity; + clientEntity.addReconnectListener(this::reconnectListener); + } + + void reconnectListener(ClusterTierReconnectMessage reconnectMessage) { + reconnectMessage.addLocksHeld(locksHeld); + } + + public Chain lock(long hash) throws TimeoutException { + LockSuccess response = getlockResponse(hash); + locksHeld.add(hash); + return response.getChain(); + } + + private LockSuccess getlockResponse(long hash) throws TimeoutException { + EhcacheEntityResponse response; + do { + try { + response = clientEntity.invokeAndWaitForComplete(new LockMessage(hash), false); + } catch (TimeoutException tme) { + throw tme; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + if (response == null) { + throw new ServerStoreProxyException("Response for acquiring lock was invalid null message"); + } + } while (response.getResponseType() == LOCK_FAILURE); + return (LockSuccess) response; + } + + public void unlock(long hash, boolean localonly) throws TimeoutException { + try { + if (!localonly) { + clientEntity.invokeAndWaitForComplete(new UnlockMessage(hash), false); + } + locksHeld.remove(hash); + } catch (TimeoutException tme) { + throw tme; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockingServerStoreProxy.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockingServerStoreProxy.java new file mode 100644 index 0000000000..588ea8e768 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockingServerStoreProxy.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store.lock; + +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; + +import java.util.concurrent.TimeoutException; + +public interface LockingServerStoreProxy extends ServerStoreProxy { + + /** + * + * @param hash + */ + ChainEntry lock(long hash) throws TimeoutException; + + /** + * + * @param hash + * @param localonly + */ + void unlock(long hash, boolean localonly) throws TimeoutException; +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockingServerStoreProxyImpl.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockingServerStoreProxyImpl.java new file mode 100644 index 0000000000..923d4926e3 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/lock/LockingServerStoreProxyImpl.java @@ -0,0 +1,117 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store.lock; + +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; + +import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.TimeoutException; + +public class LockingServerStoreProxyImpl implements LockingServerStoreProxy { + + private final ServerStoreProxy storeProxy; + private final LockManager lockManager; + + public LockingServerStoreProxyImpl(ServerStoreProxy storeProxy, LockManager lockManager) { + this.storeProxy = storeProxy; + this.lockManager = lockManager; + } + + @Override + public String getCacheId() { + return storeProxy.getCacheId(); + } + + @Override + public void close() { + storeProxy.close(); + } + + @Override + public ChainEntry lock(long key) throws TimeoutException { + Chain chain = lockManager.lock(key); + return new ChainEntry() { + @Override + public void append(ByteBuffer payLoad) throws TimeoutException { + LockingServerStoreProxyImpl.this.append(key, payLoad); + } + + @Override + public void replaceAtHead(Chain equivalent) { + LockingServerStoreProxyImpl.this.replaceAtHead(key, chain, equivalent); + } + + @Override + public boolean isEmpty() { + return chain.isEmpty(); + } + + @Override + public int length() { + return chain.length(); + } + + @Override + public Iterator iterator() { + return chain.iterator(); + } + }; + } + + @Override + public void unlock(long key, boolean localonly) throws TimeoutException { + lockManager.unlock(key, localonly); + } + + @Override + public ChainEntry get(long key) throws TimeoutException { + return storeProxy.get(key); + } + + @Override + public void append(long key, ByteBuffer payLoad) throws TimeoutException { + storeProxy.append(key, payLoad); + } + + @Override + public ChainEntry getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { + return storeProxy.getAndAppend(key, payLoad); + } + + @Override + public void enableEvents(boolean enable) throws TimeoutException { + storeProxy.enableEvents(enable); + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + storeProxy.replaceAtHead(key, expect, update); + } + + @Override + public void clear() throws TimeoutException { + storeProxy.clear(); + } + + @Override + public Iterator> iterator() throws TimeoutException { + return storeProxy.iterator(); + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java new file mode 100644 index 0000000000..a5245c5507 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java @@ -0,0 +1,184 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store.operations; + +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.operations.Operation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.clustered.common.internal.util.ChainBuilder; +import org.ehcache.core.spi.store.Store.ValueHolder; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +/** + * An abstract chain resolver. + *

+ * Operation application is performed in subclasses specialized for eternal and non-eternal caches. + * + * @see EternalChainResolver + * @see ExpiryChainResolver + * + * @param key type + * @param value type + */ +public abstract class ChainResolver { + protected final OperationsCodec codec; + + public ChainResolver(final OperationsCodec codec) { + this.codec = codec; + } + + /** + * Resolves the given key within the given chain entry to its current value with a specific compaction threshold. + *

+ * If the resultant chain has shrunk by more than {@code threshold} elements then an attempt is made to perform the + * equivalent compaction on the server. + * + * @param entry target chain entry + * @param key target key + * @param now current time + * @param threshold compaction threshold + * @return the current value + */ + public abstract ValueHolder resolve(ServerStoreProxy.ChainEntry entry, K key, long now, int threshold); + + /** + * Resolves the given key within the given chain entry to its current value. + *

+ * This is exactly equivalent to calling {@link #resolve(ServerStoreProxy.ChainEntry, Object, long, int)} with a zero + * compaction threshold. + * + * @param entry target chain entry + * @param key target key + * @param now current time + * @return the current value + */ + public ValueHolder resolve(ServerStoreProxy.ChainEntry entry, K key, long now) { + return resolve(entry, key, now, 0); + } + + /** + * Resolves all keys within the given chain to their current values while removing expired values. + * + * @param chain target chain + * @param now current time + * @return a map of current values + */ + public abstract Map> resolveAll(Chain chain, long now); + + /** + * Resolves all keys within the given chain to their current values while retaining expired values. + * + * @param chain target chain + * @return a map of current values + */ + public abstract Map> resolveAll(Chain chain); + + /** + * Compacts the given chain entry by resolving every key within. + * + * @param entry an uncompacted heterogenous {@link ServerStoreProxy.ChainEntry} + */ + public void compact(ServerStoreProxy.ChainEntry entry) { + ChainBuilder builder = new ChainBuilder(); + for (PutOperation operation : resolveToSimplePuts(entry).values()) { + builder = builder.add(codec.encode(operation)); + } + Chain compacted = builder.build(); + if (compacted.length() < entry.length()) { + entry.replaceAtHead(compacted); + } + } + + /** + * Resolves the given key within the given chain entry to an equivalent put operation. + *

+ * If the resultant chain has shrunk by more than {@code threshold} elements then an attempt is made to perform the + * equivalent compaction on the server. + * + * @param entry target chain entry + * @param key target key + * @param threshold compaction threshold + * @return equivalent put operation + */ + protected PutOperation resolve(ServerStoreProxy.ChainEntry entry, K key, int threshold) { + PutOperation result = null; + ChainBuilder resolvedChain = new ChainBuilder(); + for (Element element : entry) { + ByteBuffer payload = element.getPayload(); + Operation operation = codec.decode(payload); + + if(key.equals(operation.getKey())) { + result = applyOperation(key, result, operation); + } else { + payload.rewind(); + resolvedChain = resolvedChain.add(payload); + } + } + if(result != null) { + resolvedChain = resolvedChain.add(codec.encode(result)); + } + + if (entry.length() - resolvedChain.length() > threshold) { + entry.replaceAtHead(resolvedChain.build()); + } + return result; + } + + /** + * Resolves all keys within the given chain to their equivalent put operations. + * + * @param chain target chain + * @return a map of equivalent put operations + */ + public Map> resolveToSimplePuts(Chain chain) { + //absent hash-collisions this should always be a 1 entry map + Map> compacted = new HashMap<>(2); + for (Element element : chain) { + ByteBuffer payload = element.getPayload(); + Operation operation = codec.decode(payload); + compacted.compute(operation.getKey(), (k, v) -> applyOperation(k, v, operation)); + } + return compacted; + } + + /** + * Resolves a key within the given chain to its equivalent put operation. + * + * @param chain target chain + * @param key the key + * @return the equivalent put operation + */ + public PutOperation resolve(Chain chain, K key) { + return resolveToSimplePuts(chain).get(key); + } + + /** + * Applies the given operation to the current state. + * + * @param key cache key + * @param existing current state + * @param operation operation to apply + * @return an equivalent put operation + */ + public abstract PutOperation applyOperation(K key, PutOperation existing, Operation operation); +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolver.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolver.java new file mode 100644 index 0000000000..0fe81f3a25 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/EternalChainResolver.java @@ -0,0 +1,82 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store.operations; + +import org.ehcache.clustered.client.internal.store.ClusteredValueHolder; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.operations.Operation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.Result; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.core.spi.store.Store.ValueHolder; + +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.unmodifiableMap; + +/** + * A specialized chain resolver for eternal caches. + * + * @see org.ehcache.expiry.Expirations#noExpiration() + * + * @param key type + * @param value type + */ +public class EternalChainResolver extends ChainResolver { + + public EternalChainResolver(final OperationsCodec codec) { + super(codec); + } + + @Override + public ValueHolder resolve(ServerStoreProxy.ChainEntry entry, K key, long now, int threshold) { + PutOperation resolved = resolve(entry, key, threshold); + return resolved == null ? null : new ClusteredValueHolder<>(resolved.getValue()); + } + + @Override + public Map> resolveAll(Chain chain) { + Map> resolved = resolveToSimplePuts(chain); + + Map> values = new HashMap<>(resolved.size()); + for (Map.Entry> e : resolved.entrySet()) { + values.put(e.getKey(), new ClusteredValueHolder<>(e.getValue().getValue())); + } + return unmodifiableMap(values); + } + + @Override + public Map> resolveAll(Chain chain, long now) { + return resolveAll(chain); + } + + /** + * Applies the given operation returning a result that never expires. + * + * {@inheritDoc} + */ + public PutOperation applyOperation(K key, PutOperation existing, Operation operation) { + final Result newValue = operation.apply(existing); + if (newValue == null) { + return null; + } else { + return newValue.asOperationExpiringAt(Long.MAX_VALUE); + } + } +} diff --git a/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolver.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolver.java new file mode 100644 index 0000000000..70d9df31d9 --- /dev/null +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ExpiryChainResolver.java @@ -0,0 +1,168 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store.operations; + +import org.ehcache.clustered.client.internal.store.ClusteredValueHolder; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.operations.Operation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.Result; +import org.ehcache.clustered.common.internal.store.operations.TimestampOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.core.config.ExpiryUtils; +import org.ehcache.core.spi.store.Store.ValueHolder; +import org.ehcache.expiry.ExpiryPolicy; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeoutException; + +import static java.util.Collections.unmodifiableMap; +import static java.util.Objects.requireNonNull; +import static org.ehcache.core.config.ExpiryUtils.isExpiryDurationInfinite; + +/** + * A specialized chain resolver for non-eternal caches. + * + * @param key type + * @param value type + */ +public class ExpiryChainResolver extends ChainResolver { + + private static final Logger LOG = LoggerFactory.getLogger(ExpiryChainResolver.class); + + private final ExpiryPolicy expiry; + + /** + * Creates a resolver with the given codec and expiry policy. + * + * @param codec operation codec + * @param expiry expiry policy + */ + public ExpiryChainResolver(final OperationsCodec codec, ExpiryPolicy expiry) { + super(codec); + this.expiry = requireNonNull(expiry, "Expiry cannot be null"); + } + + @Override + public ValueHolder resolve(ServerStoreProxy.ChainEntry entry, K key, long now, int threshold) { + PutOperation resolved = resolve(entry, key, threshold); + + if (resolved == null) { + return null; + } else if (now >= resolved.expirationTime()) { + try { + entry.append(codec.encode(new TimestampOperation<>(key, now))); + } catch (TimeoutException e) { + LOG.debug("Failed to append timestamp operation", e); + } + return null; + } else { + return new ClusteredValueHolder<>(resolved.getValue(), resolved.expirationTime()); + } + } + + @Override + public Map> resolveAll(Chain chain, long now) { + Map> resolved = resolveAll(chain); + + Map> values = new HashMap<>(resolved.size()); + for (Map.Entry> e : resolved.entrySet()) { + if (!e.getValue().isExpired(now)) { + values.put(e.getKey(), e.getValue()); + } + } + return unmodifiableMap(values); + } + + @Override + public Map> resolveAll(Chain chain) { + Map> resolved = resolveToSimplePuts(chain); + + Map> values = new HashMap<>(resolved.size()); + for (Map.Entry> e : resolved.entrySet()) { + values.put(e.getKey(), new ClusteredValueHolder<>(e.getValue().getValue(), e.getValue().expirationTime())); + } + return unmodifiableMap(values); + } + + /** + * Applies the given operation returning a result with an expiry time determined by this resolvers expiry policy. + *

+ * If the resolved operations expiry time has passed then {@code null} is returned. + * + * @param key cache key + * @param existing current state + * @param operation operation to apply + * @return the equivalent put operation + */ + @Override + public PutOperation applyOperation(K key, PutOperation existing, Operation operation) { + if (existing != null && operation.timeStamp() >= existing.expirationTime()) { + existing = null; + } + + final Result newValue = operation.apply(existing); + if (newValue == null) { + return null; + } else if (newValue == existing) { + return existing; + } else { + return newValue.asOperationExpiringAt(calculateExpiryTime(key, existing, operation, newValue)); + } + } + + /** + * Calculates the expiration time of the new state based on this resolvers expiry policy. + * + * @param key cache key + * @param existing current state + * @param operation operation to apply + * @param newValue new state + * @return the calculated expiry time + */ + private long calculateExpiryTime(K key, PutOperation existing, Operation operation, Result newValue) { + if (operation.isExpiryAvailable()) { + return operation.expirationTime(); + } else { + try { + Duration duration; + if (existing == null) { + duration = requireNonNull(expiry.getExpiryForCreation(key, newValue.getValue())); + } else { + duration = expiry.getExpiryForUpdate(key, existing::getValue, newValue.getValue()); + if (duration == null) { + return existing.expirationTime(); + } + } + if (duration.isNegative()) { + duration = Duration.ZERO; + } else if (isExpiryDurationInfinite(duration)) { + return Long.MAX_VALUE; + } + return ExpiryUtils.getExpirationMillis(operation.timeStamp(), duration); + } catch (Exception ex) { + LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", ex); + return Long.MIN_VALUE; + } + } + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClientEntityFactory.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/ClientEntityFactory.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/service/ClientEntityFactory.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/ClientEntityFactory.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java similarity index 86% rename from clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java index e03a50ec2d..89ace14e41 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java @@ -59,8 +59,23 @@ ServerStoreProxy getServerStoreProxy(ClusteredCacheIdentifier cacheIdenti * Releases access to a {@link ServerStoreProxy} and the server-resident {@code ServerStore} it represents. * * @param serverStoreProxy a {@link ServerStoreProxy} obtained through {@link #getServerStoreProxy} + * @param isReconnect whether client is trying to reconnect */ - void releaseServerStoreProxy(ServerStoreProxy serverStoreProxy); + void releaseServerStoreProxy(ServerStoreProxy serverStoreProxy, boolean isReconnect); + + /** + * Add a block to execute when the connection is recovered after it was closed. + * + * @param runnable the execution block + */ + void addConnectionRecoveryListener(Runnable runnable); + + /** + * Remove a block to execute when the connection is recovered after it was closed. + * + * @param runnable the execution block + */ + void removeConnectionRecoveryListener(Runnable runnable); /** * A {@link org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier PersistenceSpaceIdentifier} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/service/EntityBusyException.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/EntityBusyException.java similarity index 88% rename from clustered/client/src/main/java/org/ehcache/clustered/client/service/EntityBusyException.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/EntityBusyException.java index ac5b0e42d0..2812e911fe 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/service/EntityBusyException.java +++ b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/EntityBusyException.java @@ -17,7 +17,7 @@ /** * Thrown by {@link org.terracotta.connection.entity.Entity} operations requiring access to the - * {@code {@link org.terracotta.entity.ActiveServerEntity}} when the {@code {@link org.terracotta.entity.ActiveServerEntity}} is not available. + * {@code org.terracotta.entity.ActiveServerEntity} when the {@code org.terracotta.entity.ActiveServerEntity} is not available. */ public class EntityBusyException extends Exception { private static final long serialVersionUID = -7706902691622092177L; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/service/EntityService.java b/clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/EntityService.java similarity index 100% rename from clustered/client/src/main/java/org/ehcache/clustered/client/service/EntityService.java rename to clustered/ehcache-client/src/main/java/org/ehcache/clustered/client/service/EntityService.java diff --git a/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory new file mode 100644 index 0000000000..99428db970 --- /dev/null +++ b/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory @@ -0,0 +1,5 @@ +org.ehcache.clustered.client.internal.service.ClusteringServiceFactory +org.ehcache.clustered.client.internal.store.ClusteredStoreProviderFactory +org.ehcache.clustered.client.internal.loaderwriter.DelegatingLoaderWriterStoreProviderFactory +org.ehcache.clustered.client.internal.loaderwriter.ClusteredLoaderWriterStoreProviderFactory +org.ehcache.clustered.client.internal.loaderwriter.writebehind.ClusteredWriteBehindStoreProviderFactory diff --git a/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser b/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser new file mode 100644 index 0000000000..3cfe6d381a --- /dev/null +++ b/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser @@ -0,0 +1 @@ +org.ehcache.clustered.client.internal.config.xml.ClusteringCacheManagerServiceConfigurationParser diff --git a/clustered/client/src/main/resources/META-INF/services/org.ehcache.xml.CacheResourceConfigurationParser b/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.xml.CacheResourceConfigurationParser similarity index 100% rename from clustered/client/src/main/resources/META-INF/services/org.ehcache.xml.CacheResourceConfigurationParser rename to clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.xml.CacheResourceConfigurationParser diff --git a/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser b/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser new file mode 100644 index 0000000000..5a8e03b57d --- /dev/null +++ b/clustered/ehcache-client/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser @@ -0,0 +1 @@ +org.ehcache.clustered.client.internal.config.xml.ClusteringCacheServiceConfigurationParser diff --git a/clustered/client/src/main/resources/META-INF/services/org.terracotta.entity.EntityClientService b/clustered/ehcache-client/src/main/resources/META-INF/services/org.terracotta.entity.EntityClientService similarity index 100% rename from clustered/client/src/main/resources/META-INF/services/org.terracotta.entity.EntityClientService rename to clustered/ehcache-client/src/main/resources/META-INF/services/org.terracotta.entity.EntityClientService diff --git a/clustered/ehcache-client/src/main/resources/ehcache-clustered-ext.xsd b/clustered/ehcache-client/src/main/resources/ehcache-clustered-ext.xsd new file mode 100644 index 0000000000..f42a6c5ff7 --- /dev/null +++ b/clustered/ehcache-client/src/main/resources/ehcache-clustered-ext.xsd @@ -0,0 +1,286 @@ + + + + + + + + + + + + Used within the /config/service element of an Ehcache configuration, this element + describes cluster service properties. + + + + + + + + Specifies the server-side configuration of the entity to be accessed and or created. + + + + + + + + + + + + + + + Specifies the amount of time a cache read operation will wait for a response from a cluster + server before abandoning the cluster operation. + + + + + + + Specifies the amount of time a cache write operation will wait for a response from a cluster + server before abandoning the cluster operation. + + + + + + + Specifies the amount of time a cache will wait to connect to a cluster + server before abandoning the cluster operation. + + + + + + + + + + + + Identifies a single cluster member by URL. + + + + + + + + + + + + + + + + + + + + + + + + Cluster Tier Manager identifier. + + + + + + + + + Data specific to a particular server. + + + + + + + The host that the server is running on. + + + + + + + The port that the server is listening on. + + + + + + + + + + + Specifies the default server-side storage resource to use for storing cache data. + + + + + + + Defines a pool of server-side storage resource to be shared amongst multiple caches. + + + + + + + + + True if server side components should be automatically created if they are absent. + + This attribute is deprecated, and has been replaced by the 'client-mode' attribute. + Use of both at the same time (although legal per the schema) will fail at parse time in Ehcache. + + + + + + xml:lang="en"> + The client connection behavior, either: + * expecting - expect a matching server configuration (fail otherwise) + * auto-create - expect a matching server configuration or create one if none is present + * auto-create-on-reconnect - as 'auto-create' but also auto-create on reconnection + + + + + + + + + + + + + + + + + Shared pool name. + + + + + + + + + + + + + + + + + + + + + + + + Clustered cache resource with a dedicated size. + + + + + + + + + + + + + + + + Clustered cache resource sharing a pool with other cache resources. + + + + + + + + + Name of the shared pool this resource uses. + + + + + + + + + Clustered cache resource which inherits the resource pool configured on the server. + + + + + + + + + + Optional reference to a server-side storage resource. + + + + + + + + + + Required reference to a server-side storage resource. + + + + + + + + + + + + + + + + + + diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java new file mode 100644 index 0000000000..78273ff50a --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java @@ -0,0 +1,111 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.CacheRuntimeConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.net.URI; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredShared; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThrows; + +public class ClusteredResourcePoolUpdationTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + + private static PersistentCacheManager cacheManager; + private static Cache dedicatedCache; + private static Cache sharedCache; + + @BeforeClass + public static void setUp() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 8, MemoryUnit.MB) + .resource("secondary-server-resource", 8, MemoryUnit.MB) + .build()); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER_URI).autoCreate(server -> server + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 2, MemoryUnit.MB, "secondary-server-resource") + .resourcePool("resource-pool-b", 4, MemoryUnit.MB))) + .withCache("dedicated-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB)))) + .withCache("shared-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")))) + .build(); + cacheManager.init(); + + dedicatedCache = cacheManager.getCache("dedicated-cache", Long.class, String.class); + sharedCache = cacheManager.getCache("shared-cache", Long.class, String.class); + } + + @AfterClass + public static void tearDown() throws Exception { + try { + cacheManager.close(); + UnitTestConnectionService.remove(CLUSTER_URI); + } finally { + cacheManager = null; + dedicatedCache = null; + sharedCache = null; + } + } + + @Test + public void testClusteredDedicatedResourcePoolUpdation() throws Exception { + CacheRuntimeConfiguration runtimeConfiguration = dedicatedCache.getRuntimeConfiguration(); + UnsupportedOperationException thrown = assertThrows(UnsupportedOperationException.class, () -> + runtimeConfiguration.updateResourcePools(newResourcePoolsBuilder() + .with(clusteredDedicated("primary-server-resource", 8, MB)) + .build() + )); + assertThat(thrown, hasProperty("message", is("Updating CLUSTERED resource is not supported"))); + } + + @Test + public void testClusteredSharedResourcePoolUpdation() throws Exception { + CacheRuntimeConfiguration runtimeConfiguration = sharedCache.getRuntimeConfiguration(); + UnsupportedOperationException thrown = assertThrows(UnsupportedOperationException.class, () -> + runtimeConfiguration.updateResourcePools(newResourcePoolsBuilder() + .with(clusteredShared("resource-pool-a")) + .build() + )); + assertThat(thrown, hasProperty("message", is("Updating CLUSTERED resource is not supported"))); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java new file mode 100644 index 0000000000..ae0e3623f1 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java @@ -0,0 +1,147 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.internal.TimeSourceConfiguration; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; +import java.time.Duration; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +/** + * + */ +public class BasicClusteredCacheExpiryTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + private static final CacheManagerBuilder commonClusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1L))) + .withService(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); + + @Before + public void definePassthroughServer() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 64, MemoryUnit.MB) + .resource("secondary-server-resource", 64, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void testGetExpiredSingleClient() { + + TestTimeSource timeSource = new TestTimeSource(); + TimeSourceConfiguration timeSourceConfiguration = new TimeSourceConfiguration(timeSource); + + final CacheManagerBuilder clusteredCacheManagerBuilder = + commonClusteredCacheManagerBuilder.using(timeSourceConfiguration); + + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + + final Cache cache = cacheManager.getCache("clustered-cache", Long.class, String.class); + + cache.put(1L, "value"); + assertThat(cache.get(1L), is("value")); + + timeSource.advanceTime(1); + + assertThat(cache.get(1L), nullValue()); + } + } + + @Test + public void testGetExpiredTwoClients() { + + TestTimeSource timeSource = new TestTimeSource(); + TimeSourceConfiguration timeSourceConfiguration = new TimeSourceConfiguration(timeSource); + + final CacheManagerBuilder clusteredCacheManagerBuilder = + commonClusteredCacheManagerBuilder.using(timeSourceConfiguration); + + try (PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true)) { + try (PersistentCacheManager cacheManager2 = clusteredCacheManagerBuilder.build(true)) { + + final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); + final Cache cache2 = cacheManager2.getCache("clustered-cache", Long.class, String.class); + + assertThat(cache2.get(1L), nullValue()); + cache1.put(1L, "value1"); + assertThat(cache1.get(1L), is("value1")); + timeSource.advanceTime(1L); + + assertThat(cache2.get(1L), nullValue()); + assertThat(cache1.get(1L), nullValue()); + } + } + } + + @Test + public void testContainsKeyExpiredTwoClients() { + + TestTimeSource timeSource = new TestTimeSource(); + TimeSourceConfiguration timeSourceConfiguration = new TimeSourceConfiguration(timeSource); + + final CacheManagerBuilder clusteredCacheManagerBuilder = + commonClusteredCacheManagerBuilder.using(timeSourceConfiguration); + + try (PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true)) { + try (PersistentCacheManager cacheManager2 = clusteredCacheManagerBuilder.build(true)) { + + final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); + final Cache cache2 = cacheManager2.getCache("clustered-cache", Long.class, String.class); + + assertThat(cache2.get(1L), nullValue()); + cache1.put(1L, "value1"); + assertThat(cache1.containsKey(1L), is(true)); + timeSource.advanceTime(1L); + + assertThat(cache1.containsKey(1L), is(false)); + assertThat(cache2.containsKey(1L), is(false)); + } + } + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java new file mode 100644 index 0000000000..3f29a53628 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java @@ -0,0 +1,239 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.UnitTestConnectionService.PassthroughServerBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.Serializable; +import java.math.BigInteger; +import java.net.URI; +import java.util.Random; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +/** + * Provides basic tests for creation of a cache using a {@link org.ehcache.clustered.client.internal.store.ClusteredStore ClusteredStore}. + */ +public class BasicClusteredCacheTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + + @Before + public void definePassthroughServer() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new PassthroughServerBuilder() + .resource("primary-server-resource", 64, MemoryUnit.MB) + .resource("secondary-server-resource", 64, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void testClusteredCacheSingleClient() throws Exception { + + final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + + final Cache cache = cacheManager.getCache("clustered-cache", Long.class, String.class); + + cache.put(1L, "value"); + assertThat(cache.get(1L), is("value")); + } + } + + @Test + public void testClusteredCacheTwoClients() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) + .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))); + + try (PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true)) { + try (PersistentCacheManager cacheManager2 = clusteredCacheManagerBuilder.build(true)) { + + final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); + final Cache cache2 = cacheManager2.getCache("clustered-cache", Long.class, String.class); + + assertThat(cache2.get(1L), nullValue()); + cache1.put(1L, "value1"); + assertThat(cache2.get(1L), is("value1")); + assertThat(cache1.get(1L), is("value1")); + cache1.put(1L, "value2"); + assertThat(cache2.get(1L), is("value2")); + assertThat(cache1.get(1L), is("value2")); + } + } + } + + @Test + public void testClustered3TierCacheTwoClients() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(1, MemoryUnit.MB) + .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))); + + try (PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true)) { + try (PersistentCacheManager cacheManager2 = clusteredCacheManagerBuilder.build(true)) { + + final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); + final Cache cache2 = cacheManager2.getCache("clustered-cache", Long.class, String.class); + + assertThat(cache2.get(1L), nullValue()); + cache1.put(1L, "value1"); + cache1.put(2L, "value2"); + cache1.put(3L, "value3"); + assertThat(cache2.get(1L), is("value1")); + assertThat(cache2.get(2L), is("value2")); + assertThat(cache2.get(3L), is("value3")); + assertThat(cache2.get(1L), is("value1")); + assertThat(cache2.get(2L), is("value2")); + assertThat(cache2.get(3L), is("value3")); + assertThat(cache1.get(1L), is("value1")); + assertThat(cache1.get(2L), is("value2")); + assertThat(cache1.get(3L), is("value3")); + assertThat(cache1.get(1L), is("value1")); + assertThat(cache1.get(2L), is("value2")); + assertThat(cache1.get(3L), is("value3")); + cache1.put(1L, "value11"); + cache1.put(2L, "value12"); + cache1.put(3L, "value13"); + assertThat(cache2.get(1L), is("value11")); + assertThat(cache2.get(2L), is("value12")); + assertThat(cache2.get(3L), is("value13")); + assertThat(cache2.get(1L), is("value11")); + assertThat(cache2.get(2L), is("value12")); + assertThat(cache2.get(3L), is("value13")); + assertThat(cache1.get(1L), is("value11")); + assertThat(cache1.get(2L), is("value12")); + assertThat(cache1.get(3L), is("value13")); + assertThat(cache1.get(1L), is("value11")); + assertThat(cache1.get(2L), is("value12")); + assertThat(cache1.get(3L), is("value13")); + } + } + } + + @Test + public void testTieredClusteredCache() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + heap(2) + .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + + final Cache cache = cacheManager.getCache("clustered-cache", Long.class, String.class); + + cache.put(1L, "value"); + assertThat(cache.get(1L), is("value")); + } + } + + @Test + public void testClusteredCacheWithSerializableValue() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder().with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, Person.class, + newResourcePoolsBuilder().with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + Cache cache = cacheManager.getCache("clustered-cache", Long.class, Person.class); + + cache.put(38L, new Person("Clustered Joe", 28)); + } + + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + Cache cache = cacheManager.getCache("clustered-cache", Long.class, Person.class); + + assertThat(cache.get(38L).name, is("Clustered Joe")); + } + } + + @Test + public void testLargeValues() throws Exception { + DefaultStatisticsService statisticsService = new DefaultStatisticsService(); + CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .using(statisticsService) + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("small-cache", newCacheConfigurationBuilder(Long.class, BigInteger.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(clusteredDedicated("secondary-server-resource", 4, MemoryUnit.MB)))); + + // The idea here is to add big things in the cache, and cause eviction of them to see if something crashes + + try(PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + + Cache cache = cacheManager.getCache("small-cache", Long.class, BigInteger.class); + + Random random = new Random(); + for (long i = 0; i < 100; i++) { + BigInteger value = new BigInteger(30 * 1024 * 128 * (1 + random.nextInt(10)), random); + cache.put(i, value); + } + } + } + + public static class Person implements Serializable { + + private static final long serialVersionUID = 1L; + + final String name; + final int age; + + public Person(String name, int age) { + this.name = name; + this.age = age; + } + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/CacheManagerDestroyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/CacheManagerDestroyTest.java new file mode 100644 index 0000000000..36b91c7d94 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/CacheManagerDestroyTest.java @@ -0,0 +1,191 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; +import org.ehcache.PersistentCacheManager; +import org.ehcache.StateTransitionException; +import org.ehcache.Status; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.fail; + +public class CacheManagerDestroyTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + + private static final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)); + + @Before + public void definePassthroughServer() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 64, MemoryUnit.MB) + .resource("secondary-server-resource", 64, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void testDestroyCacheManagerWithSingleClient() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true); + + persistentCacheManager.close(); + persistentCacheManager.destroy(); + + assertThat(persistentCacheManager.getStatus(), is(Status.UNINITIALIZED)); + } + + @Test + public void testCreateDestroyCreate() throws Exception { + PersistentCacheManager cacheManager = newCacheManagerBuilder().with(cluster(CLUSTER_URI) + .autoCreate(c -> c.defaultServerResource("primary-server-resource"))) + .withCache("my-cache", newCacheConfigurationBuilder(Long.class, String.class, heap(10).with(ClusteredResourcePoolBuilder + .clusteredDedicated(2, MemoryUnit.MB)))) + .build(true); + + cacheManager.close(); + cacheManager.destroy(); + + cacheManager.init(); + + cacheManager.close(); + } + + @Test + public void testDestroyCacheManagerWithMultipleClients() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); + try (PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true)) { + + persistentCacheManager1.close(); + + try { + persistentCacheManager1.destroy(); + fail("StateTransitionException expected"); + } catch (StateTransitionException e) { + assertThat(e.getMessage(), is("Couldn't acquire cluster-wide maintenance lease")); + } + + assertThat(persistentCacheManager1.getStatus(), is(Status.UNINITIALIZED)); + + assertThat(persistentCacheManager2.getStatus(), is(Status.AVAILABLE)); + + Cache cache = persistentCacheManager2.createCache("test", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); + + cache.put(1L, "One"); + + assertThat(cache.get(1L), is("One")); + } + } + + @Test + public void testDestroyCacheManagerDoesNotAffectsExistingCacheWithExistingClientsConnected() throws CachePersistenceException { + + CacheManagerBuilder cacheManagerBuilder = clusteredCacheManagerBuilder + .withCache("test", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); + + PersistentCacheManager persistentCacheManager1 = cacheManagerBuilder.build(true); + try (PersistentCacheManager persistentCacheManager2 = cacheManagerBuilder.build(true)) { + + persistentCacheManager1.close(); + try { + persistentCacheManager1.destroy(); + fail("StateTransitionException expected"); + } catch (StateTransitionException e) { + assertThat(e.getMessage(), is("Couldn't acquire cluster-wide maintenance lease")); + } + + Cache cache = persistentCacheManager2.getCache("test", Long.class, String.class); + + cache.put(1L, "One"); + + assertThat(cache.get(1L), is("One")); + } + } + + @Test + public void testCloseCacheManagerSingleClient() { + CacheManagerBuilder cacheManagerBuilder = clusteredCacheManagerBuilder + .withCache("test", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); + + PersistentCacheManager persistentCacheManager1 = cacheManagerBuilder.build(true); + + persistentCacheManager1.close(); + + persistentCacheManager1.init(); + + Cache cache = persistentCacheManager1.getCache("test", Long.class, String.class); + cache.put(1L, "One"); + + assertThat(cache.get(1L), is("One")); + + persistentCacheManager1.close(); + } + + @Test + public void testCloseCacheManagerMultipleClients() { + CacheManagerBuilder cacheManagerBuilder = clusteredCacheManagerBuilder + .withCache("test", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))); + + PersistentCacheManager persistentCacheManager1 = cacheManagerBuilder.build(true); + try (PersistentCacheManager persistentCacheManager2 = cacheManagerBuilder.build(true)) { + + Cache cache = persistentCacheManager1.getCache("test", Long.class, String.class); + cache.put(1L, "One"); + + assertThat(cache.get(1L), is("One")); + + persistentCacheManager1.close(); + assertThat(persistentCacheManager1.getStatus(), is(Status.UNINITIALIZED)); + + Cache cache2 = persistentCacheManager2.getCache("test", Long.class, String.class); + + assertThat(cache2.get(1L), is("One")); + } + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java new file mode 100644 index 0000000000..587f315eae --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java @@ -0,0 +1,228 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; +import org.ehcache.PersistentCacheManager; +import org.ehcache.Status; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clustered; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.fail; + +public class ClusteredCacheDestroyTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + private static final String CLUSTERED_CACHE = "clustered-cache"; + + private static final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache(CLUSTERED_CACHE, newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) + .withService(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); + + @Before + public void definePassthroughServer() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 16, MemoryUnit.MB) + .resource("secondary-server-resource", 16, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void testDestroyCacheWhenSingleClientIsConnected() throws CachePersistenceException { + try (PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true)) { + + persistentCacheManager.destroyCache(CLUSTERED_CACHE); + + final Cache cache = persistentCacheManager.getCache(CLUSTERED_CACHE, Long.class, String.class); + + assertThat(cache, nullValue()); + } + } + + @Test + public void testDestroyFreesUpTheAllocatedResource() throws CachePersistenceException { + try (PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true)) { + + CacheConfigurationBuilder configBuilder = newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 10, MemoryUnit.MB))); + + try { + Cache anotherCache = persistentCacheManager.createCache("another-cache", configBuilder); + fail(); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), is("Cache 'another-cache' creation in EhcacheManager failed.")); + } + + persistentCacheManager.destroyCache(CLUSTERED_CACHE); + + Cache anotherCache = persistentCacheManager.createCache("another-cache", configBuilder); + + anotherCache.put(1L, "One"); + assertThat(anotherCache.get(1L), is("One")); + } + } + + @Test + public void testDestroyUnknownCacheAlias() throws Exception { + clusteredCacheManagerBuilder.build(true).close(); + + try (PersistentCacheManager cacheManager = newCacheManagerBuilder().with(cluster(CLUSTER_URI).expecting(c -> c)).build(true)) { + + cacheManager.destroyCache(CLUSTERED_CACHE); + + try { + cacheManager.createCache(CLUSTERED_CACHE, newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() + .with(clustered()))); + fail("Expected exception as clustered store no longer exists"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString(CLUSTERED_CACHE)); + } + } + } + + @Test + public void testDestroyNonExistentCache() throws CachePersistenceException { + try (PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true)) { + + String nonExistent = "this-is-not-the-cache-you-are-looking-for"; + assertThat(persistentCacheManager.getCache(nonExistent, Long.class, String.class), nullValue()); + persistentCacheManager.destroyCache(nonExistent); + } + } + + @Test + public void testDestroyCacheWhenMultipleClientsConnected() { + try (PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true)) { + try (PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true)) { + + final Cache cache1 = persistentCacheManager1.getCache(CLUSTERED_CACHE, Long.class, String.class); + + final Cache cache2 = persistentCacheManager2.getCache(CLUSTERED_CACHE, Long.class, String.class); + + try { + persistentCacheManager1.destroyCache(CLUSTERED_CACHE); + fail(); + } catch (CachePersistenceException e) { + assertThat(e.getMessage(), containsString("Cannot destroy cluster tier")); + } + + try { + cache1.put(1L, "One"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), is("State is UNINITIALIZED")); + } + + assertThat(cache2.get(1L), nullValue()); + + cache2.put(1L, "One"); + + assertThat(cache2.get(1L), is("One")); + } + } + } + + @Test + public void testDestroyCacheWithCacheManagerStopped() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true); + persistentCacheManager.close(); + persistentCacheManager.destroyCache(CLUSTERED_CACHE); + assertThat(persistentCacheManager.getStatus(), is(Status.UNINITIALIZED)); + } + + @Test + public void testDestroyNonExistentCacheWithCacheManagerStopped() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true); + persistentCacheManager.close(); + persistentCacheManager.destroyCache("this-is-not-the-cache-you-are-looking-for"); + assertThat(persistentCacheManager.getStatus(), is(Status.UNINITIALIZED)); + } + + @Test + public void testDestroyCacheOnNonExistentCacheManager() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager = clusteredCacheManagerBuilder.build(true); + persistentCacheManager.close(); + persistentCacheManager.destroy(); + + persistentCacheManager.destroyCache("this-is-not-the-cache-you-are-looking-for"); + assertThat(persistentCacheManager.getStatus(), is(Status.UNINITIALIZED)); + } + + @Test + @SuppressWarnings("try") + public void testDestroyCacheWithTwoCacheManagerOnSameCache_forbiddenWhenInUse() throws CachePersistenceException { + try (PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true)) { + try (PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true)) { + CachePersistenceException thrown = assertThrows(CachePersistenceException.class, () -> persistentCacheManager1.destroyCache(CLUSTERED_CACHE)); + assertThat(thrown, hasProperty("message", is("Cannot destroy cluster tier 'clustered-cache': in use by other client(s)"))); + } + } + } + + @Test + public void testDestroyCacheWithTwoCacheManagerOnSameCache_firstRemovesSecondDestroy() throws CachePersistenceException { + try (PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true)) { + try (PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true)) { + persistentCacheManager2.removeCache(CLUSTERED_CACHE); + persistentCacheManager1.destroyCache(CLUSTERED_CACHE); + } + } + } + + @Test + public void testDestroyCacheWithTwoCacheManagerOnSameCache_secondDoesntHaveTheCacheButPreventExclusiveAccessToCluster() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(false); + try (PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true)) { + persistentCacheManager2.removeCache(CLUSTERED_CACHE); + persistentCacheManager1.destroyCache(CLUSTERED_CACHE); + } + } +} + diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheExpirationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredCacheExpirationTest.java similarity index 88% rename from clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheExpirationTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredCacheExpirationTest.java index 8d9176a080..a8a2a8164e 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheExpirationTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredCacheExpirationTest.java @@ -24,22 +24,22 @@ import org.ehcache.clustered.client.internal.UnitTestConnectionService; import org.ehcache.clustered.common.Consistency; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.core.statistics.TierStatistics; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.TimeSourceConfiguration; -import org.ehcache.impl.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.net.URI; +import java.time.Duration; import java.util.Map; -import java.util.concurrent.TimeUnit; import static org.assertj.core.api.Assertions.assertThat; import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; @@ -55,22 +55,22 @@ public class ClusteredCacheExpirationTest { private StatisticsService statisticsService = new DefaultStatisticsService(); - private CacheManagerBuilder cacheManagerBuilder(Expiry expiry) { + private CacheManagerBuilder cacheManagerBuilder(ExpiryPolicy expiry) { return newCacheManagerBuilder() .using(statisticsService) .using(new TimeSourceConfiguration(timeSource)) - .with(cluster(CLUSTER_URI).autoCreate()) + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) .withCache(CLUSTERED_CACHE, newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10) - .offheap(10, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB))) + .heap(10, EntryUnit.ENTRIES) + .offheap(6, MemoryUnit.MB) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) .withExpiry(expiry) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); + .withService(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); } - private Expiry oneSecondExpiration() { - return Expirations.timeToLiveExpiration(Duration.of(1, TimeUnit.SECONDS)); + private ExpiryPolicy oneSecondExpiration() { + return ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1)); } @Before @@ -117,7 +117,7 @@ public void testGetExpirationPropagatedToHigherTiers() throws CachePersistenceEx @Test public void testGetNoExpirationPropagatedToHigherTiers() throws CachePersistenceException { - CacheManagerBuilder clusteredCacheManagerBuilder = cacheManagerBuilder(Expirations.noExpiration()); + CacheManagerBuilder clusteredCacheManagerBuilder = cacheManagerBuilder(ExpiryPolicyBuilder.noExpiration()); try(PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java similarity index 89% rename from clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java index 16dbc084e7..ea4d8952f3 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java @@ -94,14 +94,14 @@ private Runnable content(final CountDownLatch latch) { return () -> { try { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER_URI).autoCreate() - .defaultServerResource("primary-server-resource") - .resourcePool("resource-pool-a", 32, MemoryUnit.MB) - .resourcePool("resource-pool-b", 32, MemoryUnit.MB, "secondary-server-resource")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER_URI) + .autoCreate(server -> server.defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 8, MemoryUnit.MB) + .resourcePool("resource-pool-b", 8, MemoryUnit.MB, "secondary-server-resource"))) .withCache(CACHE_NAME, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))); latch.countDown(); try { @@ -110,7 +110,7 @@ private Runnable content(final CountDownLatch latch) { // continue } - clusteredCacheManagerBuilder.build(true); + clusteredCacheManagerBuilder.build(true).close(); } catch (Throwable t) { exception.compareAndSet(null, t); // only keep the first exception } diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredEventsTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredEventsTest.java new file mode 100644 index 0000000000..34f75b4518 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/ClusteredEventsTest.java @@ -0,0 +1,205 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.EventFiring; +import org.ehcache.event.EventOrdering; +import org.ehcache.event.EventType; +import org.ehcache.impl.internal.TimeSourceConfiguration; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; + +import java.net.URI; +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.TimeoutException; + +import static java.util.EnumSet.allOf; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ExpiryPolicyBuilder.timeToLiveExpiration; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsIterableContainingInOrder.contains; +import static org.hamcrest.core.IsNull.nullValue; +import static org.terracotta.utilities.test.matchers.Eventually.within; + +public class ClusteredEventsTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + + @Rule + public final TestName runningTest = new TestName(); + + @Before + public void definePassthroughServer() { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 32, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void testNonExpiringEventSequence() throws TimeoutException { + CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(s -> s.defaultServerResource("primary-server-resource"))) + .withCache(runningTest.getMethodName(), newCacheConfigurationBuilder(Long.class, String.class, + newResourcePoolsBuilder().with(clusteredDedicated(16, MemoryUnit.MB)))); + + try (PersistentCacheManager driver = clusteredCacheManagerBuilder.build(true)) { + Cache driverCache = driver.getCache(runningTest.getMethodName(), Long.class, String.class); + try (PersistentCacheManager observer = clusteredCacheManagerBuilder.build(true)) { + Cache observerCache = observer.getCache(runningTest.getMethodName(), Long.class, String.class); + + List> driverEvents = new ArrayList<>(); + driverCache.getRuntimeConfiguration().registerCacheEventListener(driverEvents::add, EventOrdering.ORDERED, EventFiring.ASYNCHRONOUS, allOf(EventType.class)); + + List> observerEvents = new ArrayList<>(); + observerCache.getRuntimeConfiguration().registerCacheEventListener(observerEvents::add, EventOrdering.ORDERED, EventFiring.ASYNCHRONOUS, allOf(EventType.class)); + + + driverCache.put(1L, "foo"); + driverCache.put(1L, "bar"); + driverCache.remove(1L); + driverCache.putIfAbsent(1L, "baz"); + driverCache.replace(1L, "bat"); + driverCache.replace(1L, "bat", "bag"); + driverCache.remove(1L, "bag"); + + @SuppressWarnings("unchecked") + Matcher>> expectedSequence = contains( + created(1L, "foo"), + updated(1L, "foo", "bar"), + removed(1L, "bar"), + created(1L, "baz"), + updated(1L, "baz", "bat"), + updated(1L, "bat", "bag"), + removed(1L, "bag")); + + within(Duration.ofSeconds(10)).runsCleanly(() -> { + assertThat(driverEvents, expectedSequence); + assertThat(observerEvents, expectedSequence); + }); + } + } + } + + @Test + public void testExpiringEventSequence() throws TimeoutException { + TestTimeSource timeSource = new TestTimeSource(); + + CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .using(new TimeSourceConfiguration(timeSource)) + .with(cluster(CLUSTER_URI).autoCreate(s -> s.defaultServerResource("primary-server-resource"))) + .withCache(runningTest.getMethodName(), newCacheConfigurationBuilder(Long.class, String.class, + newResourcePoolsBuilder().with(clusteredDedicated(16, MemoryUnit.MB))) + .withExpiry(timeToLiveExpiration(Duration.ofMillis(1000)))); + + try (PersistentCacheManager driver = clusteredCacheManagerBuilder.build(true)) { + Cache driverCache = driver.getCache(runningTest.getMethodName(), Long.class, String.class); + try (PersistentCacheManager observer = clusteredCacheManagerBuilder.build(true)) { + Cache observerCache = observer.getCache(runningTest.getMethodName(), Long.class, String.class); + + List> driverEvents = new ArrayList<>(); + driverCache.getRuntimeConfiguration().registerCacheEventListener(driverEvents::add, EventOrdering.ORDERED, EventFiring.ASYNCHRONOUS, allOf(EventType.class)); + + List> observerEvents = new ArrayList<>(); + observerCache.getRuntimeConfiguration().registerCacheEventListener(observerEvents::add, EventOrdering.ORDERED, EventFiring.ASYNCHRONOUS, allOf(EventType.class)); + + + driverCache.put(1L, "foo"); + timeSource.advanceTime(1100); + driverCache.putIfAbsent(1L, "bar"); + timeSource.advanceTime(1100); + driverCache.remove(1L); + driverCache.put(1L, "baz"); + timeSource.advanceTime(1100); + assertThat(driverCache.get(1L), nullValue()); + + @SuppressWarnings("unchecked") + Matcher>> expectedSequence = contains( + created(1L, "foo"), + expired(1L, "foo"), + created(1L, "bar"), + expired(1L, "bar"), + created(1L, "baz"), + expired(1L, "baz")); + + within(Duration.ofSeconds(10)).runsCleanly(() -> { + assertThat(driverEvents, expectedSequence); + assertThat(observerEvents, expectedSequence); + }); + } + } + } + + private static Matcher> created(K key, V value) { + return event(EventType.CREATED, key, null, value); + } + + private static Matcher> updated(K key, V oldValue, V newValue) { + return event(EventType.UPDATED, key, oldValue, newValue); + } + + private static Matcher> removed(K key, V value) { + return event(EventType.REMOVED, key, value, null); + } + + private static Matcher> expired(K key, V value) { + return event(EventType.EXPIRED, key, value, null); + } + + private static Matcher> event(EventType type, K key, V oldValue, V newValue) { + return new TypeSafeMatcher>() { + @Override + protected boolean matchesSafely(CacheEvent item) { + return type.equals(item.getType()) && key.equals(item.getKey()) + && Objects.equals(oldValue, item.getOldValue()) + && Objects.equals(newValue, item.getNewValue()); + } + + @Override + public void describeTo(Description description) { + description.appendText(" on '").appendValue(key).appendText("' ").appendValue(type) + .appendText(" [").appendValue(oldValue).appendText(" => ").appendValue(newValue).appendText("]"); + } + }; + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/EntityServiceTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/EntityServiceTest.java similarity index 78% rename from clustered/client/src/test/java/org/ehcache/clustered/client/EntityServiceTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/EntityServiceTest.java index d220400d2d..1d9b027970 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/EntityServiceTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/EntityServiceTest.java @@ -20,7 +20,6 @@ import org.ehcache.clustered.client.internal.UnitTestConnectionService.PassthroughServerBuilder; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockClient; import org.ehcache.clustered.client.service.ClientEntityFactory; -import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.client.service.EntityBusyException; import org.ehcache.clustered.client.service.EntityService; import org.ehcache.config.units.MemoryUnit; @@ -39,7 +38,7 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.fail; public class EntityServiceTest { @@ -59,41 +58,40 @@ public void removePassthroughServer() throws Exception { UnitTestConnectionService.remove(CLUSTER_URI); } - @Test + @Test @SuppressWarnings("try") public void test() throws Exception { ClusteredManagementService clusteredManagementService = new ClusteredManagementService(); - CacheManager cacheManager = newCacheManagerBuilder() + try (CacheManager cacheManager = newCacheManagerBuilder() .using(clusteredManagementService) - .with(cluster(CLUSTER_URI).autoCreate()) - .build(true); + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .build(true)) { - assertThat(clusteredManagementService.clientEntityFactory, is(notNullValue())); + assertThat(clusteredManagementService.clientEntityFactory, is(notNullValue())); - clusteredManagementService.clientEntityFactory.create(); - - try { clusteredManagementService.clientEntityFactory.create(); - fail(); - } catch (Exception e) { - assertThat(e, instanceOf(EntityAlreadyExistsException.class)); - } - VoltronReadWriteLockClient entity = clusteredManagementService.clientEntityFactory.retrieve(); - assertThat(entity, is(notNullValue())); + try { + clusteredManagementService.clientEntityFactory.create(); + fail(); + } catch (Exception e) { + assertThat(e, instanceOf(EntityAlreadyExistsException.class)); + } - try { - clusteredManagementService.clientEntityFactory.destroy(); - fail(); - } catch (Exception e) { - assertThat(e, instanceOf(EntityBusyException.class)); - } + VoltronReadWriteLockClient entity = clusteredManagementService.clientEntityFactory.retrieve(); + assertThat(entity, is(notNullValue())); - entity.close(); + try { + clusteredManagementService.clientEntityFactory.destroy(); + fail(); + } catch (Exception e) { + assertThat(e, instanceOf(EntityBusyException.class)); + } - clusteredManagementService.clientEntityFactory.destroy(); + entity.close(); - cacheManager.close(); + clusteredManagementService.clientEntityFactory.destroy(); + } } @ServiceDependencies(EntityService.class) diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/NonClusteredCacheTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/NonClusteredCacheTest.java new file mode 100644 index 0000000000..51350722c3 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/NonClusteredCacheTest.java @@ -0,0 +1,70 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import org.ehcache.CacheManager; +import org.ehcache.clustered.client.internal.service.DefaultClusteringService; +import org.ehcache.clustered.client.internal.store.ClusteredStore; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.core.util.ClassLoading; +import org.junit.Test; + +import java.util.stream.Collectors; + +import static java.util.Spliterators.spliterator; +import static java.util.stream.StreamSupport.stream; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsCollectionContaining.hasItems; + +/** + * Ensures that a non-clustered {@code CacheManager} can be created when clustered classes are + * available in classpath. + */ +public class NonClusteredCacheTest { + + @Test + public void testNonClustered() throws Exception { + + /* + * Ensure the cluster provider classes are loadable through the ServiceLoader mechanism. + */ + assertThat(stream(spliterator(ClassLoading.servicesOfType(ServiceFactory.class).iterator(), Long.MAX_VALUE, 0), false).map(f -> f.getServiceType()).collect(Collectors.toList()), + hasItems(ClusteredStore.Provider.class, DefaultClusteringService.class)); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder( + String.class, + String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .build()) + .build(); + + + try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true)) { + cacheManager.createCache("cache-1", cacheConfiguration); + cacheManager.createCache("cache-2", cacheConfiguration); + } + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/SimpleClusteredCacheByXmlTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/SimpleClusteredCacheByXmlTest.java similarity index 97% rename from clustered/client/src/test/java/org/ehcache/clustered/client/SimpleClusteredCacheByXmlTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/SimpleClusteredCacheByXmlTest.java index ddc50e979b..bfd5bf1102 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/SimpleClusteredCacheByXmlTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/SimpleClusteredCacheByXmlTest.java @@ -27,11 +27,11 @@ import org.junit.After; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import org.junit.Before; /** diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/TerracottaUriXmlTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/TerracottaUriXmlTest.java similarity index 89% rename from clustered/client/src/test/java/org/ehcache/clustered/client/TerracottaUriXmlTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/TerracottaUriXmlTest.java index b360c25ee6..75ad3a942f 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/TerracottaUriXmlTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/TerracottaUriXmlTest.java @@ -22,8 +22,9 @@ import org.ehcache.xml.exceptions.XmlConfigurationException; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; -import static org.junit.Assert.assertThat; /** * TerracottaUriXmlTest @@ -43,7 +44,7 @@ public void testFailsWithInvalidClusterUri() { try { new XmlConfiguration(getClass().getResource("/configs/cluster-invalid-uri.xml")); } catch (XmlConfigurationException e) { - assertThat(e.getCause().getMessage(), containsString("not facet-valid with respect to pattern")); + assertThat(e.getCause().getMessage(), allOf(containsString("facet"), containsString("pattern"))); } } } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/TestTimeSource.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/TestTimeSource.java similarity index 100% rename from clustered/client/src/test/java/org/ehcache/clustered/client/TestTimeSource.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/TestTimeSource.java diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWithClusteredCacheTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWithClusteredCacheTest.java new file mode 100644 index 0000000000..b4a5460c1d --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWithClusteredCacheTest.java @@ -0,0 +1,162 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import bitronix.tm.BitronixTransactionManager; +import bitronix.tm.TransactionManagerServices; + +import org.ehcache.PersistentCacheManager; +import org.ehcache.StateTransitionException; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; +import org.ehcache.event.EventType; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; +import org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup; +import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.fail; + +/** + * This class should be removed as and when following features are done. + */ +public class UnSupportedCombinationsWithClusteredCacheTest { + + @Before + public void resetPassthroughServer() throws Exception { + UnitTestConnectionService.add("terracotta://localhost/my-application", + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 128, MemoryUnit.MB) + .resource("secondary-server-resource", 96, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove("terracotta://localhost/my-application"); + } + + @Test + public void testClusteredCacheWithSynchronousEventListeners() { + CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder + .newEventListenerConfiguration(new TestEventListener(), EventType.CREATED, EventType.UPDATED) + .unordered().synchronous(); + + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .autoCreate(c -> c)); + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) + .withService(cacheEventListenerConfiguration) + .build(); + + cacheManager.createCache("test", config); + fail("IllegalStateException expected"); + } catch (IllegalStateException e){ + assertThat(e.getCause().getMessage(), is("Synchronous CacheEventListener is not supported with clustered tiers")); + } + } + + @Test + public void testClusteredCacheWithXA() throws Exception { + TransactionManagerServices.getConfiguration().setJournal("null"); + + BitronixTransactionManager transactionManager = + TransactionManagerServices.getTransactionManager(); + + try { + CacheManagerBuilder.newCacheManagerBuilder() + .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")).autoCreate(c -> c)) + .withCache("xaCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB)) + ) + .withService(new XAStoreConfiguration("xaCache")) + .build() + ) + .build(true).close(); + fail("Expected StateTransitionException"); + } catch (StateTransitionException e) { + assertThat(e.getCause().getCause().getMessage(), is("Unsupported resource type : interface org.ehcache.clustered.client.config.DedicatedClusteredResourcePool")); + } + + transactionManager.shutdown(); + } + + private static class TestLoaderWriter implements CacheLoaderWriter { + + @Override + public String load(Long key) { + return null; + } + + @Override + public Map loadAll(Iterable keys) { + return null; + } + + @Override + public void write(Long key, String value) { + + } + + @Override + public void writeAll(Iterable> entries) { + + } + + @Override + public void delete(Long key) { + + } + + @Override + public void deleteAll(Iterable keys) { + + } + } + + private static class TestEventListener implements CacheEventListener { + + @Override + public void onEvent(CacheEvent event) { + + } + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/XmlConsistencyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/XmlConsistencyTest.java similarity index 98% rename from clustered/client/src/test/java/org/ehcache/clustered/client/XmlConsistencyTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/XmlConsistencyTest.java index 10bdf14cea..fdcd802b9c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/XmlConsistencyTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/XmlConsistencyTest.java @@ -25,9 +25,9 @@ import org.junit.Before; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; /** * XmlConsistencyTest diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/XmlUnknownCacheTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/XmlUnknownCacheTest.java new file mode 100644 index 0000000000..0b449d0dc9 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/XmlUnknownCacheTest.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.junit.Assert.fail; + +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.junit.Test; + +/** + * + * @author GGIB + */ +public class XmlUnknownCacheTest { + + @Test + public void testGetUnknownCache() { + XmlConfiguration xmlConfiguration = new XmlConfiguration(this.getClass().getResource("/configs/unknown-cluster-cache.xml")); + assertThat(xmlConfiguration.getCacheConfigurations().keySet(),contains("unknownCache")); + } + + @Test + public void testGetUnknownCacheInvalidAttribute() { + try { + new XmlConfiguration(this.getClass().getResource("/configs/unknown-cluster-cache-invalid-attribute.xml")); + fail("Expected XmlConfigurationException"); + } catch(XmlConfigurationException xce) { + assertThat(xce.getCause().getMessage(), allOf(containsString("unit"), containsString("not allowed"), containsString("clustered"))); + } + } + + @Test + public void testGetUnknownCacheInvalidElement() { + try { + new XmlConfiguration(this.getClass().getResource("/configs/unknown-cluster-cache-invalid-element.xml")); + fail("Expected XmlConfigurationException"); + } catch(XmlConfigurationException xce) { + assertThat(xce.getCause().getMessage(), allOf(containsString("haracter"), containsString("clustered"), containsString("empty"))); + } + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteredConfigurationDerivationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteredConfigurationDerivationTest.java new file mode 100644 index 0000000000..4dbf576386 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteredConfigurationDerivationTest.java @@ -0,0 +1,50 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.config; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.Configuration; +import org.ehcache.xml.XmlConfiguration; +import org.junit.Test; + +import java.net.URI; + +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; + +public class ClusteredConfigurationDerivationTest { + private static final String SIMPLE_CLUSTER_XML = "/configs/simple-cluster.xml"; + private static final URI UPDATED_CLUSTER_URI = URI.create("terracotta://updated.example.com:9540/cachemanager"); + + @Test + public void testUpdateUri() throws Exception { + final XmlConfiguration configuration = new XmlConfiguration(this.getClass().getResource(SIMPLE_CLUSTER_XML)); + + Configuration newServer = configuration.derive().updateServices(ClusteringServiceConfiguration.class, existing -> + existing.usingUri(UPDATED_CLUSTER_URI)).build(); + assertThat(findSingletonAmongst(ClusteringServiceConfiguration.class, newServer.getServiceCreationConfigurations()).getClusterUri(), is(UPDATED_CLUSTER_URI)); + } + + @Test + public void testAddConsistency() { + final XmlConfiguration configuration = new XmlConfiguration(this.getClass().getResource(SIMPLE_CLUSTER_XML)); + + Configuration newConsistency = configuration.derive().updateCache("simple-cache", existing -> + existing.withService(new ClusteredStoreConfiguration(Consistency.STRONG))).build(); + assertThat(findSingletonAmongst(ClusteredStoreConfiguration.class, newConsistency.getCacheConfigurations().get("simple-cache").getServiceConfigurations()).getConsistency(), is(Consistency.STRONG)); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteredStoreConfigurationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteredStoreConfigurationTest.java new file mode 100644 index 0000000000..fdb39f8f40 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteredStoreConfigurationTest.java @@ -0,0 +1,38 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.config; + +import org.ehcache.clustered.common.Consistency; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.junit.Assert.fail; + +public class ClusteredStoreConfigurationTest { + + @Test + public void testDeriveDetachesProperly() { + ClusteredStoreConfiguration configuration = new ClusteredStoreConfiguration(Consistency.EVENTUAL); + ClusteredStoreConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getConsistency(), is(configuration.getConsistency())); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java new file mode 100644 index 0000000000..7e9b4803eb --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java @@ -0,0 +1,209 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.config; + +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.client.internal.ConnectionSource; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.net.InetSocketAddress; +import java.net.URI; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Properties; + +import static java.time.Duration.ofSeconds; +import static java.util.Collections.singletonMap; +import static org.assertj.core.api.Assertions.assertThat; + +@SuppressWarnings("deprecation") +public class ClusteringServiceConfigurationTest { + + private static final URI DEFAULT_URI = URI.create("terracotta://localhost:9450"); + private static final Iterable SERVERS = Collections.singletonList(InetSocketAddress.createUnresolved("localhost", 9450)); + private static final String CACHE_MANAGER = "cacheManager"; + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @Test + public void testGetConnectionUrlNull() { + expectedException.expect(NullPointerException.class); + new ClusteringServiceConfiguration((URI)null); + } + + @Test + public void testGetServersNull() { + expectedException.expect(NullPointerException.class); + new ClusteringServiceConfiguration(null, CACHE_MANAGER); + } + + @Test + public void testGetConnectionUrl() { + assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getClusterUri()).isEqualTo(DEFAULT_URI); + } + + @Test + public void testGetServersAndCacheManager() { + ConnectionSource.ServerList connectionSource = (ConnectionSource.ServerList) new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER).getConnectionSource(); + assertThat(connectionSource.getServers()).isEqualTo(SERVERS); + assertThat(new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER).getConnectionSource().getClusterTierManager()).isEqualTo(CACHE_MANAGER); + } + + @Test + public void testGetServersAndRemove() { + ConnectionSource.ServerList connectionSource = (ConnectionSource.ServerList) new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER).getConnectionSource(); + Iterator iterator = connectionSource.getServers().iterator(); + iterator.next(); + iterator.remove(); + assertThat(connectionSource.getServers()).isEqualTo(SERVERS); + } + + @Test + public void testTimeoutsWithURI() { + Timeouts timeouts = TimeoutsBuilder.timeouts().build(); + assertThat(new ClusteringServiceConfiguration(DEFAULT_URI, timeouts).getTimeouts()).isSameAs(timeouts); + } + + @Test + public void testTimeoutsWithServers() { + Timeouts timeouts = TimeoutsBuilder.timeouts().build(); + assertThat(new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER, timeouts).getTimeouts()).isSameAs(timeouts); + } + + @Test + public void testDefaultTimeoutsWithURI() { + assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getTimeouts()).isEqualTo(TimeoutsBuilder.timeouts().build()); + } + + @Test + public void testDefaultTimeoutsWithServers() { + assertThat(new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER).getTimeouts()).isEqualTo(TimeoutsBuilder.timeouts().build()); + } + + @Test + public void testTimeoutsCannotBeNull2ArgsWithURI() { + expectedException.expect(NullPointerException.class); + new ClusteringServiceConfiguration(DEFAULT_URI, (Timeouts) null); + } + + @Test + public void testTimeoutsCannotBeNull2ArgsWithServers() { + expectedException.expect(NullPointerException.class); + new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER, null); + } + + @Test + public void testTimeoutsCannotBeNull3ArgsWithURI() { + expectedException.expect(NullPointerException.class); + new ClusteringServiceConfiguration(DEFAULT_URI, null, new ServerSideConfiguration(Collections.emptyMap())); + } + + @Test + public void testTimeoutsCannotBeNull3ArgsWithServers() { + expectedException.expect(NullPointerException.class); + new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER, null, new ServerSideConfiguration(Collections.emptyMap())); + } + + @Test + public void testTimeoutsCannotBeNull4ArgsWithURI() { + expectedException.expect(NullPointerException.class); + new ClusteringServiceConfiguration(DEFAULT_URI, null, true, new ServerSideConfiguration(Collections.emptyMap())); + } + + @Test + public void testTimeoutsCannotBeNull4ArgsWithServers() { + expectedException.expect(NullPointerException.class); + new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER, null, true, new ServerSideConfiguration(Collections.emptyMap())); + } + + @Test + public void testGetServiceTypeWithURI() { + assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getServiceType()).isEqualTo(ClusteringService.class); + } + + @Test + public void testGetServiceTypeWithServers() { + assertThat(new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER).getServiceType()).isEqualTo(ClusteringService.class); + } + + @Test + public void testGetAutoCreateWithURI() { + assertThat(new ClusteringServiceConfiguration(DEFAULT_URI, true, + new ServerSideConfiguration(Collections.emptyMap())).isAutoCreate()).isTrue(); + } + + @Test + public void testGetAutoCreateWithServers() { + assertThat(new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER, true, + new ServerSideConfiguration(Collections.emptyMap())).isAutoCreate()).isTrue(); + } + + @Test + public void testBuilderWithURI() { + assertThat(new ClusteringServiceConfiguration(DEFAULT_URI) + .builder(CacheManagerBuilder.newCacheManagerBuilder())).isExactlyInstanceOf(CacheManagerBuilder.class); + } + + @Test + public void testBuilderWithServers() { + assertThat(new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER) + .builder(CacheManagerBuilder.newCacheManagerBuilder())).isExactlyInstanceOf(CacheManagerBuilder.class); + } + + @Test + public void testReadableString() { + ClusteringServiceConfiguration cfg; + + cfg = new ClusteringServiceConfiguration(SERVERS, CACHE_MANAGER); + assertThat(cfg.readableString()).isNotNull(); + + cfg = new ClusteringServiceConfiguration(DEFAULT_URI); + assertThat(cfg.readableString()).isNotNull(); + + cfg = new ClusteringServiceConfiguration(DEFAULT_URI, TimeoutsBuilder.timeouts().build()); + assertThat(cfg.readableString()).isNotNull(); + } + + @Test + public void testDerivedConfiguration() { + URI uri = URI.create("blah-blah"); + Timeouts timeouts = new Timeouts(ofSeconds(1), ofSeconds(2), ofSeconds(3)); + Map pools = singletonMap("default", new ServerSideConfiguration.Pool(42L, "resource")); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfiguration("default", pools); + Properties properties = new Properties(); + properties.setProperty("foo", "bar"); + + ClusteringServiceConfiguration configuration = new ClusteringServiceConfiguration(uri, timeouts, true, serverSideConfiguration, properties); + + + ClusteringServiceConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived).isNotSameAs(configuration); + assertThat(derived.getClusterUri()).isEqualTo(uri); + assertThat(derived.getTimeouts()).isEqualTo(timeouts); + assertThat(derived.getServerConfiguration().getDefaultServerResource()).isEqualTo("default"); + assertThat(derived.getServerConfiguration().getResourcePools()).isEqualTo(pools); + assertThat(derived.getProperties()).isEqualTo(properties); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilderTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilderTest.java similarity index 77% rename from clustered/client/src/test/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilderTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilderTest.java index 2f9544e109..5c0fd438fd 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilderTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/builders/ClusteredResourcePoolBuilderTest.java @@ -19,15 +19,14 @@ import org.ehcache.clustered.client.config.ClusteredResourceType; import org.ehcache.clustered.client.config.SharedClusteredResourcePool; import org.ehcache.config.ResourcePool; -import org.ehcache.config.ResourceType; import org.ehcache.config.units.MemoryUnit; import org.hamcrest.Matchers; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.*; import org.ehcache.clustered.client.config.DedicatedClusteredResourcePool; public class ClusteredResourcePoolBuilderTest { @@ -36,28 +35,23 @@ public class ClusteredResourcePoolBuilderTest { public void dedicated2Arg() throws Exception { ResourcePool pool = ClusteredResourcePoolBuilder.clusteredDedicated(16, MemoryUnit.GB); assertThat(pool, is(instanceOf(DedicatedClusteredResourcePool.class))); - assertThat(pool.getType(), Matchers.is(ClusteredResourceType.Types.DEDICATED)); + assertThat(pool.getType(), Matchers.is(ClusteredResourceType.Types.DEDICATED)); assertThat(pool.isPersistent(), is(true)); assertThat(((DedicatedClusteredResourcePool)pool).getSize(), is(16L)); assertThat(((DedicatedClusteredResourcePool)pool).getUnit(), is(MemoryUnit.GB)); assertThat(((DedicatedClusteredResourcePool)pool).getFromResource(), is(nullValue())); } - @Test + @Test(expected = NullPointerException.class) public void dedicated2ArgUnitNull() throws Exception { - try { - ClusteredResourcePoolBuilder.clusteredDedicated(16, null); - fail(); - } catch (NullPointerException e) { - // expected - } + ClusteredResourcePoolBuilder.clusteredDedicated(16, null); } @Test public void dedicated3Arg() throws Exception { ResourcePool pool = ClusteredResourcePoolBuilder.clusteredDedicated("resourceId", 16, MemoryUnit.GB); assertThat(pool, is(instanceOf(DedicatedClusteredResourcePool.class))); - assertThat(pool.getType(), Matchers.is(ClusteredResourceType.Types.DEDICATED)); + assertThat(pool.getType(), is(ClusteredResourceType.Types.DEDICATED)); assertThat(pool.isPersistent(), is(true)); assertThat(((DedicatedClusteredResourcePool)pool).getSize(), is(16L)); assertThat(((DedicatedClusteredResourcePool)pool).getUnit(), is(MemoryUnit.GB)); @@ -68,40 +62,29 @@ public void dedicated3Arg() throws Exception { public void dedicated3ArgFromNull() throws Exception { ResourcePool pool = ClusteredResourcePoolBuilder.clusteredDedicated(null, 16, MemoryUnit.GB); assertThat(pool, is(instanceOf(DedicatedClusteredResourcePool.class))); - assertThat(pool.getType(), Matchers.is(ClusteredResourceType.Types.DEDICATED)); + assertThat(pool.getType(), is(ClusteredResourceType.Types.DEDICATED)); assertThat(pool.isPersistent(), is(true)); assertThat(((DedicatedClusteredResourcePool)pool).getSize(), is(16L)); assertThat(((DedicatedClusteredResourcePool)pool).getUnit(), is(MemoryUnit.GB)); assertThat(((DedicatedClusteredResourcePool)pool).getFromResource(), is(nullValue())); } - @Test + @Test(expected = NullPointerException.class) public void dedicated3ArgUnitNull() throws Exception { - try { - ClusteredResourcePoolBuilder.clusteredDedicated("resourceId", 16, null); - fail(); - } catch (NullPointerException e) { - // expected - } + ClusteredResourcePoolBuilder.clusteredDedicated("resourceId", 16, null); } @Test public void shared() throws Exception { ResourcePool pool = ClusteredResourcePoolBuilder.clusteredShared("resourceId"); assertThat(pool, is(instanceOf(SharedClusteredResourcePool.class))); - assertThat(pool.getType(), Matchers.is(ClusteredResourceType.Types.SHARED)); + assertThat(pool.getType(), is(ClusteredResourceType.Types.SHARED)); assertThat(pool.isPersistent(), is(true)); assertThat(((SharedClusteredResourcePool)pool).getSharedResourcePool(), is("resourceId")); } - @Test + @Test(expected = NullPointerException.class) public void sharedSharedResourceNull() throws Exception { - try { - ClusteredResourcePoolBuilder.clusteredShared(null); - fail(); - } catch (NullPointerException e) { - // expected - } - + ClusteredResourcePoolBuilder.clusteredShared(null); } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/config/builders/TimeoutsBuilderTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/builders/TimeoutsBuilderTest.java similarity index 92% rename from clustered/client/src/test/java/org/ehcache/clustered/client/config/builders/TimeoutsBuilderTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/builders/TimeoutsBuilderTest.java index 2f1d67910b..266799a904 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/config/builders/TimeoutsBuilderTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/config/builders/TimeoutsBuilderTest.java @@ -26,15 +26,15 @@ public class TimeoutsBuilderTest { @Test - public void build_empty() throws Exception { + public void build_empty() { Timeouts t = TimeoutsBuilder.timeouts().build(); assertThat(t.getReadOperationTimeout()).isEqualTo(Timeouts.DEFAULT_OPERATION_TIMEOUT); assertThat(t.getWriteOperationTimeout()).isEqualTo(Timeouts.DEFAULT_OPERATION_TIMEOUT); - assertThat(t.getConnectionTimeout()).isEqualTo(Timeouts.INFINITE_TIMEOUT); + assertThat(t.getConnectionTimeout()).isEqualTo(Timeouts.DEFAULT_CONNECTION_TIMEOUT); } @Test - public void build_filled() throws Exception { + public void build_filled() { Timeouts t = TimeoutsBuilder.timeouts() .read(Duration.ofDays(1)) .write(Duration.ofDays(2)) diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/ConfigurationDerivation.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/ConfigurationDerivation.java new file mode 100644 index 0000000000..44a23f9d3b --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/ConfigurationDerivation.java @@ -0,0 +1,102 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.docs; + +import org.ehcache.clustered.client.config.ClusteredResourceType; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.Configuration; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.config.ResourcePool; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.resilience.ThrowingResilienceStrategy; +import org.ehcache.core.spi.service.ServiceUtils; +import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.hamcrest.core.Is; +import org.hamcrest.core.IsCollectionContaining; +import org.hamcrest.core.IsInstanceOf; +import org.hamcrest.core.IsNot; +import org.junit.Test; + +import java.net.URI; +import java.util.List; +import java.util.stream.Collectors; + +import static org.hamcrest.MatcherAssert.assertThat; + +public class ConfigurationDerivation { + + @Test + public void removingServices() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withService(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://example.com/myCacheManager"))) + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.heap(1000).with(ClusteredResourcePoolBuilder.clusteredDedicated("offheap", 128, MemoryUnit.MB)))) + .build(); + + //tag::removeService[] + Configuration withoutClustering = configuration.derive() + .updateCaches(cache -> cache // <1> + .withoutServices(ClusteredStoreConfiguration.class) // <2> + .updateResourcePools(existing -> { + ResourcePoolsBuilder poolsBuilder = ResourcePoolsBuilder.newResourcePoolsBuilder(); // <3> + for (ResourcePool pool : existing.getResourceTypeSet().stream() // <4> + .filter(p -> !(p instanceof ClusteredResourceType)) // <5> + .map(existing::getPoolForResource) + .toArray(ResourcePool[]::new)) { + poolsBuilder = poolsBuilder.with(pool); // <6> + } + return poolsBuilder.build(); + })) + .withoutServices(ClusteringServiceConfiguration.class) // <7> + .build(); + //end::removeService[] + + assertThat(withoutClustering.getServiceCreationConfigurations(), IsNot.not(IsCollectionContaining.hasItem( + IsInstanceOf.instanceOf(ClusteringServiceConfiguration.class)))); + } + + @Test + public void updateService() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))) + .build(); + + //tag::updateService[] + Configuration changedConsistency = configuration.derive() + .updateCache("cache", cache -> cache.updateServices( + ClusteredStoreConfiguration.class, + existing -> Consistency.EVENTUAL) + ) + .build(); + //end::updateService[] + + assertThat(ServiceUtils.findSingletonAmongst(ClusteredStoreConfiguration.class, + configuration.getCacheConfigurations().get("cache").getServiceConfigurations()).getConsistency(), Is.is(Consistency.STRONG)); + + assertThat(ServiceUtils.findSingletonAmongst(ClusteredStoreConfiguration.class, + changedConsistency.getCacheConfigurations().get("cache").getServiceConfigurations()).getConsistency(), Is.is(Consistency.EVENTUAL)); + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java new file mode 100644 index 0000000000..7f7ef7b0ee --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java @@ -0,0 +1,265 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.docs; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.xml.XmlConfiguration; +import org.junit.After; +import org.junit.Test; + +import java.net.URI; + +import org.junit.Before; + +/** + * Samples demonstrating use of a clustered cache. + */ +public class GettingStarted { + + @Before + public void resetPassthroughServer() throws Exception { + UnitTestConnectionService.add("terracotta://localhost/my-application", + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 128, MemoryUnit.MB) + .resource("secondary-server-resource", 96, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove("terracotta://localhost/my-application"); + } + + @Test + public void clusteredCacheManagerExample() throws Exception { + // tag::clusteredCacheManagerExample[] + CacheManagerBuilder clusteredCacheManagerBuilder = + CacheManagerBuilder.newCacheManagerBuilder() // <1> + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) // <2> + .autoCreateOnReconnect(c -> c)); // <3> + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); // <4> + + cacheManager.close(); // <5> + // end::clusteredCacheManagerExample[] + } + + @Test + public void clusteredCacheManagerWithServerSideConfigExample() throws Exception { + // tag::clusteredCacheManagerWithServerSideConfigExample[] + CacheManagerBuilder clusteredCacheManagerBuilder = + CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")).autoCreateOnReconnect(server -> server + .defaultServerResource("primary-server-resource") // <1> + .resourcePool("resource-pool-a", 8, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 10, MemoryUnit.MB))) // <3> + .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <4> + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB)))) // <5> + .withCache("shared-cache-1", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")))) // <6> + .withCache("shared-cache-2", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")))); // <7> + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); // <8> + + cacheManager.close(); + // end::clusteredCacheManagerWithServerSideConfigExample[] + } + + @Test + public void clusteredCacheManagerWithDynamicallyAddedCacheExample() throws Exception { + // tag::clusteredCacheManagerWithDynamicallyAddedCacheExample[] + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .autoCreateOnReconnect(server -> server.defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 8, MemoryUnit.MB))); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + + try { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))).build(); + + Cache cache = cacheManager.createCache("clustered-cache", config); + + } finally { + cacheManager.close(); + } + // end::clusteredCacheManagerWithDynamicallyAddedCacheExample[] + } + + @Test + public void explicitConsistencyConfiguration() throws Exception { + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .autoCreateOnReconnect(server -> server.defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 8, MemoryUnit.MB))); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + + try { + // tag::clusteredCacheConsistency[] + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withService(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) // <1> + .build(); + + Cache cache = cacheManager.createCache("clustered-cache", config); + cache.put(42L, "All you need to know!"); // <2> + + // end::clusteredCacheConsistency[] + } finally { + cacheManager.close(); + } + } + + @Test + public void clusteredCacheTieredExample() throws Exception { + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .autoCreateOnReconnect(server -> server.defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 8, MemoryUnit.MB))); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + + try { + // tag::clusteredCacheTieredExample[] + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(2, MemoryUnit.MB) // <1> + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) // <2> + .withService(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .build(); + + Cache cache = cacheManager.createCache("clustered-cache-tiered", config); + cache.put(42L, "All you need to know!"); + + // end::clusteredCacheTieredExample[] + } finally { + cacheManager.close(); + } + } + + @Test + public void clusteredCacheManagerLifecycleExamples() throws Exception { + // tag::clusteredCacheManagerLifecycle[] + CacheManagerBuilder autoCreate = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .autoCreate(server -> server // <1> + .resourcePool("resource-pool", 8, MemoryUnit.MB, "primary-server-resource"))) + .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool")))); + + CacheManagerBuilder autoCreateOnReconnect = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .autoCreateOnReconnect(server -> server // <2> + .resourcePool("resource-pool", 8, MemoryUnit.MB, "primary-server-resource"))) + .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool")))); + + CacheManagerBuilder expecting = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .expecting(server -> server // <3> + .resourcePool("resource-pool", 8, MemoryUnit.MB, "primary-server-resource"))) + .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool")))); + + CacheManagerBuilder configless = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application"))) + // <4> + .withCache("clustered-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool")))); + // end::clusteredCacheManagerLifecycle[] + + autoCreate.build(true).close(); + autoCreateOnReconnect.build(true).close(); + expecting.build(true).close(); + configless.build(true).close(); + } + + @Test + public void loadDocsXml() throws Exception { + new XmlConfiguration(getClass().getResource("/configs/docs/ehcache-clustered.xml")); + } + + @Test + public void unknownClusteredCacheExample() + { + // tag::unspecifiedClusteredCacheExample[] + + CacheManagerBuilder cacheManagerBuilderAutoCreate = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .autoCreateOnReconnect(server -> server // <1> + .resourcePool("resource-pool", 8, MemoryUnit.MB, "primary-server-resource"))); + + PersistentCacheManager cacheManager1 = cacheManagerBuilderAutoCreate.build(false); + cacheManager1.init(); + try { + CacheConfiguration cacheConfigDedicated = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) // <2> + .withService(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .build(); + + Cache cacheDedicated = cacheManager1.createCache("my-dedicated-cache", cacheConfigDedicated); // <3> + + CacheManagerBuilder cacheManagerBuilderExpecting = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .expecting(server -> server // <4> + .resourcePool("resource-pool", 8, MemoryUnit.MB, "primary-server-resource"))); + + PersistentCacheManager cacheManager2 = cacheManagerBuilderExpecting.build(false); + cacheManager2.init(); + try { + CacheConfiguration cacheConfigUnspecified = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clustered())) // <5> + .withService(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .build(); + + Cache cacheUnspecified = cacheManager2.createCache("my-dedicated-cache", cacheConfigUnspecified); // <6> + } finally { + cacheManager2.close(); + } + } finally { + cacheManager1.close(); + } + // end::unspecifiedClusteredCacheExample[] + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/Resilience.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/Resilience.java new file mode 100644 index 0000000000..fd15cf8436 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/Resilience.java @@ -0,0 +1,62 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.docs; + +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.Timeouts; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; +import java.time.Duration; + +public class Resilience { + + @Before + public void resetPassthroughServer() throws Exception { + UnitTestConnectionService.add("terracotta://localhost/my-application", + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 128, MemoryUnit.MB) + .resource("secondary-server-resource", 96, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove("terracotta://localhost/my-application"); + } + + @Test + public void clusteredCacheManagerExample() throws Exception { + // tag::timeoutsExample[] + CacheManagerBuilder clusteredCacheManagerBuilder = + CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost/my-application")) + .timeouts(TimeoutsBuilder.timeouts() // <1> + .read(Duration.ofSeconds(10)) // <2> + .write(Timeouts.DEFAULT_OPERATION_TIMEOUT) // <3> + .connection(Timeouts.INFINITE_TIMEOUT)) // <4> + .autoCreate(c -> c)); + // end::timeoutsExample[] + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/Tiering.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/Tiering.java new file mode 100644 index 0000000000..7f90b671dc --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/Tiering.java @@ -0,0 +1,82 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.docs; + +import java.net.URI; + +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; + +/** + * Tiering + */ +public class Tiering { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com/my-application"); + + @Before + public void definePassthroughServer() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 64, MemoryUnit.MB) + .resource("secondary-server-resource", 64, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void testSingleTier() { + // tag::clusteredOnly[] + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <1> + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated(2, MemoryUnit.GB))); // <2> + // end::clusteredOnly[] + } + + @Test + public void threeTiersCacheManager() throws Exception { + // tag::threeTiersCacheManager[] + PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) // <1> + .withCache("threeTierCache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) // <2> + .offheap(1, MemoryUnit.MB) // <3> + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)) // <4> + ) + ).build(true); + // end::threeTiersCacheManager[] + + persistentCacheManager.close(); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityFactoryTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityFactoryTest.java similarity index 92% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityFactoryTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityFactoryTest.java index 89d4fc6b52..d82db588d0 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityFactoryTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/ClusterTierManagerClientEntityFactoryTest.java @@ -25,8 +25,8 @@ import org.mockito.MockitoAnnotations; import org.terracotta.connection.Connection; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.isNull; @@ -67,7 +67,7 @@ public void testCreate() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-ClusterTierManagerClientEntityFactory-AccessLock-test"); - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); factory.create("test", null); verify(entityRef).create(isA(ClusterTierManagerConfiguration.class)); verifyNoMoreInteractions(entityRef); @@ -79,7 +79,7 @@ public void testCreateBadConfig() throws Exception { when(getEntityRef(ClusterTierManagerClientEntity.class)).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-ClusterTierManagerClientEntityFactory-AccessLock-test"); - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); try { factory.create("test", null); fail("Expecting ClusterTierManagerCreationException"); @@ -95,7 +95,7 @@ public void testCreateWhenExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-ClusterTierManagerClientEntityFactory-AccessLock-test"); - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); try { factory.create("test", null); fail("Expected EntityAlreadyExistsException"); @@ -111,7 +111,7 @@ public void testRetrieve() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-ClusterTierManagerClientEntityFactory-AccessLock-test"); - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); assertThat(factory.retrieve("test", null), is(entity)); verify(entity).validate(isNull()); verify(entity, never()).close(); @@ -125,9 +125,9 @@ public void testRetrieveFailedValidate() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-ClusterTierManagerClientEntityFactory-AccessLock-test"); - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); try { - factory.retrieve("test", null); + factory.retrieve("test", null).close(); fail("Expecting IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected @@ -145,9 +145,9 @@ public void testRetrieveWhenNotExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-ClusterTierManagerClientEntityFactory-AccessLock-test"); - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); try { - factory.retrieve("test", null); + factory.retrieve("test", null).close(); fail("Expected EntityNotFoundException"); } catch (EntityNotFoundException e) { //expected @@ -163,7 +163,7 @@ public void testDestroy() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-ClusterTierManagerClientEntityFactory-AccessLock-test"); - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); factory.destroy("test"); verify(entityRef).destroy(); } @@ -177,7 +177,7 @@ public void testDestroyWhenNotExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-ClusterTierManagerClientEntityFactory-AccessLock-test"); - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); factory.destroy("test"); verify(entityRef).destroy(); } diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/MockConnectionService.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/MockConnectionService.java new file mode 100644 index 0000000000..22e99a9669 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/MockConnectionService.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal; + +import org.terracotta.connection.Connection; +import org.terracotta.connection.ConnectionException; +import org.terracotta.connection.ConnectionService; + +import java.net.InetSocketAddress; +import java.net.URI; +import java.util.Properties; + +/** + * MockConnectionService + */ +public class MockConnectionService implements ConnectionService { + + private static final String CONNECTION_TYPE = "mock"; + public static Connection mockConnection; + + @Override + public boolean handlesURI(URI uri) { + return handlesConnectionType(uri.getScheme()); + } + + @Override + public boolean handlesConnectionType(String s) { + return CONNECTION_TYPE.equals(s); + } + + @Override + public Connection connect(URI uri, Properties properties) throws ConnectionException { + return getConnection(); + } + + @Override + public Connection connect(Iterable iterable, Properties properties) throws ConnectionException { + return getConnection(); + } + + private Connection getConnection() throws ConnectionException { + if (mockConnection == null) { + throw new ConnectionException(new IllegalStateException("Set mock connection first")); + } + return mockConnection; + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java similarity index 85% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java index dc1b8c3cbf..c6b66f3f6d 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java @@ -23,6 +23,7 @@ import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.math.BigInteger; +import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; @@ -34,6 +35,7 @@ import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.stream.Collectors; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; import org.ehcache.clustered.client.internal.store.ClusterTierClientEntityService; @@ -41,6 +43,7 @@ import org.ehcache.clustered.server.ClusterTierManagerServerEntityService; import org.ehcache.clustered.server.store.ClusterTierServerEntityService; +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.connection.Connection; @@ -67,6 +70,8 @@ import org.terracotta.passthrough.PassthroughServer; import org.terracotta.passthrough.PassthroughServerRegistry; +import static java.util.stream.Collectors.toCollection; +import static java.util.stream.Collectors.toList; import static org.mockito.Mockito.mock; @@ -126,8 +131,8 @@ public class UnitTestConnectionService implements ConnectionService { private static final Logger LOGGER = LoggerFactory.getLogger(UnitTestConnectionService.class); - private static final Map STRIPES = new HashMap<>(); - private static final Map SERVERS = new HashMap<>(); + private static final Map STRIPES = new ConcurrentHashMap<>(); + private static final Map SERVERS = new ConcurrentHashMap<>(); private static final String PASSTHROUGH = "passthrough"; @@ -140,26 +145,20 @@ public class UnitTestConnectionService implements ConnectionService { * @param server the {@code PassthroughServer} instance to use for connections to {@code uri} */ public static void add(URI uri, PassthroughServer server) { - URI keyURI = createKey(uri); - if (SERVERS.containsKey(keyURI)) { - throw new AssertionError("Server at " + uri + " already provided; use remove() to remove"); - } - - SERVERS.put(keyURI, new ServerDescriptor(server)); - // TODO rework that better - server.registerAsynchronousServerCrasher(mock(IAsynchronousServerCrasher.class)); - server.start(true, false); - LOGGER.info("Started PassthroughServer at {}", keyURI); + SERVERS.compute(createKey(uri), (u, existing) -> { + if (existing == null) { + server.registerAsynchronousServerCrasher(mock(IAsynchronousServerCrasher.class)); + server.start(true, false); + LOGGER.info("Started PassthroughServer at {}", u); + return new ServerDescriptor(server); + } else { + throw new AssertionError("Server at " + u + " already provided; use remove() to remove"); + } + }); } public static void addServerToStripe(String stripeName, PassthroughServer server) { - - if (STRIPES.get(stripeName) == null) { - StripeDescriptor stripeDescriptor = new StripeDescriptor(); - STRIPES.put(stripeName, stripeDescriptor); - } - - STRIPES.get(stripeName).addServer(server); + STRIPES.computeIfAbsent(stripeName, k -> new StripeDescriptor()).addServer(server); } public static void removeStripe(String stripeName) { @@ -220,35 +219,34 @@ public static PassthroughServer remove(URI uri) { try { LOGGER.warn("Force close {}", formatConnectionId(connection)); connection.close(); - } catch (AssertionError | IOException e) { - // Ignored -- https://github.com/Terracotta-OSS/terracotta-apis/issues/102 + } catch (IOException e) { + throw new AssertionError(e); } } //open destroy connection. You need to make sure connection doesn't have any entities associated with it. - PassthroughConnection connection = serverDescriptor.server.connectNewClient("destroy-connection"); - - // destroy in reverse order of the creation to keep coherence - List> keys = new ArrayList<>(serverDescriptor.knownEntities.keySet()); - Collections.reverse(keys); - for(Class type : keys) { - Object[] args = serverDescriptor.knownEntities.get(type); - - Long version = (Long) args[0]; - String stringArg = (String) args[1]; - - try { - EntityRef entityRef = connection.getEntityRef(type, version, stringArg); - entityRef.destroy(); - } catch (EntityNotProvidedException ex) { - LOGGER.error("Entity destroy failed (not provided???): ", ex); - } catch (EntityNotFoundException ex) { - LOGGER.error("Entity destroy failed: ", ex); - } catch (PermanentEntityException ex) { - LOGGER.error("Entity destroy failed (permanent???): ", ex); + try (PassthroughConnection connection = serverDescriptor.server.connectNewClient("destroy-connection")) { + // destroy in reverse order of the creation to keep coherence + List> keys = new ArrayList<>(serverDescriptor.knownEntities.keySet()); + Collections.reverse(keys); + for (Class type : keys) { + Object[] args = serverDescriptor.knownEntities.get(type); + + Long version = (Long) args[0]; + String stringArg = (String) args[1]; + + try { + EntityRef entityRef = connection.getEntityRef(type, version, stringArg); + entityRef.destroy(); + } catch (EntityNotProvidedException ex) { + LOGGER.error("Entity destroy failed (not provided???): ", ex); + } catch (EntityNotFoundException ex) { + LOGGER.error("Entity destroy failed: ", ex); + } catch (PermanentEntityException ex) { + LOGGER.error("Entity destroy failed (permanent???): ", ex); + } } } - serverDescriptor.server.stop(); LOGGER.info("Stopped PassthroughServer at {}", keyURI); return serverDescriptor.server; @@ -283,7 +281,7 @@ public static PassthroughServer remove(String uri) { @SuppressWarnings("unused") public static final class PassthroughServerBuilder { private final List> serverEntityServices = new ArrayList<>(); - private final List> clientEntityServices = + private final List> clientEntityServices = new ArrayList<>(); private final Map serviceProviders = new IdentityHashMap<>(); @@ -336,7 +334,7 @@ public PassthroughServerBuilder serverEntityService(EntityServerService se return this; } - public PassthroughServerBuilder clientEntityService(EntityClientService service) { + public PassthroughServerBuilder clientEntityService(EntityClientService service) { this.clientEntityServices.add(service); return this; } @@ -360,13 +358,11 @@ public PassthroughServer build() { newServer.registerServerEntityService(service); } - for (EntityClientService service : clientEntityServices) { + for (EntityClientService service : clientEntityServices) { newServer.registerClientEntityService(service); } - if (!this.resources.getResource().isEmpty()) { - newServer.registerExtendedConfiguration(new OffHeapResourcesProvider(this.resources)); - } + newServer.registerExtendedConfiguration(new OffHeapResourcesProvider(this.resources)); for (Map.Entry entry : serviceProviders.entrySet()) { newServer.registerServiceProvider(entry.getKey(), entry.getValue()); @@ -378,27 +374,40 @@ public PassthroughServer build() { public static Collection getConnectionProperties(URI uri) { ServerDescriptor serverDescriptor = SERVERS.get(createKey(uri)); - if (serverDescriptor != null) { - return serverDescriptor.getConnections().values(); - } else { + if (serverDescriptor == null) { return Collections.emptyList(); + } else { + return serverDescriptor.getConnections().values(); } } + public static Collection getConnections(URI uri) { + ServerDescriptor serverDescriptor = SERVERS.get(createKey(uri)); + return serverDescriptor.getConnections().keySet().stream().map(c -> proxyConnection(serverDescriptor, c)).collect(toList()); + } + @Override public boolean handlesURI(URI uri) { if (PASSTHROUGH.equals(uri.getScheme())) { return STRIPES.containsKey(uri.getAuthority()); + } else { + return SERVERS.containsKey(checkURI(uri)); } - checkURI(uri); - return SERVERS.containsKey(uri); + } + + @Override + public boolean handlesConnectionType(String s) { + throw new UnsupportedOperationException("Operation not supported. Use handlesURI(URI) instead."); } @Override public Connection connect(URI uri, Properties properties) throws ConnectionException { if (PASSTHROUGH.equals(uri.getScheme())) { - if(STRIPES.containsKey(uri.getAuthority())) { + StripeDescriptor stripeDescriptor = STRIPES.get(uri.getAuthority()); + if (stripeDescriptor == null) { + throw new IllegalArgumentException("UnitTestConnectionService failed to find stripe" + uri.getAuthority()); + } else { String serverName = uri.getHost(); PassthroughServer server = PassthroughServerRegistry.getSharedInstance().getServerForName(serverName); if(null != server) { @@ -411,8 +420,6 @@ public Connection connect(URI uri, Properties properties) throws ConnectionExcep STRIPES.get(uri.getAuthority()).add(connection); return connection; } - } else { - throw new IllegalArgumentException("UnitTestConnectionService failed to find stripe" + uri.getAuthority()); } } @@ -435,11 +442,20 @@ public Connection connect(URI uri, Properties properties) throws ConnectionExcep /* * Uses a Proxy around Connection so closed connections can be removed from the ServerDescriptor. */ + return proxyConnection(serverDescriptor, connection); + } + + private static Connection proxyConnection(ServerDescriptor serverDescriptor, Connection connection) { return (Connection) Proxy.newProxyInstance(Connection.class.getClassLoader(), - new Class[] { Connection.class }, + new Class[] { Connection.class }, new ConnectionInvocationHandler(serverDescriptor, connection)); } + @Override + public Connection connect(Iterable iterable, Properties properties) { + throw new UnsupportedOperationException("Operation not supported. Use connect(URI, Properties) instead"); + } + /** * Ensures that the {@code URI} presented conforms to the value used to locate a server. * @@ -450,8 +466,10 @@ public Connection connect(URI uri, Properties properties) throws ConnectionExcep * * @see #checkURI(URI) */ - private static void checkURI(URI requestURI) throws IllegalArgumentException { - if (!requestURI.equals(createKey(requestURI))) { + private static URI checkURI(URI requestURI) throws IllegalArgumentException { + if (requestURI.equals(createKey(requestURI))) { + return requestURI; + } else { throw new IllegalArgumentException("Connection URI contains user-info, path, query, and/or fragment"); } } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java similarity index 100% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java similarity index 100% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java similarity index 100% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConfigurationParserIT.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConfigurationParserIT.java new file mode 100644 index 0000000000..3b90be498b --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteredCacheConfigurationParserIT.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.config.xml; + +import org.ehcache.config.Configuration; +import org.ehcache.xml.XmlConfiguration; +import org.junit.Test; + +import java.net.URL; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * ClusteredCacheConfigurationParserIT + */ +public class ClusteredCacheConfigurationParserIT { + + @Test + public void testClusteredCacheXmlTranslationToString() { + URL resource = ClusteredCacheConfigurationParserIT.class.getResource("/configs/clustered-cache.xml"); + Configuration config = new XmlConfiguration(resource); + XmlConfiguration xmlConfig = new XmlConfiguration(config); + assertThat(xmlConfig.toString(), isSameConfigurationAs(resource)); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParserTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParserTest.java new file mode 100644 index 0000000000..70a0e69e9a --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParserTest.java @@ -0,0 +1,122 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.config.xml; + +import org.ehcache.clustered.client.config.DedicatedClusteredResourcePool; +import org.ehcache.clustered.client.config.SharedClusteredResourcePool; +import org.ehcache.clustered.client.internal.config.ClusteredResourcePoolImpl; +import org.ehcache.clustered.client.internal.config.DedicatedClusteredResourcePoolImpl; +import org.ehcache.clustered.client.internal.config.SharedClusteredResourcePoolImpl; +import org.ehcache.config.units.MemoryUnit; +import org.junit.Test; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.xml.sax.InputSource; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.StringReader; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +/** + * ClusteredResourceConfigurationParserTest + */ +public class ClusteredResourceConfigurationParserTest { + + @Test + public void testClusteredSharedUsingProperties() throws ParserConfigurationException, IOException, SAXException { + String property = ClusteredResourceConfigurationParserTest.class.getName() + ":sharing"; + String inputString = ""; + + ClusteredResourceConfigurationParser parser = new ClusteredResourceConfigurationParser(); + + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setNamespaceAware(true); + Element node = documentBuilderFactory.newDocumentBuilder() + .parse(new InputSource(new StringReader(inputString))).getDocumentElement(); + + System.setProperty(property, "foobar"); + try { + SharedClusteredResourcePool configuration = (SharedClusteredResourcePool) parser.parseResourceConfig(node); + + assertThat(configuration.getSharedResourcePool(), is("foobar")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testClusteredDedicatedUsingProperties() throws ParserConfigurationException, IOException, SAXException { + String fromProperty = ClusteredResourceConfigurationParserTest.class.getName() + ":from"; + String sizeProperty = ClusteredResourceConfigurationParserTest.class.getName() + ":size"; + String inputString = "" + + "${" + sizeProperty + "}"; + + ClusteredResourceConfigurationParser parser = new ClusteredResourceConfigurationParser(); + + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setNamespaceAware(true); + Element node = documentBuilderFactory.newDocumentBuilder() + .parse(new InputSource(new StringReader(inputString))).getDocumentElement(); + + System.setProperty(fromProperty, "foobar"); + System.setProperty(sizeProperty, "1024"); + try { + DedicatedClusteredResourcePool configuration = (DedicatedClusteredResourcePool) parser.parseResourceConfig(node); + + assertThat(configuration.getFromResource(), is("foobar")); + assertThat(configuration.getSize(), is(1024L)); + } finally { + System.clearProperty(fromProperty); + System.clearProperty(sizeProperty); + } + } + + @Test + public void testTranslateClusteredResourcePoolConfiguration() { + ClusteredResourceConfigurationParser configTranslator = new ClusteredResourceConfigurationParser(); + ClusteredResourcePoolImpl clusteredResourcePool = new ClusteredResourcePoolImpl(); + Node retElement = configTranslator.unparseResourcePool(clusteredResourcePool); + String inputString = ""; + assertThat(retElement, isSameConfigurationAs(inputString)); + } + + @Test + public void testTranslateDedicatedResourcePoolConfiguration() { + ClusteredResourceConfigurationParser configTranslator = new ClusteredResourceConfigurationParser(); + DedicatedClusteredResourcePoolImpl dedicatedClusteredResourcePool = new DedicatedClusteredResourcePoolImpl("my-from", 12, MemoryUnit.GB); + Node retElement = configTranslator.unparseResourcePool(dedicatedClusteredResourcePool); + String inputString = "12"; + assertThat(retElement, isSameConfigurationAs(inputString)); + } + + @Test + public void testTranslateSharedResourcePoolConfiguration() { + ClusteredResourceConfigurationParser configTranslator = new ClusteredResourceConfigurationParser(); + SharedClusteredResourcePoolImpl sharedResourcePool = new SharedClusteredResourcePoolImpl("shared-pool"); + Node retElement = configTranslator.unparseResourcePool(sharedResourcePool); + String inputString = ""; + assertThat(retElement, isSameConfigurationAs(inputString)); + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheManagerServiceConfigurationParserTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheManagerServiceConfigurationParserTest.java new file mode 100644 index 0000000000..6598cca839 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheManagerServiceConfigurationParserTest.java @@ -0,0 +1,841 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.config.xml; + +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.Timeouts; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.client.internal.ConnectionSource; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.util.ClassLoading; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.CacheManagerServiceConfigurationParser; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.TimeTypeWithPropSubst; +import org.hamcrest.Matchers; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestName; +import org.w3c.dom.Attr; +import org.w3c.dom.Element; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.math.BigInteger; +import java.net.InetSocketAddress; +import java.net.URI; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.time.temporal.TemporalUnit; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.transform.stream.StreamSource; + +import static java.time.temporal.ChronoUnit.MINUTES; +import static java.util.Spliterators.spliterator; +import static java.util.stream.StreamSupport.stream; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.ehcache.xml.XmlModel.convertToJavaTimeUnit; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.IsCollectionContaining.hasItem; +import static org.junit.Assert.fail; + +public class ClusteringCacheManagerServiceConfigurationParserTest { + + @ClassRule + public static final TemporaryFolder folder = new TemporaryFolder(); + + @Rule + public final TestName testName = new TestName(); + + private static final String PROPERTY_PREFIX = ClusteringCacheManagerServiceConfigurationParserTest.class.getName() + ":"; + + /** + * Ensures the {@link ClusteringCacheManagerServiceConfigurationParser} is locatable as a + * {@link CacheManagerServiceConfigurationParser} instance. + */ + @Test + public void testServiceLocator() throws Exception { + assertThat(stream(spliterator(ClassLoading.servicesOfType(CacheManagerServiceConfigurationParser.class).iterator(), Long.MAX_VALUE, 0), false).map(Object::getClass).collect(Collectors.toList()), + hasItem(ClusteringCacheManagerServiceConfigurationParser.class)); + } + + /** + * Ensures the namespace declared by {@link ClusteringCacheManagerServiceConfigurationParser} and its + * schema are the same. + */ + @Test + public void testSchema() throws Exception { + final ClusteringCacheManagerServiceConfigurationParser parser = new ClusteringCacheManagerServiceConfigurationParser(); + final StreamSource schemaSource = (StreamSource) parser.getXmlSchema(); + + final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + factory.setNamespaceAware(true); + factory.setIgnoringComments(true); + factory.setIgnoringElementContentWhitespace(true); + + final DocumentBuilder domBuilder = factory.newDocumentBuilder(); + final Element schema = domBuilder.parse(schemaSource.getInputStream()).getDocumentElement(); + final Attr targetNamespaceAttr = schema.getAttributeNode("targetNamespace"); + assertThat(targetNamespaceAttr, is(not(nullValue()))); + assertThat(targetNamespaceAttr.getValue(), is(parser.getNamespace().toString())); + } + + @Test + public void testGetTimeout() throws Exception { + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " 5", + " 10", + " 15", + " ", + " ", + "", + "" + }; + + final Configuration configuration = new XmlConfiguration(makeConfig(config)); + + Collection> serviceCreationConfigurations = + configuration.getServiceCreationConfigurations(); + assertThat(serviceCreationConfigurations, is(not(Matchers.empty()))); + + ClusteringServiceConfiguration clusteringServiceConfiguration = + findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); + assertThat(clusteringServiceConfiguration, is(notNullValue())); + + Timeouts timeouts = clusteringServiceConfiguration.getTimeouts(); + assertThat(timeouts.getReadOperationTimeout(), is(Duration.of(5, MINUTES))); + assertThat(timeouts.getWriteOperationTimeout(), is(Duration.of(10, MINUTES))); + assertThat(timeouts.getConnectionTimeout(), is(Duration.of(15, MINUTES))); + } + + @Test + public void testGetTimeoutNone() throws Exception { + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ", + " ", + "", + "" + }; + + final Configuration configuration = new XmlConfiguration(makeConfig(config)); + + Collection> serviceCreationConfigurations = + configuration.getServiceCreationConfigurations(); + assertThat(serviceCreationConfigurations, is(not(Matchers.empty()))); + + ClusteringServiceConfiguration clusteringServiceConfiguration = + findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); + assertThat(clusteringServiceConfiguration, is(notNullValue())); + + assertThat(clusteringServiceConfiguration.getTimeouts(), is(TimeoutsBuilder.timeouts().build())); + } + + @Test + public void testGetTimeoutUnitDefault() throws Exception { + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " 5", + " ", + " ", + "", + "" + }; + + final Configuration configuration = new XmlConfiguration(makeConfig(config)); + + Collection> serviceCreationConfigurations = + configuration.getServiceCreationConfigurations(); + assertThat(serviceCreationConfigurations, is(not(Matchers.empty()))); + + ClusteringServiceConfiguration clusteringServiceConfiguration = + findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); + assertThat(clusteringServiceConfiguration, is(notNullValue())); + + TemporalUnit defaultUnit = convertToJavaTimeUnit(new TimeTypeWithPropSubst().getUnit()); + assertThat(clusteringServiceConfiguration.getTimeouts().getReadOperationTimeout(), + is(equalTo(Duration.of(5, defaultUnit)))); + } + + @Test + public void testGetTimeoutUnitBad() throws Exception { + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " 5", + " ", + " ", + "", + "" + }; + + try { + new XmlConfiguration(makeConfig(config)); + fail("Expecting XmlConfigurationException"); + } catch (XmlConfigurationException e) { + assertThat(e.getMessage(), containsString("Error parsing XML configuration ")); + assertThat(e.getCause().getMessage(), allOf(containsString("facet"), containsString("enumeration"), containsString("femtos"))); + } + } + + @Test + public void testGetTimeoutValueTooBig() throws Exception { + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " " + + BigInteger.ONE.add(BigInteger.valueOf(Long.MAX_VALUE)) + + "", + " ", + " ", + "", + "" + }; + + try { + new XmlConfiguration(makeConfig(config)); + fail("Expecting XmlConfigurationException"); + } catch (XmlConfigurationException e) { + assertThat(e.getMessage(), containsString(" exceeds allowed value ")); + } + } + + @Test + public void testGetTimeoutValueOmitted() throws Exception { + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ", + " ", + " ", + "", + "" + }; + + try { + new XmlConfiguration(makeConfig(config)); + fail("Expecting XmlConfigurationException"); + } catch (XmlConfigurationException e) { + assertThat(e.getMessage(), containsString("Error parsing XML configuration ")); + assertThat(e.getCause().getMessage(), allOf(containsString("propertyOrPositiveInteger"), containsString("valid"), containsString("not"))); + } + } + + @Test + public void testGetTimeoutAsProperty() throws Exception { + String readTimeoutProperty = PROPERTY_PREFIX + testName.getMethodName() + ":read"; + String writeTimeoutProperty = PROPERTY_PREFIX + testName.getMethodName() + ":write"; + String connectTimeoutProperty = PROPERTY_PREFIX + testName.getMethodName() + ":connect"; + Map properties = new HashMap<>(); + properties.put(readTimeoutProperty, "5"); + properties.put(writeTimeoutProperty, "10"); + properties.put(connectTimeoutProperty, "15"); + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ${" + readTimeoutProperty + "}", + " ${" + writeTimeoutProperty + "}", + " ${" + connectTimeoutProperty + "}", + " ", + " ", + "", + "" + }; + + properties.forEach(System::setProperty); + try { + final Configuration configuration = new XmlConfiguration(makeConfig(config)); + + Collection> serviceCreationConfigurations = + configuration.getServiceCreationConfigurations(); + assertThat(serviceCreationConfigurations, is(not(Matchers.empty()))); + + ClusteringServiceConfiguration clusteringServiceConfiguration = + findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); + assertThat(clusteringServiceConfiguration, is(notNullValue())); + + Timeouts timeouts = clusteringServiceConfiguration.getTimeouts(); + assertThat(timeouts.getReadOperationTimeout(), is(Duration.of(5, MINUTES))); + assertThat(timeouts.getWriteOperationTimeout(), is(Duration.of(10, MINUTES))); + assertThat(timeouts.getConnectionTimeout(), is(Duration.of(15, MINUTES))); + } finally { + properties.keySet().forEach(System::clearProperty); + } + } + + @Test + public void testUrlWithProperty() throws Exception { + String serverProperty = PROPERTY_PREFIX + testName.getMethodName() + ":server"; + String portProperty = PROPERTY_PREFIX + testName.getMethodName() + ":port"; + Map properties = new HashMap<>(); + properties.put(serverProperty, "example.com"); + properties.put(portProperty, "9540"); + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ", + " ", + "", + "" + }; + + properties.forEach(System::setProperty); + try { + XmlConfiguration configuration = new XmlConfiguration(makeConfig(config)); + ClusteringServiceConfiguration clusteringConfig = findSingletonAmongst(ClusteringServiceConfiguration.class, configuration.getServiceCreationConfigurations()); + ConnectionSource.ClusterUri connectionSource = (ConnectionSource.ClusterUri) clusteringConfig.getConnectionSource(); + assertThat(connectionSource.getClusterUri(), is(URI.create("terracotta://example.com:9540/cachemanager"))); + } finally { + properties.keySet().forEach(System::clearProperty); + } + } + + @Test(expected = XmlConfigurationException.class) + public void testUrlAndServers() throws Exception { + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + "", + "" + }; + + new XmlConfiguration(makeConfig(config)); + } + + @Test(expected = XmlConfigurationException.class) + public void testServersOnly() throws Exception { + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + "", + "" + }; + + new XmlConfiguration(makeConfig(config)); + } + + @Test + public void testServersWithClusterTierManager() throws Exception { + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + "", + "" + }; + + final Configuration configuration = new XmlConfiguration(makeConfig(config)); + Collection> serviceCreationConfigurations = configuration.getServiceCreationConfigurations(); + ClusteringServiceConfiguration clusteringServiceConfiguration = + findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); + ConnectionSource.ServerList connectionSource = (ConnectionSource.ServerList) clusteringServiceConfiguration.getConnectionSource(); + Iterable servers = connectionSource.getServers(); + + InetSocketAddress firstServer = InetSocketAddress.createUnresolved("server-1", 9510); + InetSocketAddress secondServer = InetSocketAddress.createUnresolved("server-2", 9540); + List expectedServers = Arrays.asList(firstServer, secondServer); + + assertThat(connectionSource.getClusterTierManager(), is("cM")); + assertThat(servers, is(expectedServers)); + } + + @Test + public void testServersWithClusterTierManagerAndOptionalPorts() throws Exception { + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + "", + "" + }; + + final Configuration configuration = new XmlConfiguration(makeConfig(config)); + Collection> serviceCreationConfigurations = configuration.getServiceCreationConfigurations(); + ClusteringServiceConfiguration clusteringServiceConfiguration = + findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); + ConnectionSource.ServerList connectionSource = (ConnectionSource.ServerList)clusteringServiceConfiguration.getConnectionSource(); + Iterable servers = connectionSource.getServers(); + + InetSocketAddress firstServer = InetSocketAddress.createUnresolved("100.100.100.100", 9510); + InetSocketAddress secondServer = InetSocketAddress.createUnresolved("server-2", 0); + InetSocketAddress thirdServer = InetSocketAddress.createUnresolved("[::1]", 0); + InetSocketAddress fourthServer = InetSocketAddress.createUnresolved("[fe80::1453:846e:7be4:15fe]", 9710); + List expectedServers = Arrays.asList(firstServer, secondServer, thirdServer, fourthServer); + + assertThat(connectionSource.getClusterTierManager(), is("cM")); + assertThat(servers, is(expectedServers)); + } + + @Test + public void testServersWithClusterTierManagerAndOptionalPortsUsingProperties() throws Exception { + String hostProperty = PROPERTY_PREFIX + testName.getMethodName() + ":host"; + String portProperty = PROPERTY_PREFIX + testName.getMethodName() + ":port"; + String tierManagerProperty = PROPERTY_PREFIX + testName.getMethodName() + ":tierManager"; + Map properties = new HashMap<>(); + properties.put(hostProperty, "100.100.100.100"); + properties.put(portProperty, "9510"); + properties.put(tierManagerProperty, "george"); + + final String[] config = new String[] + { + "", + "", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + "", + "" + }; + + properties.forEach(System::setProperty); + try { + final Configuration configuration = new XmlConfiguration(makeConfig(config)); + Collection> serviceCreationConfigurations = configuration.getServiceCreationConfigurations(); + ClusteringServiceConfiguration clusteringServiceConfiguration = + findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); + ConnectionSource.ServerList connectionSource = (ConnectionSource.ServerList) clusteringServiceConfiguration.getConnectionSource(); + Iterable servers = connectionSource.getServers(); + + assertThat(connectionSource.getClusterTierManager(), is("george")); + assertThat(servers, contains(InetSocketAddress.createUnresolved("100.100.100.100", 9510))); + } finally { + properties.keySet().forEach(System::clearProperty); + } + } + + @Test @SuppressWarnings("deprecation") + public void testAutoCreateFalseMapsToExpecting() throws IOException { + final String[] config = new String[] + { + "", + " ", + " ", + " ", + " ", + " ", + " ", + "" + }; + + XmlConfiguration configuration = new XmlConfiguration(makeConfig(config)); + ClusteringServiceConfiguration clusterConfig = findSingletonAmongst(ClusteringServiceConfiguration.class, configuration.getServiceCreationConfigurations()); + + assertThat(clusterConfig.isAutoCreate(), is(false)); + assertThat(clusterConfig.getClientMode(), is(ClusteringServiceConfiguration.ClientMode.EXPECTING)); + } + + @Test @SuppressWarnings("deprecation") + public void testAutoCreateTrueMapsToAutoCreate() throws IOException { + final String[] config = new String[] + { + "", + " ", + " ", + " ", + " ", + " ", + " ", + "" + }; + + XmlConfiguration configuration = new XmlConfiguration(makeConfig(config)); + ClusteringServiceConfiguration clusterConfig = findSingletonAmongst(ClusteringServiceConfiguration.class, configuration.getServiceCreationConfigurations()); + + assertThat(clusterConfig.isAutoCreate(), is(true)); + assertThat(clusterConfig.getClientMode(), is(ClusteringServiceConfiguration.ClientMode.AUTO_CREATE)); + } + + @Test + public void testBothAutoCreateAndClientModeIsDisallowed() throws IOException { + final String[] config = new String[] + { + "", + " ", + " ", + " ", + " ", + " ", + " ", + "" + }; + + try { + new XmlConfiguration(makeConfig(config)); + } catch (XmlConfigurationException e) { + assertThat(e.getMessage(), is("Cannot define both 'auto-create' and 'client-mode' attributes")); + } + } + + @Test + public void testClientModeAsAProperty() throws IOException { + String clientModeProperty = PROPERTY_PREFIX + testName.getMethodName() + ":client-mode"; + Map properties = new HashMap<>(); + properties.put(clientModeProperty, "auto-create"); + + final String[] config = new String[] + { + "", + " ", + " ", + " ", + " ", + " ", + " ", + "" + }; + + properties.forEach(System::setProperty); + try { + XmlConfiguration configuration = new XmlConfiguration(makeConfig(config)); + ClusteringServiceConfiguration clusterConfig = findSingletonAmongst(ClusteringServiceConfiguration.class, configuration.getServiceCreationConfigurations()); + assertThat(clusterConfig.getClientMode(), is(ClusteringServiceConfiguration.ClientMode.AUTO_CREATE)); + } finally { + properties.keySet().forEach(System::clearProperty); + } + } + + @Test + public void testSharedPoolUsingProperties() throws IOException { + String poolSizeProperty = PROPERTY_PREFIX + testName.getMethodName() + ":pool-size"; + String fromProperty = PROPERTY_PREFIX + testName.getMethodName() + ":from"; + Map properties = new HashMap<>(); + properties.put(poolSizeProperty, "1024"); + properties.put(fromProperty, "source"); + + final String[] config = new String[] + { + "", + " ", + " ", + " ", + " ", + " ", + " ${" + poolSizeProperty + "}", + " ", + " ", + " ", + " ", + "" + }; + + properties.forEach(System::setProperty); + try { + XmlConfiguration configuration = new XmlConfiguration(makeConfig(config)); + ClusteringServiceConfiguration clusterConfig = findSingletonAmongst(ClusteringServiceConfiguration.class, configuration.getServiceCreationConfigurations()); + ServerSideConfiguration.Pool pool = clusterConfig.getServerConfiguration().getResourcePools().get("pool"); + assertThat(pool.getSize(), is(1024L)); + assertThat(pool.getServerResource(), is("source")); + } finally { + properties.keySet().forEach(System::clearProperty); + } + } + + @Test + public void testDefaultResourceAsAProperty() throws IOException { + String fromProperty = PROPERTY_PREFIX + testName.getMethodName() + ":from"; + Map properties = new HashMap<>(); + properties.put(fromProperty, "source"); + + final String[] config = new String[] + { + "", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + "" + }; + + properties.forEach(System::setProperty); + try { + XmlConfiguration configuration = new XmlConfiguration(makeConfig(config)); + ClusteringServiceConfiguration clusterConfig = findSingletonAmongst(ClusteringServiceConfiguration.class, configuration.getServiceCreationConfigurations()); + assertThat(clusterConfig.getServerConfiguration().getDefaultServerResource(), is("source")); + } finally { + properties.keySet().forEach(System::clearProperty); + } + } + + @Test + public void testTranslateServiceCreationConfiguration() throws Exception { + URI connectionUri = new URI("terracotta://localhost:9510/my-application"); + ClusteringServiceConfiguration serviceConfig = ClusteringServiceConfigurationBuilder.cluster(connectionUri) + .timeouts(Timeouts.DEFAULT) + .autoCreate(server -> server + .defaultServerResource("main") + .resourcePool("primaryresource", 5, MemoryUnit.GB) + .resourcePool("secondaryresource", 10, MemoryUnit.GB, "optional")) + .build(); + + ClusteringCacheManagerServiceConfigurationParser parser = new ClusteringCacheManagerServiceConfigurationParser(); + Element returnElement = parser.unparseServiceCreationConfiguration(serviceConfig); + + String inputString = "" + + "" + + "5" + + "5" + + "150" + + "" + + "" + + "5368709120" + + "10737418240" + + ""; + assertThat(returnElement, isSameConfigurationAs(inputString)); + } + + @Test + public void testTranslateServiceCreationConfigurationWithNoResourcePoolAndAutoCreateFalse() throws Exception { + URI connectionUri = new URI("terracotta://localhost:9510/my-application"); + ClusteringServiceConfiguration serviceConfig = ClusteringServiceConfigurationBuilder.cluster(connectionUri) + .timeouts(Timeouts.DEFAULT) + .expecting(server -> server.defaultServerResource("main")) + .build(); + + + ClusteringCacheManagerServiceConfigurationParser parser = new ClusteringCacheManagerServiceConfigurationParser(); + Element returnElement = parser.unparseServiceCreationConfiguration(serviceConfig); + + String inputString = "" + + "" + + "5" + + "5" + + "150" + + "" + + "" + + ""; + assertThat(returnElement, isSameConfigurationAs(inputString)); + } + + @Test + public void testTranslateServiceCreationConfigurationWithNoServerSideConfig() throws Exception { + URI connectionUri = new URI("terracotta://localhost:9510/my-application"); + ClusteringServiceConfiguration serviceConfig = ClusteringServiceConfigurationBuilder.cluster(connectionUri) + .timeouts(Timeouts.DEFAULT) + .build(); + + ClusteringCacheManagerServiceConfigurationParser parser = new ClusteringCacheManagerServiceConfigurationParser(); + Element returnElement = parser.unparseServiceCreationConfiguration(serviceConfig); + + String inputString = "" + + "" + + "5" + + "5" + + "150" + + ""; + assertThat(returnElement, isSameConfigurationAs(inputString)); + } + + @Test + public void testTranslateServiceCreationConfigurationWithInetSocketAddress() { + + InetSocketAddress firstServer = InetSocketAddress.createUnresolved("100.100.100.100", 9510); + InetSocketAddress secondServer = InetSocketAddress.createUnresolved("server-2", 0); + InetSocketAddress thirdServer = InetSocketAddress.createUnresolved("[::1]", 0); + InetSocketAddress fourthServer = InetSocketAddress.createUnresolved("[fe80::1453:846e:7be4:15fe]", 9710); + List servers = Arrays.asList(firstServer, secondServer, thirdServer, fourthServer); + ClusteringServiceConfiguration serviceConfig = ClusteringServiceConfigurationBuilder.cluster(servers, "my-application") + .timeouts(Timeouts.DEFAULT) + .build(); + + + ClusteringCacheManagerServiceConfigurationParser parser = new ClusteringCacheManagerServiceConfigurationParser(); + Element returnElement = parser.unparseServiceCreationConfiguration(serviceConfig); + + String inputString = "" + + "" + + "" + + "" + + "" + + "" + + "" + + "5" + + "5" + + "150" + + ""; + assertThat(returnElement, isSameConfigurationAs(inputString)); + } + + /** + * Constructs a temporary XML configuration file. + * + * @param lines the lines to include in the XML configuration file + * @return a {@code URL} pointing to the XML configuration file + * @throws IOException if an error is raised while creating or writing the XML configuration file + */ + private URL makeConfig(final String[] lines) throws IOException { + final File configFile = folder.newFile(testName.getMethodName() + "_config.xml"); + + try (FileOutputStream fout = new FileOutputStream(configFile); OutputStreamWriter out = new OutputStreamWriter(fout, StandardCharsets.UTF_8)) { + for (final String line : lines) { + out.write(line); + } + } + + return configFile.toURI().toURL(); + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheServiceConfigurationParserTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheServiceConfigurationParserTest.java new file mode 100644 index 0000000000..a1829d7f7d --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringCacheServiceConfigurationParserTest.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.config.xml; + +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.junit.Test; +import org.w3c.dom.Node; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; + +public class ClusteringCacheServiceConfigurationParserTest { + + @Test + public void testTranslateServiceStoreConfiguration() { + + ClusteringCacheServiceConfigurationParser configurationTranslator = new ClusteringCacheServiceConfigurationParser(); + Node retNode = configurationTranslator.unparseServiceConfiguration( + ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG).build()); + + String inputString = ""; + assertThat(retNode, isSameConfigurationAs(inputString)); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreProviderTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreProviderTest.java new file mode 100644 index 0000000000..163cbfd8fd --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreProviderTest.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter; + +import org.ehcache.clustered.client.config.ClusteredResourceType; +import org.ehcache.clustered.client.internal.store.ClusteredStoreProviderTest; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.offheap.OffHeapStore; +import org.ehcache.impl.internal.store.tiering.TieredStore; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; +import org.junit.Test; + +import java.util.Collections; +import java.util.HashSet; + +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class ClusteredLoaderWriterStoreProviderTest { + + private final CacheLoaderWriterConfiguration cacheLoaderWriterConfiguration = mock(CacheLoaderWriterConfiguration.class); + + @Test + public void testRank() { + ClusteredLoaderWriterStore.Provider provider = new ClusteredLoaderWriterStore.Provider(); + ServiceLocator serviceLocator = dependencySet() + .with(new TieredStore.Provider()) + .with(new OnHeapStore.Provider()) + .with(new OffHeapStore.Provider()) + .with(mock(DiskResourceService.class)) + .with(new OffHeapDiskStore.Provider()) + .with(mock(ClusteringService.class)).build(); + provider.start(serviceLocator); + + assertThat(provider.rank(new HashSet<>(Collections.singletonList(ClusteredResourceType.Types.DEDICATED)), + Collections.singletonList(cacheLoaderWriterConfiguration)), is(2)); + assertThat(provider.rank(new HashSet<>(Collections.singletonList(ClusteredResourceType.Types.DEDICATED)), + Collections.emptyList()), is(0)); + assertThat(provider.rank(new HashSet<>(Collections.singletonList(new ClusteredStoreProviderTest.UnmatchedResourceType())), + Collections.singletonList(cacheLoaderWriterConfiguration)), is(0)); + } + + @Test + public void testAuthoritativeRank() { + ClusteredLoaderWriterStore.Provider provider = new ClusteredLoaderWriterStore.Provider(); + ServiceLocator serviceLocator = dependencySet().with(mock(ClusteringService.class)).build(); + provider.start(serviceLocator); + + assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, + Collections.singletonList(cacheLoaderWriterConfiguration)), + is(2)); + assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, Collections.emptyList()), + is(0)); + assertThat(provider.rankAuthority(new ClusteredStoreProviderTest.UnmatchedResourceType(), Collections.singletonList(cacheLoaderWriterConfiguration)), + is(0)); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreTest.java new file mode 100644 index 0000000000..b7ee9e0c2d --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/ClusteredLoaderWriterStoreTest.java @@ -0,0 +1,395 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter; + +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.client.internal.store.lock.LockingServerStoreProxyImpl; +import org.ehcache.clustered.client.internal.store.operations.EternalChainResolver; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.loaderWriter.TestCacheLoaderWriter; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.impl.serialization.LongSerializer; +import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.junit.Test; +import org.mockito.ArgumentMatchers; + +import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.concurrent.TimeoutException; + +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +public class ClusteredLoaderWriterStoreTest { + + @SuppressWarnings("unchecked") + private Store.Configuration configuration = mock(Store.Configuration.class); + private OperationsCodec codec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); + private EternalChainResolver resolver = new EternalChainResolver<>(codec); + private TimeSource timeSource = mock(TimeSource.class); + + @Test + public void testContainsKeyValueAbsentInCache() throws Exception { + ServerStoreProxy storeProxy = mock(LockingServerStoreProxyImpl.class); + @SuppressWarnings("unchecked") + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + when(storeProxy.get(eq(1L))).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.containsKey(1L), is(false)); + verify(loaderWriter, never()).load(anyLong()); + } + + @Test + public void testContainsKeyValuePresentInCache() throws Exception { + ServerStoreProxy storeProxy = mock(LockingServerStoreProxyImpl.class); + @SuppressWarnings("unchecked") + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + PutOperation operation = new PutOperation<>(1L, "one", System.currentTimeMillis()); + ServerStoreProxy.ChainEntry toReturn = entryOf(codec.encode(operation)); + when(storeProxy.get(anyLong())).thenReturn(toReturn); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.containsKey(1L), is(true)); + verify(loaderWriter, never()).load(anyLong()); + } + + @Test + public void testGetValueAbsentInSOR() throws Exception { + ServerStoreProxy storeProxy = mock(LockingServerStoreProxyImpl.class); + CacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.get(eq(1L))).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.get(1L), is(nullValue())); + } + + @Test + public void testGetValuePresentInSOR() throws Exception { + ServerStoreProxy storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + loaderWriter.storeMap.put(1L, "one"); + when(storeProxy.get(eq(1L))).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.get(1L).get(), equalTo("one")); + } + + @Test + public void testGetValuePresentInCache() throws Exception { + ServerStoreProxy storeProxy = mock(LockingServerStoreProxyImpl.class); + @SuppressWarnings("unchecked") + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + PutOperation operation = new PutOperation<>(1L, "one", System.currentTimeMillis()); + ServerStoreProxy.ChainEntry toReturn = entryOf(codec.encode(operation)); + when(storeProxy.get(anyLong())).thenReturn(toReturn); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.get(1L).get(), equalTo("one")); + verify(loaderWriter, times(0)).load(anyLong()); + verifyZeroInteractions(loaderWriter); + } + + @Test + public void testPut() throws Exception { + ServerStoreProxy storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(loaderWriter.storeMap.containsKey(1L), is(false)); + assertThat(store.put(1L, "one"), is(Store.PutStatus.PUT)); + assertThat(loaderWriter.storeMap.containsKey(1L), is(true)); + } + + @Test + public void testRemoveValueAbsentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.remove(1L), is(false)); + assertThat(loaderWriter.storeMap.containsKey(1L), is(false)); + } + + @Test + public void testRemoveValuePresentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + PutOperation operation = new PutOperation<>(1L, "one", System.currentTimeMillis()); + ServerStoreProxy.ChainEntry toReturn = entryOf(codec.encode(operation)); + when(storeProxy.lock(anyLong())).thenReturn(toReturn); + when(storeProxy.get(anyLong())).thenReturn(toReturn); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.get(1L).get(), equalTo("one")); + assertThat(store.remove(1L), is(true)); + assertThat(loaderWriter.storeMap.containsKey(1L), is(false)); + } + + @Test + public void testRemoveValueAbsentInCacheAbsentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + @SuppressWarnings("unchecked") + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.remove(1L), is(false)); + verify(loaderWriter, times(1)).delete(anyLong()); + } + + @Test + public void testPufIfAbsentValueAbsentInCacheAbsentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(loaderWriter.storeMap.isEmpty(), is(true)); + assertThat(store.putIfAbsent(1L, "one", null), is(nullValue())); + assertThat(loaderWriter.storeMap.get(1L), equalTo("one")); + } + + @Test + public void testPufIfAbsentValueAbsentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.putIfAbsent(1L, "Again", null).get(), equalTo("one")); + verify(storeProxy, times(0)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + assertThat(loaderWriter.storeMap.get(1L), equalTo("one")); + } + + @Test + public void testPufIfAbsentValuePresentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + PutOperation operation = new PutOperation<>(1L, "one", System.currentTimeMillis()); + ServerStoreProxy.ChainEntry toReturn = entryOf(codec.encode(operation)); + when(storeProxy.lock(anyLong())).thenReturn(toReturn); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.putIfAbsent(1L, "Again", null).get(), equalTo("one")); + verify(storeProxy, times(0)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + assertThat(loaderWriter.storeMap.get(1L), equalTo("one")); + } + + @Test + public void testReplaceValueAbsentInCacheAbsentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(loaderWriter.storeMap.isEmpty(), is(true)); + assertThat(store.replace(1L, "one"), is(nullValue())); + assertThat(loaderWriter.storeMap.isEmpty(), is(true)); + verify(storeProxy, times(0)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + } + + @Test + public void testReplaceValueAbsentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.replace(1L, "Again").get(), equalTo("one")); + verify(storeProxy, times(1)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + assertThat(loaderWriter.storeMap.get(1L), equalTo("Again")); + } + + @Test + public void testReplaceValuePresentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + PutOperation operation = new PutOperation<>(1L, "one", System.currentTimeMillis()); + ServerStoreProxy.ChainEntry toReturn = entryOf(codec.encode(operation)); + when(storeProxy.lock(anyLong())).thenReturn(toReturn); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.replace(1L, "Again").get(), equalTo("one")); + verify(storeProxy, times(1)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + assertThat(loaderWriter.storeMap.get(1L), equalTo("Again")); + } + + @Test + public void testRemove2ArgsValueAbsentInCacheAbsentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + @SuppressWarnings("unchecked") + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.KEY_MISSING)); + verify(storeProxy, times(0)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + } + + @Test + public void testRemove2ArgsValueAbsentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.REMOVED)); + verify(storeProxy, times(1)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + assertThat(loaderWriter.storeMap.isEmpty(), is(true)); + } + + @Test + public void testRemove2ArgsValuePresentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + @SuppressWarnings("unchecked") + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + PutOperation operation = new PutOperation<>(1L, "one", System.currentTimeMillis()); + ServerStoreProxy.ChainEntry toReturn = entryOf(codec.encode(operation)); + when(storeProxy.lock(anyLong())).thenReturn(toReturn); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.REMOVED)); + verify(storeProxy, times(1)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + verify(loaderWriter, times(0)).load(anyLong()); + verify(loaderWriter, times(1)).delete(anyLong()); + } + + @Test + public void testRemove2ArgsValueAbsentInCacheDiffValuePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.remove(1L, "Again"), is(Store.RemoveStatus.KEY_PRESENT)); + verify(storeProxy, times(0)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + assertThat(loaderWriter.storeMap.get(1L), equalTo("one")); + } + + @Test + public void testReplace2ArgsValueAbsentInCacheAbsentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + @SuppressWarnings("unchecked") + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.replace(1L, "one", "Again"), is(Store.ReplaceStatus.MISS_NOT_PRESENT)); + verify(storeProxy, times(0)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + verify(loaderWriter, times(1)).load(anyLong()); + verify(loaderWriter, times(0)).write(anyLong(), anyString()); + } + + @Test + public void testReplace2ArgsValueAbsentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.replace(1L, "one", "Again"), is(Store.ReplaceStatus.HIT)); + verify(storeProxy, times(1)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + assertThat(loaderWriter.storeMap.get(1L), equalTo("Again")); + } + + @Test + public void testReplace2ArgsValuePresentInCachePresentInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + @SuppressWarnings("unchecked") + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + PutOperation operation = new PutOperation<>(1L, "one", System.currentTimeMillis()); + ServerStoreProxy.ChainEntry toReturn = entryOf(codec.encode(operation)); + when(storeProxy.lock(anyLong())).thenReturn(toReturn); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + assertThat(store.replace(1L, "one", "Again"), is(Store.ReplaceStatus.HIT)); + verify(storeProxy, times(1)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + verify(loaderWriter, times(0)).load(anyLong()); + verify(loaderWriter, times(1)).write(anyLong(), anyString()); + } + + @Test + public void testReplace2ArgsValueAbsentInCacheDiffValueInSOR() throws Exception { + LockingServerStoreProxyImpl storeProxy = mock(LockingServerStoreProxyImpl.class); + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + when(storeProxy.lock(anyLong())).thenReturn(entryOf()); + ClusteredLoaderWriterStore store = new ClusteredLoaderWriterStore<>(configuration, codec, resolver, storeProxy, + timeSource, loaderWriter, new DefaultStatisticsService()); + loaderWriter.storeMap.put(1L, "one"); + assertThat(store.replace(1L, "Again", "one"), is(Store.ReplaceStatus.MISS_PRESENT)); + verify(storeProxy, times(0)).append(anyLong(), ArgumentMatchers.any(ByteBuffer.class)); + assertThat(loaderWriter.storeMap.get(1L), equalTo("one")); + } + + private static ServerStoreProxy.ChainEntry entryOf(ByteBuffer ... elements) { + Chain chain = chainOf(elements); + return new ServerStoreProxy.ChainEntry() { + @Override + public void append(ByteBuffer payLoad) throws TimeoutException { + throw new AssertionError(); + } + + @Override + public void replaceAtHead(Chain equivalent) { + throw new AssertionError(); + } + + @Override + public boolean isEmpty() { + return chain.isEmpty(); + } + + @Override + public int length() { + return chain.length(); + } + + @Override + public Iterator iterator() { + return chain.iterator(); + } + }; + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStoreProviderTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStoreProviderTest.java new file mode 100644 index 0000000000..673381fd61 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindStoreProviderTest.java @@ -0,0 +1,81 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter.writebehind; + +import org.ehcache.clustered.client.config.ClusteredResourceType; +import org.ehcache.clustered.client.internal.store.ClusteredStoreProviderTest; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.offheap.OffHeapStore; +import org.ehcache.impl.internal.store.tiering.TieredStore; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; +import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; + +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class ClusteredWriteBehindStoreProviderTest { + + private final CacheLoaderWriterConfiguration cacheLoaderWriterConfiguration = mock(CacheLoaderWriterConfiguration.class); + private final WriteBehindConfiguration writeBehindConfiguration = mock(WriteBehindConfiguration.class); + + @Test + public void testRank() { + ClusteredWriteBehindStore.Provider provider = new ClusteredWriteBehindStore.Provider(); + ServiceLocator serviceLocator = dependencySet() + .with(new TieredStore.Provider()) + .with(new OnHeapStore.Provider()) + .with(new OffHeapStore.Provider()) + .with(mock(DiskResourceService.class)) + .with(new OffHeapDiskStore.Provider()) + .with(mock(ClusteringService.class)).build(); + provider.start(serviceLocator); + + assertThat(provider.rank(new HashSet<>(Collections.singletonList(ClusteredResourceType.Types.DEDICATED)), + Arrays.asList(cacheLoaderWriterConfiguration, writeBehindConfiguration)), is(3)); + assertThat(provider.rank(new HashSet<>(Collections.singletonList(ClusteredResourceType.Types.DEDICATED)), + Collections.singletonList(cacheLoaderWriterConfiguration)), is(0)); + assertThat(provider.rank(new HashSet<>(Collections.singletonList(new ClusteredStoreProviderTest.UnmatchedResourceType())), + Arrays.asList(cacheLoaderWriterConfiguration, writeBehindConfiguration)), is(0)); + } + + @Test + public void testAuthoritativeRank() { + ClusteredWriteBehindStore.Provider provider = new ClusteredWriteBehindStore.Provider(); + ServiceLocator serviceLocator = dependencySet().with(mock(ClusteringService.class)).build(); + provider.start(serviceLocator); + + assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, + Arrays.asList(cacheLoaderWriterConfiguration, writeBehindConfiguration)), + is(3)); + assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, + Collections.singletonList(writeBehindConfiguration)), + is(0)); + assertThat(provider.rankAuthority(new ClusteredStoreProviderTest.UnmatchedResourceType(), Arrays.asList(cacheLoaderWriterConfiguration, + writeBehindConfiguration)), + is(0)); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindTest.java new file mode 100644 index 0000000000..ff0563bf6e --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/loaderwriter/writebehind/ClusteredWriteBehindTest.java @@ -0,0 +1,289 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.loaderwriter.writebehind; + +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.common.internal.util.ChainBuilder; +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.client.internal.store.operations.ExpiryChainResolver; +import org.ehcache.clustered.common.internal.store.operations.ConditionalRemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.Operation; +import org.ehcache.clustered.common.internal.store.operations.PutIfAbsentOperation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.PutWithWriterOperation; +import org.ehcache.clustered.common.internal.store.operations.RemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.loaderWriter.writebehind.RecordingLoaderWriter; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.serialization.LongSerializer; +import org.ehcache.impl.serialization.StringSerializer; +import org.junit.Test; +import org.mockito.ArgumentCaptor; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.AbstractExecutorService; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class ClusteredWriteBehindTest { + + private static final TimeSource TIME_SOURCE = SystemTimeSource.INSTANCE; + + @Test + public void testPutWithWriter() throws Exception { + List eventInfoList = new ArrayList<>(); + eventInfoList.add(new EventInfo(1L, + new PutWithWriterOperation<>(1L, "The one", TIME_SOURCE.getTimeMillis()), + "The one", + true)); + eventInfoList.add(new EventInfo(1L, + new PutWithWriterOperation<>(1L, "The one one", TIME_SOURCE.getTimeMillis()), + "The one one", + true)); + eventInfoList.add(new EventInfo(2L, + new PutWithWriterOperation<>(2L, "The two", TIME_SOURCE.getTimeMillis()), + + "The two", + true)); + eventInfoList.add(new EventInfo(2L, + new PutWithWriterOperation<>(2L, "The two two", TIME_SOURCE.getTimeMillis()), + "The two two", + true)); + + HashMap result = new HashMap<>(); + result.put(1L, "The one one"); + result.put(2L, "The two two"); + verifyEvents(eventInfoList, result); + } + + @Test + public void testRemoves() throws Exception { + List eventInfoList = new ArrayList<>(); + eventInfoList.add(new EventInfo(1L, + new PutWithWriterOperation<>(1L, "The one", TIME_SOURCE.getTimeMillis()), + "The one", + true)); + eventInfoList.add(new EventInfo(1L, + new PutWithWriterOperation<>(1L, "The one one", TIME_SOURCE.getTimeMillis()), + "The one one", + true)); + eventInfoList.add(new EventInfo(1L, new RemoveOperation<>(1L, TIME_SOURCE.getTimeMillis()), null, true)); + + verifyEvents(eventInfoList, Collections.emptyMap()); + } + + @Test + public void testCAS() throws Exception { + List eventInfoList = new ArrayList<>(); + eventInfoList.add(new EventInfo(1L, + new PutIfAbsentOperation<>(1L, "The one", TIME_SOURCE.getTimeMillis()), + "The one", + true)); + eventInfoList.add(new EventInfo(1L, + new PutIfAbsentOperation<>(1L, "The one one", TIME_SOURCE.getTimeMillis()), + "none", + false)); + eventInfoList.add(new EventInfo(1L, + new ConditionalRemoveOperation<>(1L, "The one", TIME_SOURCE.getTimeMillis()), + null, + true)); + + verifyEvents(eventInfoList, Collections.emptyMap()); + } + + @Test + public void testPuts() throws Exception { + List eventInfoList = new ArrayList<>(); + eventInfoList.add(new EventInfo(1L, + new PutOperation<>(1L, "The one", TIME_SOURCE.getTimeMillis()), + "The one", + false)); + eventInfoList.add(new EventInfo(1L, + new PutWithWriterOperation<>(1L, "The one one", TIME_SOURCE.getTimeMillis()), + "The one one", + true)); + eventInfoList.add(new EventInfo(2L, new PutWithWriterOperation<>(2L, "The two", TIME_SOURCE.getTimeMillis()), + "The two", + true)); + eventInfoList.add(new EventInfo(4L, new PutWithWriterOperation<>(4L, "The four", TIME_SOURCE.getTimeMillis()), + "The four", + true)); + + HashMap result = new HashMap<>(); + result.put(1L, "The one one"); + result.put(2L, "The two"); + result.put(4L, "The four"); + verifyEvents(eventInfoList, result); + } + + @SuppressWarnings("unchecked") + private void verifyEvents(List expected, Map expectedChainContents) throws TimeoutException { + ClusteredWriteBehindStore clusteredWriteBehindStore = mock(ClusteredWriteBehindStore.class); + ExecutorService executorService = new TestExecutorService(); + RecordingLoaderWriter cacheLoaderWriter = new RecordingLoaderWriter<>(); + OperationsCodec operationCodec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); + ChainResolver resolver = new ExpiryChainResolver<>(operationCodec, ExpiryPolicy.NO_EXPIRY); + + ClusteredWriteBehind clusteredWriteBehind = new ClusteredWriteBehind<>(clusteredWriteBehindStore, + executorService, + resolver, + cacheLoaderWriter, + operationCodec); + Chain elements = makeChain(expected, operationCodec); + + when(clusteredWriteBehindStore.lock(1L)).thenReturn(new ServerStoreProxy.ChainEntry() { + @Override + public void append(ByteBuffer payLoad) throws TimeoutException { + + } + + @Override + public void replaceAtHead(Chain equivalent) { + + } + + @Override + public boolean isEmpty() { + return elements.isEmpty(); + } + + @Override + public int length() { + return elements.length(); + } + + @Override + public Iterator iterator() { + return elements.iterator(); + } + }); + + ArgumentCaptor chainArgumentCaptor = ArgumentCaptor.forClass(Chain.class); + + clusteredWriteBehind.flushWriteBehindQueue(null, 1L); + + Map> records = cacheLoaderWriter.getRecords(); + + Map track = new HashMap<>(); + for (EventInfo event : expected) { + if (event.track) { + int next = track.compute(event.key, (k, v) -> v == null ? 0 : v + 1); + assertThat(records.get(event.key).get(next), is(event.expectedValue)); + } + } + + verify(clusteredWriteBehindStore).replaceAtHead(anyLong(), any(), chainArgumentCaptor.capture()); + + Chain value = chainArgumentCaptor.getValue(); + Map result = convert(value, operationCodec, resolver, TIME_SOURCE); + + for (Map.Entry entry : result.entrySet()) { + assertThat(entry.getValue(), is(expectedChainContents.get(entry.getKey()))); + } + + verify(clusteredWriteBehindStore).unlock(1L, false); + } + + private Map convert(Chain chain, OperationsCodec codec, + ChainResolver resolver, TimeSource timeSource) { + Map result = new HashMap<>(); + for (Element element : chain) { + ByteBuffer payload = element.getPayload(); + Operation operation = codec.decode(payload); + Long key = operation.getKey(); + PutOperation opResult = resolver.applyOperation(key, + null, + operation); + result.put(key, opResult.getValue()); + } + return result; + } + + private Chain makeChain(List expected, OperationsCodec operationsCodec) { + ChainBuilder builder = new ChainBuilder(); + for (EventInfo eventInfo : expected) { + builder.add(operationsCodec.encode(eventInfo.operation)); + } + return builder.build(); + } + + + class TestExecutorService extends AbstractExecutorService { + + @Override + public void shutdown() { + + } + + @Override + public List shutdownNow() { + return null; + } + + @Override + public boolean isShutdown() { + return false; + } + + @Override + public boolean isTerminated() { + return false; + } + + @Override + public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { + return false; + } + + @Override + public void execute(Runnable command) { + command.run(); + } + } + + private class EventInfo { + private final Long key; + private final Operation operation; + private final String expectedValue; + private final boolean track; + + private EventInfo(Long key, Operation operation, String expectedValue, boolean track) { + this.key = key; + this.operation = operation; + this.expectedValue = expectedValue; + this.track = track; + } + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClientTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClientTest.java similarity index 99% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClientTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClientTest.java index 86ee2b9724..f3af9e3595 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClientTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockClientTest.java @@ -36,8 +36,8 @@ import static org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType.READ; import static org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType.WRITE; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import org.junit.Before; import org.terracotta.exception.EntityNotProvidedException; diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java similarity index 99% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java index a7f2f56a06..41de1e0c4f 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java @@ -26,9 +26,9 @@ import static org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType.READ; import static org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType.WRITE; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; -import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doThrow; diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterStateRepositoryReplicationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterStateRepositoryReplicationTest.java new file mode 100644 index 0000000000..86429fd73f --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterStateRepositoryReplicationTest.java @@ -0,0 +1,207 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.service; + +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityService; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; +import org.ehcache.clustered.client.internal.store.ClusterTierClientEntityService; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.client.internal.store.SimpleClusterTierClientEntity; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; +import org.ehcache.clustered.server.ClusterTierManagerServerEntityService; +import org.ehcache.clustered.server.store.ClusterTierServerEntityService; +import org.ehcache.impl.config.BaseCacheConfiguration; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.spi.persistence.StateHolder; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.offheapresource.OffHeapResourcesProvider; +import org.terracotta.offheapresource.config.MemoryUnit; +import org.terracotta.passthrough.PassthroughClusterControl; +import org.terracotta.passthrough.PassthroughTestHelpers; + +import java.io.Serializable; +import java.lang.reflect.Field; +import java.net.URI; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; +import static org.ehcache.config.Eviction.noAdvice; +import static org.ehcache.config.builders.ExpiryPolicyBuilder.noExpiration; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class ClusterStateRepositoryReplicationTest { + + private PassthroughClusterControl clusterControl; + private static String STRIPENAME = "stripe"; + private static String STRIPE_URI = "passthrough://" + STRIPENAME; + + @Before + public void setUp() throws Exception { + this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, + server -> { + server.registerServerEntityService(new ClusterTierManagerServerEntityService()); + server.registerClientEntityService(new ClusterTierManagerClientEntityService()); + server.registerServerEntityService(new ClusterTierServerEntityService()); + server.registerClientEntityService(new ClusterTierClientEntityService()); + server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); + server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); + server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + + UnitTestConnectionService.addServerToStripe(STRIPENAME, server); + } + ); + + clusterControl.waitForActive(); + clusterControl.waitForRunningPassivesInStandby(); + } + + @After + public void tearDown() throws Exception { + UnitTestConnectionService.removeStripe(STRIPENAME); + clusterControl.tearDown(); + } + + @Test + public void testClusteredStateRepositoryReplication() throws Exception { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) + .autoCreate(c -> c) + .build(); + + ClusteringService service = new ClusteringServiceFactory().create(configuration); + + service.start(null); + try { + BaseCacheConfiguration config = new BaseCacheConfiguration<>(Long.class, String.class, noAdvice(), null, noExpiration(), + newResourcePoolsBuilder().with(clusteredDedicated("test", 2, org.ehcache.config.units.MemoryUnit.MB)).build()); + ClusteringService.ClusteredCacheIdentifier spaceIdentifier = (ClusteringService.ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("test", + config); + + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(spaceIdentifier, new StoreConfigurationImpl<>(config, 1, null, null), Consistency.STRONG, mock(ServerCallback.class)); + + SimpleClusterTierClientEntity clientEntity = getEntity(serverStoreProxy); + + ClusterStateRepository stateRepository = new ClusterStateRepository(spaceIdentifier, "test", clientEntity); + + StateHolder testHolder = stateRepository.getPersistentStateHolder("testHolder", String.class, String.class, c -> true, null); + testHolder.putIfAbsent("One", "One"); + testHolder.putIfAbsent("Two", "Two"); + + clusterControl.terminateActive(); + clusterControl.waitForActive(); + + assertThat(testHolder.get("One"), is("One")); + assertThat(testHolder.get("Two"), is("Two")); + } finally { + service.stop(); + } + } + + @Test + public void testClusteredStateRepositoryReplicationWithSerializableKV() throws Exception { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) + .autoCreate(c -> c) + .build(); + + ClusteringService service = new ClusteringServiceFactory().create(configuration); + + service.start(null); + try { + BaseCacheConfiguration config = new BaseCacheConfiguration<>(Long.class, String.class, noAdvice(), null, noExpiration(), + newResourcePoolsBuilder().with(clusteredDedicated("test", 2, org.ehcache.config.units.MemoryUnit.MB)).build()); + ClusteringService.ClusteredCacheIdentifier spaceIdentifier = (ClusteringService.ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("test", + config); + + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(spaceIdentifier, new StoreConfigurationImpl<>(config, 1, null, null), Consistency.STRONG, mock(ServerCallback.class)); + + SimpleClusterTierClientEntity clientEntity = getEntity(serverStoreProxy); + + ClusterStateRepository stateRepository = new ClusterStateRepository(new ClusteringService.ClusteredCacheIdentifier() { + @Override + public String getId() { + return "testStateRepo"; + } + + @Override + public Class getServiceType() { + return ClusteringService.class; + } + }, "test", clientEntity); + + StateHolder testMap = stateRepository.getPersistentStateHolder("testMap", TestVal.class, TestVal.class, c -> true, null); + testMap.putIfAbsent(new TestVal("One"), new TestVal("One")); + testMap.putIfAbsent(new TestVal("Two"), new TestVal("Two")); + + clusterControl.terminateActive(); + clusterControl.waitForActive(); + + assertThat(testMap.get(new TestVal("One")), is(new TestVal("One"))); + assertThat(testMap.get(new TestVal("Two")), is(new TestVal("Two"))); + + assertThat(testMap.entrySet(), hasSize(2)); + } finally { + service.stop(); + } + } + + private static SimpleClusterTierClientEntity getEntity(ServerStoreProxy clusteringService) throws NoSuchFieldException, IllegalAccessException { + Field entity = clusteringService.getClass().getDeclaredField("entity"); + entity.setAccessible(true); + return (SimpleClusterTierClientEntity)entity.get(clusteringService); + } + + private static class TestVal implements Serializable { + + private static final long serialVersionUID = 1L; + + final String val; + + + private TestVal(String val) { + this.val = val; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + TestVal testVal = (TestVal) o; + + return val != null ? val.equals(testVal.val) : testVal.val == null; + } + + @Override + public int hashCode() { + return val != null ? val.hashCode() : 0; + } + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerClientEntityExceptionTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerClientEntityExceptionTest.java similarity index 93% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerClientEntityExceptionTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerClientEntityExceptionTest.java index 9d9e6986f3..82ecc988d2 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerClientEntityExceptionTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusterTierManagerClientEntityExceptionTest.java @@ -30,8 +30,8 @@ import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.config.store.StoreEventSourceConfiguration; -import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.spi.store.Store; +import org.ehcache.core.store.StoreConfigurationImpl; import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; import org.junit.After; import org.junit.Before; @@ -39,9 +39,10 @@ import java.net.URI; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; +import static org.junit.Assert.fail; /** * This class includes tests to ensure server-side exceptions returned as responses to @@ -75,21 +76,21 @@ public void removePassThroughServer() throws Exception { public void testServerExceptionPassThrough() throws Exception { ClusteringServiceConfiguration creationConfig = ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(creationConfig); creationService.start(null); creationService.stop(); ClusteringServiceConfiguration accessConfig = ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .expecting() - .defaultServerResource("different") - .build(); + .expecting(server -> server + .defaultServerResource("different")) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); /* * Induce an "InvalidStoreException: cluster tier 'cacheAlias' does not exist" on the server. @@ -98,7 +99,8 @@ public void testServerExceptionPassThrough() throws Exception { accessService.start(null); fail("Expecting ClusterTierManagerValidationException"); - } catch (ClusterTierManagerValidationException e) { + } catch (RuntimeException e) { + assertThat(e.getCause(), is(instanceOf(ClusterTierManagerValidationException.class))); /* * Find the last ClusterTierManagerClientEntity involved exception in the causal chain. This diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactoryTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactoryTest.java new file mode 100644 index 0000000000..25ba158741 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactoryTest.java @@ -0,0 +1,40 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.service; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.core.util.ClassLoading; +import org.junit.Test; + +import java.util.stream.Collectors; + +import static java.util.Spliterators.spliterator; +import static java.util.stream.StreamSupport.stream; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsCollectionContaining.hasItem; + +/** + * @author Clifford W. Johnson + */ +public class ClusteringServiceFactoryTest { + + @Test + public void testServiceLocator() throws Exception { + assertThat(stream(spliterator(ClassLoading.servicesOfType(ServiceFactory.class).iterator(), Long.MAX_VALUE, 0), false).map(Object::getClass).collect(Collectors.toList()), + hasItem(ClusteringServiceFactory.class)); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ConnectionClosedTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ConnectionClosedTest.java new file mode 100644 index 0000000000..03dadd4f10 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ConnectionClosedTest.java @@ -0,0 +1,107 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.service; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.resilience.ThrowingResilienceStrategy; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.connection.Connection; +import org.terracotta.connection.ConnectionPropertyNames; + +import java.net.URI; +import java.time.Duration; +import java.util.Collection; +import java.util.Properties; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.terracotta.utilities.test.matchers.Eventually.within; + +public class ConnectionClosedTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://connection.com:9540/timeout"); + + @Before + public void definePassthroughServer() { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 64, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() { + try { + UnitTestConnectionService.remove(CLUSTER_URI); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), is("Connection already closed")); + } + } + + @Test + public void testCacheOperationThrowsAfterConnectionClosed() throws Exception { + + ResourcePoolsBuilder resourcePoolsBuilder = ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)); + + CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER_URI) + .timeouts(TimeoutsBuilder + .timeouts() + .connection(Duration.ofSeconds(20)) + .build()) + .autoCreate(c -> c)) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + resourcePoolsBuilder).withResilienceStrategy(new ThrowingResilienceStrategy<>())); + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + + Cache cache = cacheManager.getCache("clustered-cache", Long.class, String.class); + + Collection connectionProperties = UnitTestConnectionService.getConnectionProperties(CLUSTER_URI); + + assertThat(connectionProperties.size(), is(1)); + Properties properties = connectionProperties.iterator().next(); + + assertThat(properties.getProperty(ConnectionPropertyNames.CONNECTION_TIMEOUT), is("20000")); + + cache.put(1L, "value"); + assertThat(cache.get(1L), is("value")); + + Collection connections = UnitTestConnectionService.getConnections(CLUSTER_URI); + + assertThat(connections.size(), is(1)); + + connections.iterator().next().close(); + + assertThat(() -> cache.get(1L), within(Duration.ofSeconds(60)).is("value")); + } + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ConnectionStateTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ConnectionStateTest.java new file mode 100644 index 0000000000..daa021c4e9 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ConnectionStateTest.java @@ -0,0 +1,117 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.service; + +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.Timeouts; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.serialization.LongSerializer; +import org.ehcache.impl.serialization.StringSerializer; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.connection.Connection; + +import java.io.IOException; +import java.net.URI; +import java.util.Collection; +import java.util.Properties; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.terracotta.utilities.test.matchers.ThrowsMatcher.threw; + +public class ConnectionStateTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); + + private final ClusteringServiceConfiguration serviceConfiguration = ClusteringServiceConfigurationBuilder + .cluster(CLUSTER_URI) + .autoCreate(c -> c) + .build(); + + @Before + public void definePassthroughServer() { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 64, MemoryUnit.MB) + .resource("secondary-server-resource", 64, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthrough() { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void testInitializeStateAfterConnectionCloses() throws Exception { + + ConnectionState connectionState = new ConnectionState(Timeouts.DEFAULT, new Properties(), serviceConfiguration); + connectionState.initClusterConnection(Runnable::run); + + closeConnection(); + + Connection connection = connectionState.getConnection(); + assertThat(connection::close, threw(instanceOf(IllegalStateException.class))); + + connectionState.initializeState(); + + assertThat(connectionState.getConnection(), notNullValue()); + assertThat(connectionState.getEntityFactory(), notNullValue()); + assertThat(connectionState.getEntity(), notNullValue()); + + connectionState.getConnection().close(); + + } + + @Test + public void testCreateClusterTierEntityAfterConnectionCloses() throws Exception { + + ConnectionState connectionState = new ConnectionState(Timeouts.DEFAULT, new Properties(), serviceConfiguration); + connectionState.initClusterConnection(Runnable::run); + connectionState.initializeState(); + + closeConnection(); + + ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB); + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), + Long.class.getName(), String.class.getName(), LongSerializer.class.getName(), StringSerializer.class.getName(), null, false); + + ClusterTierClientEntity clientEntity = connectionState.createClusterTierClientEntity("cache1", serverStoreConfiguration, false); + + assertThat(clientEntity, notNullValue()); + assertThat(connectionState.getEntity(), notNullValue()); + } + + //For test to simulate connection close as result of lease expiry + private void closeConnection() throws IOException { + Collection connections = UnitTestConnectionService.getConnections(CLUSTER_URI); + + assertThat(connections.size(), is(1)); + + connections.iterator().next().close(); + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceDestroyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceDestroyTest.java similarity index 91% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceDestroyTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceDestroyTest.java index e61074d825..157f7cbdb5 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceDestroyTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceDestroyTest.java @@ -16,12 +16,10 @@ package org.ehcache.clustered.client.internal.service; -import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntity; import org.ehcache.clustered.client.internal.MockConnectionService; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockClient; import org.ehcache.clustered.client.internal.store.InternalClusterTierClientEntity; -import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ClusterTierManagerConfiguration; import org.ehcache.clustered.common.internal.exceptions.DestroyInProgressException; import org.ehcache.clustered.common.internal.lock.LockMessaging; @@ -39,10 +37,10 @@ import java.util.HashSet; import java.util.Set; -import static java.util.Collections.emptyMap; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; import static org.ehcache.clustered.common.EhcacheEntityVersion.ENTITY_VERSION; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -91,8 +89,8 @@ public void testDestroyAllFullyMocked() throws Exception { when(tierEntityRef.destroy()).thenReturn(true); when(managerEntityRef.destroy()).thenReturn(true); - DefaultClusteringService service = new DefaultClusteringService(new ClusteringServiceConfiguration(URI - .create("mock://localhost/whatever"))); + DefaultClusteringService service = new DefaultClusteringService(cluster((URI + .create("mock://localhost/whatever"))).build()); service.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); service.destroyAll(); @@ -103,8 +101,6 @@ public void testDestroyAllFullyMocked() throws Exception { @Test public void testAutoCreateOnPartialDestroyState() throws Exception { - ServerSideConfiguration serverConfig = new ServerSideConfiguration("default", emptyMap()); - mockLockForWriteLockSuccess(); when(getEntityRef(ClusterTierManagerClientEntity.class)).thenReturn(managerEntityRef); @@ -114,14 +110,14 @@ public void testAutoCreateOnPartialDestroyState() throws Exception { doThrow(new EntityAlreadyExistsException("className", "entityName")) // Next time simulate creation .doNothing() - .when(managerEntityRef).create(new ClusterTierManagerConfiguration("whatever", serverConfig)); + .when(managerEntityRef).create(new ClusterTierManagerConfiguration("whatever", any())); // And can be fetch when(managerEntityRef.fetchEntity(null)).thenReturn(managerEntity); // However validate indicates destroy in progress doThrow(new DestroyInProgressException("destroying")) // Next time validation succeeds .doNothing() - .when(managerEntity).validate(serverConfig); + .when(managerEntity).validate(any()); Set stores = new HashSet<>(); stores.add("store1"); @@ -133,8 +129,8 @@ public void testAutoCreateOnPartialDestroyState() throws Exception { when(tierEntityRef.destroy()).thenReturn(true); when(managerEntityRef.destroy()).thenReturn(true); - DefaultClusteringService service = new DefaultClusteringService(new ClusteringServiceConfiguration(URI - .create("mock://localhost/whatever"), true, serverConfig)); + DefaultClusteringService service = new DefaultClusteringService(cluster(URI + .create("mock://localhost/whatever")).autoCreate(s -> s).build()); service.start(null); verify(managerEntity).prepareForDestroy(); @@ -154,8 +150,8 @@ public void testFetchOnPartialDestroyState() throws Exception { // However validate indicates destroy in progress doThrow(new DestroyInProgressException("destroying")).when(managerEntity).validate(null); - DefaultClusteringService service = new DefaultClusteringService(new ClusteringServiceConfiguration(URI - .create("mock://localhost/whatever"))); + DefaultClusteringService service = new DefaultClusteringService(cluster(URI + .create("mock://localhost/whatever")).build()); try { service.start(null); fail("IllegalStateException expected"); @@ -183,8 +179,8 @@ public void testDestroyOnPartialDestroyState() throws Exception { when(tierEntityRef.destroy()).thenReturn(true); when(managerEntityRef.destroy()).thenReturn(true); - DefaultClusteringService service = new DefaultClusteringService(new ClusteringServiceConfiguration(URI - .create("mock://localhost/whatever"))); + DefaultClusteringService service = new DefaultClusteringService(cluster(URI + .create("mock://localhost/whatever")).build()); service.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); service.destroyAll(); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java similarity index 89% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java index 9968f29972..8100a13bb4 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java @@ -22,6 +22,7 @@ import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory; import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityService; import org.ehcache.clustered.client.internal.UnitTestConnectionService; import org.ehcache.clustered.client.internal.UnitTestConnectionService.PassthroughServerBuilder; @@ -35,8 +36,6 @@ import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.client.service.ClusteringService.ClusteredCacheIdentifier; import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.clustered.common.ServerSideConfiguration.Pool; import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.ObservableEhcacheServerEntityService; @@ -48,8 +47,8 @@ import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.config.store.StoreEventSourceConfiguration; -import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.spi.store.Store; +import org.ehcache.core.store.StoreConfigurationImpl; import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.persistence.StateRepository; @@ -57,9 +56,7 @@ import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.terracotta.connection.ConnectionPropertyNames; import org.terracotta.exception.EntityNotFoundException; @@ -77,21 +74,25 @@ import static org.ehcache.clustered.client.config.ClusteredResourceType.Types.DEDICATED; import static org.ehcache.clustered.client.config.ClusteredResourceType.Types.SHARED; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; import static org.ehcache.clustered.client.internal.service.TestServiceProvider.providerContaining; import static org.ehcache.config.ResourceType.Core.DISK; import static org.ehcache.config.ResourceType.Core.HEAP; import static org.ehcache.config.ResourceType.Core.OFFHEAP; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; +import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doReturn; @@ -104,9 +105,6 @@ public class DefaultClusteringServiceTest { private ObservableEhcacheServerEntityService observableEhcacheServerEntityService; private ObservableClusterTierServerEntityService observableClusterTierServerEntityService; - @Rule - public ExpectedException expectedException = ExpectedException.none(); - @Before public void definePassthroughServer() throws Exception { observableEhcacheServerEntityService = new ObservableEhcacheServerEntityService(); @@ -134,8 +132,8 @@ public void removePassthroughServer() throws Exception { @Test public void testHandlesResourceType() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); @@ -170,12 +168,12 @@ public int getTierHeight() { @Test public void testGetPersistenceSpaceIdentifier() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); - PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = service.getPersistenceSpaceIdentifier("cacheAlias", null); + PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = service.getPersistenceSpaceIdentifier("cacheAlias", null); assertThat(spaceIdentifier, is(instanceOf(ClusteredCacheIdentifier.class))); assertThat(((ClusteredCacheIdentifier)spaceIdentifier).getId(), is("cacheAlias")); assertThat(service.getPersistenceSpaceIdentifier("cacheAlias", null), sameInstance(spaceIdentifier)); @@ -189,11 +187,11 @@ public void testCreate() throws Exception { .with(ClusteredResourcePoolBuilder.clusteredShared("primary"))); ClusteringServiceConfiguration configuration = ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + .autoCreate(c -> c) .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); - PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = service.getPersistenceSpaceIdentifier("cacheAlias", configBuilder + PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = service.getPersistenceSpaceIdentifier("cacheAlias", configBuilder .build()); assertThat(spaceIdentifier, instanceOf(ClusteredCacheIdentifier.class)); assertThat(((ClusteredCacheIdentifier) spaceIdentifier).getId(), is("cacheAlias")); @@ -203,9 +201,9 @@ public void testCreate() throws Exception { public void testConnectionName() throws Exception { String entityIdentifier = "my-application"; ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration( - URI.create(CLUSTER_URI_BASE + entityIdentifier), - true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + entityIdentifier)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); service.start(null); @@ -223,7 +221,7 @@ public void testStartStopAutoCreate() throws Exception { URI clusterUri = URI.create(CLUSTER_URI_BASE + "my-application"); ClusteringServiceConfiguration configuration = ClusteringServiceConfigurationBuilder.cluster(clusterUri) - .autoCreate() + .autoCreate(c -> c) .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); assertThat(service.isConnected(), is(false)); @@ -249,7 +247,7 @@ public void testStartStopAutoCreate() throws Exception { public void testStartStopNoAutoCreate() throws Exception { URI clusterUri = URI.create(CLUSTER_URI_BASE + "my-application"); ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(clusterUri) + cluster(clusterUri) .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); try { @@ -274,8 +272,8 @@ public void testStartStopNoAutoCreate() throws Exception { @Test public void testStartStopAutoCreateTwiceA() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); DefaultClusteringService firstService = new DefaultClusteringService(configuration); firstService.start(null); @@ -305,8 +303,8 @@ public void testStartStopAutoCreateTwiceA() throws Exception { @Test public void testStartStopAutoCreateTwiceB() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); DefaultClusteringService firstService = new DefaultClusteringService(configuration); firstService.start(null); @@ -334,8 +332,8 @@ public void testStartStopAutoCreateTwiceB() throws Exception { public void testStartForMaintenanceAutoStart() throws Exception { URI clusterUri = URI.create(CLUSTER_URI_BASE + "my-application"); ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(clusterUri) - .autoCreate() + cluster(clusterUri) + .autoCreate(c -> c) .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); assertThat(service.isConnected(), is(false)); @@ -356,8 +354,8 @@ public void testStartForMaintenanceAutoStart() throws Exception { @Test public void testStartForMaintenanceOtherAutoCreate() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); DefaultClusteringService createService = new DefaultClusteringService(configuration); createService.start(null); @@ -388,8 +386,8 @@ public void testStartForMaintenanceOtherAutoCreate() throws Exception { @Test public void testStartForMaintenanceOtherCreated() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); DefaultClusteringService createService = new DefaultClusteringService(configuration); createService.start(null); @@ -421,8 +419,8 @@ public void testStartForMaintenanceOtherCreated() throws Exception { @Test public void testMultipleAutoCreateClientsRunConcurrently() throws InterruptedException, ExecutionException { final ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); Callable task = () -> { @@ -448,8 +446,8 @@ public void testMultipleAutoCreateClientsRunConcurrently() throws InterruptedExc @Test public void testStartForMaintenanceInterlock() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); DefaultClusteringService maintenanceService1 = new DefaultClusteringService(configuration); maintenanceService1.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); @@ -471,8 +469,8 @@ public void testStartForMaintenanceInterlock() throws Exception { @Test public void testStartForMaintenanceSequence() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(c -> c) .build(); DefaultClusteringService maintenanceService1 = new DefaultClusteringService(configuration); maintenanceService1.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); @@ -492,13 +490,13 @@ public void testStartForMaintenanceSequence() throws Exception { @Test public void testBasicConfiguration() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService createService = new DefaultClusteringService(configuration); createService.start(null); @@ -528,13 +526,13 @@ public void testGetServerStoreProxySharedAutoCreate() throws Exception { String cacheAlias = "cacheAlias"; String targetPool = "sharedPrimary"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool(targetPool, 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); service.start(null); @@ -577,25 +575,25 @@ public void testGetServerStoreProxySharedNoAutoCreateNonExistent() throws Except String cacheAlias = "cacheAlias"; String targetPool = "sharedPrimary"; ClusteringServiceConfiguration creationConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool(targetPool, 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(creationConfig); creationService.start(null); creationService.stop(); ClusteringServiceConfiguration accessConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .expecting() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .expecting(server -> server .defaultServerResource("defaultResource") .resourcePool(targetPool, 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); accessService.start(null); @@ -635,13 +633,13 @@ public void testGetServerStoreProxySharedNoAutoCreateExists() throws Exception { String cacheAlias = "cacheAlias"; String targetPool = "sharedPrimary"; ClusteringServiceConfiguration creationConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool(targetPool, 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(creationConfig); creationService.start(null); @@ -672,13 +670,13 @@ public void testGetServerStoreProxySharedNoAutoCreateExists() throws Exception { ClusteringServiceConfiguration accessConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .expecting() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .expecting(server -> server .defaultServerResource("defaultResource") .resourcePool(targetPool, 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); accessService.start(null); @@ -716,13 +714,13 @@ public void testGetServerStoreProxySharedAutoCreateTwice() throws Exception { String cacheAlias = "cacheAlias"; String targetPool = "sharedPrimary"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool(targetPool, 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService firstService = new DefaultClusteringService(configuration); firstService.start(null); @@ -778,13 +776,13 @@ public void testReleaseServerStoreProxyShared() throws Exception { String cacheAlias = "cacheAlias"; String targetPool = "sharedPrimary"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool(targetPool, 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(configuration); creationService.start(null); @@ -807,7 +805,7 @@ public void testReleaseServerStoreProxyShared() throws Exception { ObservableClusterTierActiveEntity clusterTierActiveEntity = clusterTierActiveEntities.get(0); assertThat(clusterTierActiveEntity.getConnectedClients().size(), is(1)); - creationService.releaseServerStoreProxy(serverStoreProxy); + creationService.releaseServerStoreProxy(serverStoreProxy, false); assertThat(activeEntity.getConnectedClients().size(), is(1)); assertThat(activeEntity.getStores(), containsInAnyOrder(cacheAlias)); @@ -815,7 +813,7 @@ public void testReleaseServerStoreProxyShared() throws Exception { assertThat(clusterTierActiveEntity.getConnectedClients(), empty()); try { - creationService.releaseServerStoreProxy(serverStoreProxy); + creationService.releaseServerStoreProxy(serverStoreProxy, false); fail("Expecting IllegalStateException"); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString("Endpoint closed")); @@ -829,13 +827,13 @@ public void testGetServerStoreProxyDedicatedAutoCreate() throws Exception { String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); service.start(null); @@ -880,25 +878,25 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateNonExistent() throws Exc String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration creationConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(creationConfig); creationService.start(null); creationService.stop(); ClusteringServiceConfiguration accessConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .expecting() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .expecting(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); accessService.start(null); @@ -940,13 +938,13 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateExists() throws Exceptio String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration creationConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(creationConfig); creationService.start(null); @@ -977,13 +975,13 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateExists() throws Exceptio assertThat(clusterTierActiveEntity.getConnectedClients(), empty()); ClusteringServiceConfiguration accessConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .expecting() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .expecting(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); accessService.start(null); @@ -1023,13 +1021,13 @@ public void testGetServerStoreProxyDedicatedAutoCreateTwice() throws Exception { String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService firstService = new DefaultClusteringService(configuration); firstService.start(null); @@ -1090,13 +1088,13 @@ public void testReleaseServerStoreProxyDedicated() throws Exception { String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(configuration); creationService.start(null); @@ -1120,7 +1118,7 @@ public void testReleaseServerStoreProxyDedicated() throws Exception { ObservableClusterTierActiveEntity clusterTierActiveEntity = clusterTierActiveEntities.get(0); assertThat(clusterTierActiveEntity.getConnectedClients(), not(empty())); - creationService.releaseServerStoreProxy(serverStoreProxy); + creationService.releaseServerStoreProxy(serverStoreProxy, false); assertThat(activeEntity.getDedicatedResourcePoolIds(), containsInAnyOrder(cacheAlias)); assertThat(activeEntity.getConnectedClients().size(), is(1)); @@ -1128,7 +1126,7 @@ public void testReleaseServerStoreProxyDedicated() throws Exception { assertThat(clusterTierActiveEntity.getConnectedClients(), empty()); try { - creationService.releaseServerStoreProxy(serverStoreProxy); + creationService.releaseServerStoreProxy(serverStoreProxy, false); fail("Expecting IllegalStateException"); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString("Endpoint closed")); @@ -1142,13 +1140,13 @@ public void testGetServerStoreProxySharedDestroy() throws Exception { String cacheAlias = "cacheAlias"; String targetPool = "sharedPrimary"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool(targetPool, 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(configuration); creationService.start(null); @@ -1178,7 +1176,7 @@ public void testGetServerStoreProxySharedDestroy() throws Exception { assertThat(getRootCause(e).getMessage(), containsString(" in use by ")); } - creationService.releaseServerStoreProxy(serverStoreProxy); + creationService.releaseServerStoreProxy(serverStoreProxy, false); assertThat(activeEntity.getStores(), containsInAnyOrder(cacheAlias)); assertThat(clusterTierActiveEntity.getConnectedClients(), empty()); @@ -1195,13 +1193,13 @@ public void testGetServerStoreProxyDedicatedDestroy() throws Exception { String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(configuration); creationService.start(null); @@ -1231,7 +1229,7 @@ public void testGetServerStoreProxyDedicatedDestroy() throws Exception { assertThat(getRootCause(e).getMessage(), containsString(" in use by ")); } - creationService.releaseServerStoreProxy(serverStoreProxy); + creationService.releaseServerStoreProxy(serverStoreProxy, false); assertThat(activeEntity.getDedicatedResourcePoolIds(), containsInAnyOrder(cacheAlias)); assertThat(activeEntity.getStores(), containsInAnyOrder(cacheAlias)); assertThat(clusterTierActiveEntity.getConnectedClients(), empty()); @@ -1250,28 +1248,26 @@ public void testDestroyCantBeCalledIfStopped() throws Exception { String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() - .defaultServerResource("defaultResource") + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server + .defaultServerResource("defaultResource")) .build(); DefaultClusteringService creationService = new DefaultClusteringService(configuration); - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage(endsWith(" should be started to call destroy")); - - creationService.destroy(cacheAlias); + IllegalStateException thrown = assertThrows(IllegalStateException.class, () -> creationService.destroy(cacheAlias)); + assertThat(thrown, hasProperty("message", endsWith(" should be started to call destroy"))); } @Test public void testDestroyAllNoStores() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService createService = new DefaultClusteringService(configuration); createService.start(null); createService.stop(); @@ -1309,13 +1305,13 @@ public void testDestroyAllNoStores() throws Exception { @Test public void testDestroyAllWithStores() throws Exception { ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService createService = new DefaultClusteringService(configuration); createService.start(null); @@ -1371,13 +1367,13 @@ public void testDestroyAllWithStores() throws Exception { @Test public void testStartNoAutoCreateThenAutoCreate() throws Exception { ClusteringServiceConfiguration creationConfigBad = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .expecting() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .expecting(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationServiceBad = new DefaultClusteringService(creationConfigBad); try { @@ -1391,13 +1387,13 @@ public void testStartNoAutoCreateThenAutoCreate() throws Exception { assertThat(activeEntitiesBad.size(), is(0)); ClusteringServiceConfiguration creationConfigGood = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationServiceGood = new DefaultClusteringService(creationConfigGood); creationServiceGood.start(null); @@ -1407,13 +1403,13 @@ public void testStartNoAutoCreateThenAutoCreate() throws Exception { public void testStoreValidation_autoCreateConfigGood_autoCreateConfigBad() throws Exception { String cacheAlias = "cacheAlias"; ClusteringServiceConfiguration config = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(config); creationService.start(null); @@ -1469,13 +1465,13 @@ public void testStoreValidation_autoCreateConfigGood_autoCreateConfigBad() throw public void testStoreValidation_autoCreateConfigGood_autoCreateConfigGood() throws Exception { String cacheAlias = "cacheAlias"; ClusteringServiceConfiguration config = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(config); creationService.start(null); @@ -1521,13 +1517,13 @@ public void testStoreValidation_autoCreateConfigGood_autoCreateConfigGood() thro public void testStoreValidation_autoCreateConfigBad() throws Exception { String cacheAlias = "cacheAlias"; ClusteringServiceConfiguration config = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(config); creationService.start(null); @@ -1568,13 +1564,13 @@ public void testStoreValidation_autoCreateConfigBad() throws Exception { public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigBad() throws Exception { String cacheAlias = "cacheAlias"; ClusteringServiceConfiguration autoConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(autoConfig); creationService.start(null); @@ -1587,13 +1583,13 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigBad() thr creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL, mock(ServerCallback.class)); ClusteringServiceConfiguration noAutoConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .expecting() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .expecting(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(noAutoConfig); accessService.start(null); @@ -1637,13 +1633,13 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigBad() thr public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigGood() throws Exception { String cacheAlias = "cacheAlias"; ClusteringServiceConfiguration autoConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(autoConfig); creationService.start(null); @@ -1657,13 +1653,12 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigGood() th ClusteringServiceConfiguration noAutoConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .expecting() - .defaultServerResource("defaultResource") + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .expecting(server -> server.defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(noAutoConfig); accessService.start(null); @@ -1699,13 +1694,13 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigGood() th public void testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateShared() throws Exception { String cacheAlias = "cacheAlias"; ClusteringServiceConfiguration creationConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(creationConfig); creationService.start(null); @@ -1720,13 +1715,13 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateS creationService.stop(); ClusteringServiceConfiguration accessConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); accessService.start(null); @@ -1767,13 +1762,13 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateS public void testStoreValidation_MismatchedPoolTypes_ConfiguredSharedValidateDedicated() throws Exception { String cacheAlias = "cacheAlias"; ClusteringServiceConfiguration creationConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService creationService = new DefaultClusteringService(creationConfig); creationService.start(null); @@ -1788,13 +1783,13 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredSharedValidateDedi creationService.stop(); ClusteringServiceConfiguration accessConfig = - ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) - .autoCreate() + cluster(URI.create(CLUSTER_URI_BASE + "my-application")) + .autoCreate(server -> server .defaultServerResource("defaultResource") .resourcePool("sharedPrimary", 2, MemoryUnit.MB, "serverResource1") .resourcePool("sharedSecondary", 2, MemoryUnit.MB, "serverResource2") - .resourcePool("sharedTertiary", 4, MemoryUnit.MB) - .build(); + .resourcePool("sharedTertiary", 4, MemoryUnit.MB)) + .build(); DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); accessService.start(null); @@ -1872,9 +1867,9 @@ private ClusteredCacheIdentifier getClusteredCacheIdentifier( public void testGetServerStoreProxyReturnsEventualStore() throws Exception { String entityIdentifier = "my-application"; ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration( - URI.create(CLUSTER_URI_BASE + entityIdentifier), - true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + entityIdentifier)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); service.start(null); @@ -1896,9 +1891,9 @@ public void testGetServerStoreProxyReturnsEventualStore() throws Exception { public void testGetServerStoreProxyReturnsStrongStore() throws Exception { String entityIdentifier = "my-application"; ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration( - URI.create(CLUSTER_URI_BASE + entityIdentifier), - true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + entityIdentifier)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); service.start(null); @@ -1921,9 +1916,9 @@ public void testGetServerStoreProxyFailureClearsEntityListeners() throws Excepti // Initial setup begin String entityIdentifier = "my-application"; ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration( - URI.create(CLUSTER_URI_BASE + entityIdentifier), - true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + entityIdentifier)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); service.start(null); @@ -1942,7 +1937,7 @@ public void testGetServerStoreProxyFailureClearsEntityListeners() throws Excepti // Initial setup end service.start(null); - when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 2L, MemoryUnit.MB)); + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource2", 1L, MemoryUnit.MB)); try { service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG, mock(ServerCallback.class)); fail("Server store proxy creation should have failed"); @@ -1957,9 +1952,9 @@ public void testGetServerStoreProxyFailureDoesNotClearOtherStoreEntityListeners( // Initial setup begin String entityIdentifier = "my-application"; ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration( - URI.create(CLUSTER_URI_BASE + entityIdentifier), - true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + entityIdentifier)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); service.start(null); @@ -1981,7 +1976,7 @@ public void testGetServerStoreProxyFailureDoesNotClearOtherStoreEntityListeners( ClusteringService.ClusteredCacheIdentifier otherCacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-other-cache", null); service.getServerStoreProxy(otherCacheIdentifier, storeConfig, Consistency.STRONG, mock(ServerCallback.class)); // Creates one more store - when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 2L, MemoryUnit.MB)); + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource2", 1L, MemoryUnit.MB)); try { service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG, mock(ServerCallback.class)); fail("Server store proxy creation should have failed"); @@ -1994,9 +1989,11 @@ public void testGetServerStoreProxyFailureDoesNotClearOtherStoreEntityListeners( @Test public void testGetStateRepositoryWithinTwiceWithSameName() throws Exception { ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration(URI.create(CLUSTER_URI_BASE), true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); - PersistableResourceService.PersistenceSpaceIdentifier cacheIdentifier = service.getPersistenceSpaceIdentifier("myCache", null); + PersistableResourceService.PersistenceSpaceIdentifier cacheIdentifier = service.getPersistenceSpaceIdentifier("myCache", null); StateRepository repository1 = service.getStateRepositoryWithin(cacheIdentifier, "myRepo"); StateRepository repository2 = service.getStateRepositoryWithin(cacheIdentifier, "myRepo"); assertThat(repository1, sameInstance(repository2)); @@ -2005,10 +2002,12 @@ public void testGetStateRepositoryWithinTwiceWithSameName() throws Exception { @Test public void testGetStateRepositoryWithinTwiceWithSameNameDifferentPersistenceSpaceIdentifier() throws Exception { ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration(URI.create(CLUSTER_URI_BASE), true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); - PersistableResourceService.PersistenceSpaceIdentifier cacheIdentifier1 = service.getPersistenceSpaceIdentifier("myCache1", null); - PersistableResourceService.PersistenceSpaceIdentifier cacheIdentifier2 = service.getPersistenceSpaceIdentifier("myCache2", null); + PersistableResourceService.PersistenceSpaceIdentifier cacheIdentifier1 = service.getPersistenceSpaceIdentifier("myCache1", null); + PersistableResourceService.PersistenceSpaceIdentifier cacheIdentifier2 = service.getPersistenceSpaceIdentifier("myCache2", null); StateRepository repository1 = service.getStateRepositoryWithin(cacheIdentifier1, "myRepo"); StateRepository repository2 = service.getStateRepositoryWithin(cacheIdentifier2, "myRepo"); assertThat(repository1, not(sameInstance(repository2))); @@ -2016,42 +2015,66 @@ public void testGetStateRepositoryWithinTwiceWithSameNameDifferentPersistenceSpa @Test public void testGetStateRepositoryWithinWithNonExistentPersistenceSpaceIdentifier() throws Exception { - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Clustered space not found for identifier"); ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration(URI.create(CLUSTER_URI_BASE), true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); ClusteredCacheIdentifier cacheIdentifier = mock(ClusteredCacheIdentifier.class); doReturn("foo").when(cacheIdentifier).getId(); - service.getStateRepositoryWithin(cacheIdentifier, "myRepo"); + + CachePersistenceException thrown = assertThrows(CachePersistenceException.class, () -> service.getStateRepositoryWithin(cacheIdentifier, "myRepo")); + assertThat(thrown, hasProperty("message", startsWith("Clustered space not found for identifier"))); } @Test public void testReleaseNonExistentPersistenceSpaceIdentifierTwice() throws Exception { - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Unknown identifier"); ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration(URI.create(CLUSTER_URI_BASE), true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); ClusteredCacheIdentifier cacheIdentifier = mock(ClusteredCacheIdentifier.class); doReturn("foo").when(cacheIdentifier).getId(); - service.releasePersistenceSpaceIdentifier(cacheIdentifier); + + CachePersistenceException thrown = assertThrows(CachePersistenceException.class, () -> service.releasePersistenceSpaceIdentifier(cacheIdentifier)); + assertThat(thrown, hasProperty("message", startsWith("Unknown identifier"))); } @Test public void testReleasePersistenceSpaceIdentifierTwice() throws Exception { - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Unknown identifier"); ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration(URI.create(CLUSTER_URI_BASE), true, new ServerSideConfiguration(Collections.emptyMap())); + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE)) + .autoCreate(s -> s) + .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); - PersistableResourceService.PersistenceSpaceIdentifier cacheIdentifier = service.getPersistenceSpaceIdentifier("myCache", null); + PersistableResourceService.PersistenceSpaceIdentifier cacheIdentifier = service.getPersistenceSpaceIdentifier("myCache", null); try { service.releasePersistenceSpaceIdentifier(cacheIdentifier); } catch (CachePersistenceException e) { fail("First invocation of releasePersistenceSpaceIdentifier should not have failed"); } - service.releasePersistenceSpaceIdentifier(cacheIdentifier); + CachePersistenceException thrown = assertThrows(CachePersistenceException.class, () -> service.releasePersistenceSpaceIdentifier(cacheIdentifier)); + assertThat(thrown, hasProperty("message", startsWith("Unknown identifier"))); + } + + @Test + public void releaseMaintenanceHoldsWhenConnectionClosedDuringDestruction() { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE)) + .autoCreate(s -> s) + .build(); + DefaultClusteringService service = new DefaultClusteringService(configuration); + assertThat(service.isConnected(), is(false)); + service.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); + assertThat(service.isConnected(), is(true)); + + ConnectionState connectionState = service.getConnectionState(); + ClusterTierManagerClientEntityFactory clusterTierManagerClientEntityFactory = connectionState.getEntityFactory(); + assertEquals(clusterTierManagerClientEntityFactory.getMaintenanceHolds().size(), 1); + connectionState.destroyState(false); + assertEquals(clusterTierManagerClientEntityFactory.getMaintenanceHolds().size(), 0); + service.stop(); } private static Throwable getRootCause(Throwable t) { diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ReconnectTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ReconnectTest.java new file mode 100644 index 0000000000..dbcf495614 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/ReconnectTest.java @@ -0,0 +1,97 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.service; + +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.Timeouts; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.ClusterTierManagerValidationException; +import org.ehcache.clustered.client.internal.MockConnectionService; +import org.hamcrest.Matchers; +import org.junit.Test; +import org.mockito.Mockito; +import org.terracotta.connection.Connection; +import org.terracotta.exception.ConnectionShutdownException; + +import java.net.URI; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.MatcherAssert.assertThat; + +public class ReconnectTest { + + private static URI CLUSTER_URI = URI.create("mock://localhost:9510"); + + private final ClusteringServiceConfiguration serviceConfiguration = ClusteringServiceConfigurationBuilder + .cluster(CLUSTER_URI) + .autoCreate(c -> c) + .build(); + + @Test(expected = RuntimeException.class) + public void testInitialConnectDoesNotRetryAfterConnectionException() { + MockConnectionService.mockConnection = null; + ConnectionState connectionState = new ConnectionState(Timeouts.DEFAULT, new Properties(), serviceConfiguration); + + connectionState.initClusterConnection(Runnable::run); + } + + @Test + public void testAfterConnectionReconnectHappensEvenAfterConnectionException() throws Exception { + Connection connection = Mockito.mock(Connection.class, Mockito.withSettings() + .defaultAnswer(invocation -> { + throw new ConnectionShutdownException("Connection Closed"); + })); + + MockConnectionService.mockConnection = connection; + + ConnectionState connectionState = new ConnectionState(Timeouts.DEFAULT, new Properties(), serviceConfiguration); + + connectionState.initClusterConnection(Runnable::run); + + CompletableFuture future = CompletableFuture.runAsync(() -> { + try { + connectionState.initializeState(); + } catch (ClusterTierManagerValidationException e) { + throw new AssertionError(e); + } + }); + + MockConnectionService.mockConnection = null; + + CompletableFuture reconnecting = CompletableFuture.runAsync(() -> { + MockConnectionService.mockConnection = Mockito.mock(Connection.class, Mockito.withSettings().defaultAnswer(invocation -> { + throw new RuntimeException("Stop reconnecting"); + })); + while (connectionState.getReconnectCount() == 1) { + break; + } + }); + + reconnecting.get(); + + try { + future.get(); + } catch (ExecutionException e) { + assertThat(e.getCause().getMessage(), Matchers.is("Stop reconnecting")); + } + + assertThat(connectionState.getReconnectCount(), Matchers.is(1)); + + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/StateRepositoryWhitelistingTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/StateRepositoryWhitelistingTest.java similarity index 95% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/StateRepositoryWhitelistingTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/StateRepositoryWhitelistingTest.java index 6975d20aa5..ffe57b376c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/StateRepositoryWhitelistingTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/StateRepositoryWhitelistingTest.java @@ -29,8 +29,8 @@ import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.ClusterTierManagerServerEntityService; import org.ehcache.clustered.server.store.ClusterTierServerEntityService; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.impl.config.BaseCacheConfiguration; +import org.ehcache.core.store.StoreConfigurationImpl; import org.ehcache.spi.persistence.StateHolder; import org.junit.After; import org.junit.Before; @@ -48,11 +48,11 @@ import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; import static org.ehcache.config.Eviction.noAdvice; +import static org.ehcache.config.builders.ExpiryPolicyBuilder.noExpiration; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.expiry.Expirations.noExpiration; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; @@ -84,7 +84,7 @@ public void setUp() throws Exception { ClusteringServiceConfiguration configuration = ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) - .autoCreate() + .autoCreate(c -> c) .build(); service = new ClusteringServiceFactory().create(configuration); @@ -169,6 +169,9 @@ public void testWhitelistingForPrimitiveClass() throws Exception { } private static class Parent implements Serializable { + + private static final long serialVersionUID = 1L; + final int val; private Parent(int val) { @@ -192,6 +195,9 @@ public int hashCode() { } private static class Child extends Parent implements Serializable { + + private static final long serialVersionUID = 1L; + final long longValue; private Child(int val, long longValue) { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/TestServiceProvider.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/TestServiceProvider.java similarity index 100% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/TestServiceProvider.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/service/TestServiceProvider.java diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/AbstractServerStoreProxyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/AbstractServerStoreProxyTest.java new file mode 100644 index 0000000000..1b4bb4438c --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/AbstractServerStoreProxyTest.java @@ -0,0 +1,107 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityService; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.UnitTestConnectionService.PassthroughServerBuilder; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; +import org.ehcache.clustered.server.ClusterTierManagerServerEntityService; +import org.ehcache.clustered.server.store.ObservableClusterTierServerEntityService; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.serialization.LongSerializer; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.terracotta.connection.Connection; + +import java.net.URI; +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; + +public abstract class AbstractServerStoreProxyTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://localhost"); + private static final UnitTestConnectionService CONNECTION_SERVICE = new UnitTestConnectionService(); + + protected static ObservableClusterTierServerEntityService observableClusterTierService; + + @BeforeClass + public static void createCluster() { + UnitTestConnectionService.add(CLUSTER_URI, new PassthroughServerBuilder() + .serverEntityService(new ClusterTierManagerServerEntityService()) + .clientEntityService(new ClusterTierManagerClientEntityService()) + .serverEntityService(observableClusterTierService = new ObservableClusterTierServerEntityService()) + .clientEntityService(new ClusterTierClientEntityService()) + .serverEntityService(new VoltronReadWriteLockServerEntityService()) + .clientEntityService(new VoltronReadWriteLockEntityClientService()) + .resource("defaultResource", 128, MemoryUnit.MB).build()); + } + + @AfterClass + public static void destroyCluster() { + UnitTestConnectionService.remove(CLUSTER_URI); + observableClusterTierService = null; + } + + protected static SimpleClusterTierClientEntity createClientEntity(String name, + ServerStoreConfiguration configuration, + boolean create) throws Exception { + return createClientEntity(name, configuration, create, true); + } + + protected static SimpleClusterTierClientEntity createClientEntity(String name, + ServerStoreConfiguration configuration, + boolean create, + boolean validate) throws Exception { + Connection connection = CONNECTION_SERVICE.connect(CLUSTER_URI, new Properties()); + + // Create ClusterTierManagerClientEntity if needed + ClusterTierManagerClientEntityFactory entityFactory = new ClusterTierManagerClientEntityFactory( + connection, Runnable::run, + TimeoutsBuilder.timeouts().write(Duration.ofSeconds(30)).build()); + if (create) { + entityFactory.create(name, new ServerSideConfiguration("defaultResource", Collections.emptyMap())); + } + // Create or fetch the ClusterTierClientEntity + SimpleClusterTierClientEntity clientEntity = (SimpleClusterTierClientEntity) entityFactory.fetchOrCreateClusteredStoreEntity(name, name, configuration, create ? ClusteringServiceConfiguration.ClientMode.AUTO_CREATE : ClusteringServiceConfiguration.ClientMode.CONNECT, false); + if (validate) { + clientEntity.validate(configuration); + } + return clientEntity; + } + + protected static SimpleClusterTierClientEntity createClientEntity(String name, Consistency consistency, boolean create) throws Exception { + ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(8L, MemoryUnit.MB); + + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class + .getName(), + Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class + .getName(), consistency, false); + + return createClientEntity(name, serverStoreConfiguration, create); + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ChainBuilderTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ChainBuilderTest.java new file mode 100644 index 0000000000..56abb981a2 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ChainBuilderTest.java @@ -0,0 +1,41 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.util.ChainBuilder; +import org.junit.Test; + +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.Matchers.hasPayloads; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +/** + */ +public class ChainBuilderTest { + + @Test + public void testChainBuilder() { + Chain chain = new ChainBuilder() + .add(createPayload(1L)) + .add(createPayload(3L)) + .add(createPayload(4L)) + .add(createPayload(2L)).build(); + + assertThat(chain, hasPayloads(1L, 3L, 4L, 2L)); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreEventsTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreEventsTest.java new file mode 100644 index 0000000000..9751829dc9 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreEventsTest.java @@ -0,0 +1,378 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.ChainUtils; +import org.ehcache.clustered.client.TestTimeSource; +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.client.internal.store.operations.ExpiryChainResolver; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.operations.ConditionalRemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.ConditionalReplaceOperation; +import org.ehcache.clustered.common.internal.store.operations.Operation; +import org.ehcache.clustered.common.internal.store.operations.PutIfAbsentOperation; +import org.ehcache.clustered.common.internal.store.operations.PutOperation; +import org.ehcache.clustered.common.internal.store.operations.RemoveOperation; +import org.ehcache.clustered.common.internal.store.operations.ReplaceOperation; +import org.ehcache.clustered.common.internal.store.operations.TimestampOperation; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.events.StoreEventSink; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.serialization.LongSerializer; +import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.serialization.Serializer; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.mockito.InOrder; +import org.terracotta.connection.Connection; + +import java.net.URI; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; +import java.util.function.Supplier; + +import static org.hamcrest.core.IsEqual.equalTo; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.mockito.hamcrest.MockitoHamcrest.argThat; + +public class ClusteredStoreEventsTest { + + private static final String CACHE_IDENTIFIER = "testCache"; + private static final URI CLUSTER_URI = URI.create("terracotta://localhost"); + + private final Store.Configuration config = new Store.Configuration() { + + @Override + public Class getKeyType() { + return Long.class; + } + + @Override + public Class getValueType() { + return String.class; + } + + @Override + public EvictionAdvisor getEvictionAdvisor() { + return null; + } + + @Override + public ClassLoader getClassLoader() { + return null; + } + + @Override + public ExpiryPolicy getExpiry() { + return null; + } + + @Override + public ResourcePools getResourcePools() { + return null; + } + + @Override + public Serializer getKeySerializer() { + return null; + } + + @Override + public Serializer getValueSerializer() { + return null; + } + + @Override + public int getDispatcherConcurrency() { + return 0; + } + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }; + private StoreEventSink storeEventSink; + private ServerCallback serverCallback; + private OperationsCodec codec; + private TestTimeSource testTimeSource; + + @SuppressWarnings("unchecked") + @Before + public void setup() throws Exception { + UnitTestConnectionService.add( + CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder().resource("defaultResource", 8, MemoryUnit.MB).build() + ); + + Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); + ClusterTierManagerClientEntityFactory entityFactory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); + + ServerSideConfiguration serverConfig = + new ServerSideConfiguration("defaultResource", Collections.emptyMap()); + entityFactory.create("TestCacheManager", serverConfig); + + ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB); + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), + Long.class.getName(), String.class.getName(), LongSerializer.class.getName(), StringSerializer.class.getName(), null, false); + ClusterTierClientEntity clientEntity = entityFactory.fetchOrCreateClusteredStoreEntity("TestCacheManager", CACHE_IDENTIFIER, serverStoreConfiguration, ClusteringServiceConfiguration.ClientMode.AUTO_CREATE, false); + clientEntity.validate(serverStoreConfiguration); + ServerStoreProxy serverStoreProxy = new CommonServerStoreProxy(CACHE_IDENTIFIER, clientEntity, mock(ServerCallback.class)); + + testTimeSource = new TestTimeSource(); + + codec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); + ChainResolver resolver = new ExpiryChainResolver<>(codec, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1000))); + + StoreEventDispatcher storeEventDispatcher = mock(StoreEventDispatcher.class); + storeEventSink = mock(StoreEventSink.class); + when(storeEventDispatcher.eventSink()).thenReturn(storeEventSink); + + ClusteredStore store = new ClusteredStore<>(config, codec, resolver, serverStoreProxy, testTimeSource, storeEventDispatcher, new DefaultStatisticsService()); + serverCallback = new ClusteredStore.Provider().getServerCallback(store); + } + + @After + public void tearDown() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + private ByteBuffer op(Operation operation) { + return codec.encode(operation); + } + + + @Test + public void testOnAppend_PutAfterNothingFiresCreatedEvent() { + Chain beforeAppend = ChainUtils.chainOf(); + serverCallback.onAppend(beforeAppend, op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + + verify(storeEventSink).created(eq(1L), eq("one")); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_PutAfterPutFiresUpdatedEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + serverCallback.onAppend(beforeAppend, op(new PutOperation<>(1L, "one-bis", testTimeSource.getTimeMillis()))); + + verify(storeEventSink).updated(eq(1L), argThat(supplies("one")), eq("one-bis")); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_RemoveAfterPutFiresRemovedEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + serverCallback.onAppend(beforeAppend, op(new RemoveOperation<>(1L, testTimeSource.getTimeMillis()))); + + verify(storeEventSink).removed(eq(1L), argThat(supplies("one"))); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_RemoveAfterNothingFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(); + serverCallback.onAppend(beforeAppend, op(new RemoveOperation<>(1L, testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_ReplaceAfterPutFiresUpdatedEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + serverCallback.onAppend(beforeAppend, op(new ReplaceOperation<>(1L, "one-bis", testTimeSource.getTimeMillis()))); + + verify(storeEventSink).updated(eq(1L), argThat(supplies("one")), eq("one-bis")); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_ReplaceAfterNothingFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(); + serverCallback.onAppend(beforeAppend, op(new ReplaceOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_PutIfAbsentAfterNothingFiresCreatedEvent() { + Chain beforeAppend = ChainUtils.chainOf(); + serverCallback.onAppend(beforeAppend, op(new PutIfAbsentOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + + verify(storeEventSink).created(eq(1L), eq("one")); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_PutIfAbsentAfterPutFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + serverCallback.onAppend(beforeAppend, op(new PutIfAbsentOperation<>(1L, "one-bis", testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_SuccessfulReplaceConditionalAfterPutFiresUpdatedEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + serverCallback.onAppend(beforeAppend, op(new ConditionalReplaceOperation<>(1L, "one", "one-bis", testTimeSource.getTimeMillis()))); + + verify(storeEventSink).updated(eq(1L), argThat(supplies("one")), eq("one-bis")); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_FailingReplaceConditionalAfterPutFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + serverCallback.onAppend(beforeAppend, op(new ConditionalReplaceOperation<>(1L, "un", "one-bis", testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_ReplaceConditionalAfterNothingFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(); + serverCallback.onAppend(beforeAppend, op(new ConditionalReplaceOperation<>(1L, "one", "one-bis", testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_SuccessfulRemoveConditionalAfterPutFiresUpdatedEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + serverCallback.onAppend(beforeAppend, op(new ConditionalRemoveOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + + verify(storeEventSink).removed(eq(1L), argThat(supplies("one"))); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_FailingRemoveConditionalAfterPutFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + serverCallback.onAppend(beforeAppend, op(new ConditionalRemoveOperation<>(1L, "un", testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_RemoveConditionalAfterNothingFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(); + serverCallback.onAppend(beforeAppend, op(new ConditionalRemoveOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_timestampAfterExpiryFiresExpiredEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "wrong-one", testTimeSource.getTimeMillis())), op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + testTimeSource.advanceTime(1100L); + serverCallback.onAppend(beforeAppend, op(new TimestampOperation<>(1L, testTimeSource.getTimeMillis()))); + + verify(storeEventSink).expired(eq(1L), argThat(supplies("one"))); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_timestampAfterNothingFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(); + serverCallback.onAppend(beforeAppend, op(new TimestampOperation<>(1L, testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_timestampAfterNoExpiryFiresNoEvent() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + testTimeSource.advanceTime(100L); + serverCallback.onAppend(beforeAppend, op(new TimestampOperation<>(1L, testTimeSource.getTimeMillis()))); + + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnAppend_putIfAbsentAfterExpiredPutFiresCorrectly() { + Chain beforeAppend = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis()))); + testTimeSource.advanceTime(1100L); + serverCallback.onAppend(beforeAppend, op(new PutIfAbsentOperation<>(1L, "one-bis", testTimeSource.getTimeMillis()))); + + InOrder inOrder = inOrder(storeEventSink); + inOrder.verify(storeEventSink).expired(eq(1L), argThat(supplies("one"))); + inOrder.verify(storeEventSink).created(1L, "one-bis"); + inOrder.verifyNoMoreInteractions(); + } + + @Test + public void testOnInvalidateHash_chainFiresEvictedEvents() { + Chain evictedChain = ChainUtils.chainOf(op(new PutOperation<>(1L, "one", testTimeSource.getTimeMillis())), op(new PutOperation<>(2L, "two", testTimeSource.getTimeMillis()))); + serverCallback.onInvalidateHash(1L, evictedChain); + + verify(storeEventSink).evicted(eq(1L), argThat(supplies("one"))); + verify(storeEventSink).evicted(eq(2L), argThat(supplies("two"))); + verifyNoMoreInteractions(storeEventSink); + } + + @Test + public void testOnInvalidateHash_noChainFiresNoEvent() { + serverCallback.onInvalidateHash(1L, null); + + verifyNoMoreInteractions(storeEventSink); + } + + private static Matcher> supplies(T value) { + return supplies(equalTo(value)); + } + + private static Matcher> supplies(Matcher matcher) { + return new TypeSafeMatcher>() { + @Override + protected boolean matchesSafely(Supplier item) { + return matcher.matches(item.get()); + } + + @Override + public void describeTo(Description description) { + description.appendValue(" supplier of ").appendDescriptionOf(matcher); + } + }; + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java similarity index 87% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java index 41125ca8c1..814b8886ff 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java @@ -25,22 +25,26 @@ import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.config.ResourcePoolsImpl; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.CacheManagerProviderService; +import org.ehcache.impl.config.ResourcePoolsImpl; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; import org.ehcache.impl.internal.store.tiering.TieredStore; import org.ehcache.impl.serialization.LongSerializer; import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Test; +import org.mockito.Answers; +import org.mockito.Mockito; import java.util.Arrays; import java.util.Collections; @@ -48,10 +52,11 @@ import java.util.List; import java.util.Map; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; -import static org.junit.Assert.*; +import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; /** @@ -88,6 +93,7 @@ public void testRankTiered() throws Exception { .with(new OffHeapStore.Provider()) .with(new OffHeapDiskStore.Provider()) .with(mock(DiskResourceService.class)) + .with(Mockito.mock(CacheManagerProviderService.class, Answers.RETURNS_DEEP_STUBS)) .with(mock(ClusteringService.class)).build(); serviceLocator.startAllServices(); @@ -123,14 +129,14 @@ public void testAuthoritativeRank() throws Exception { ServiceLocator serviceLocator = dependencySet().with(mock(ClusteringService.class)).build(); provider.start(serviceLocator); - assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, Collections.>emptyList()), is(1)); - assertThat(provider.rankAuthority(ClusteredResourceType.Types.SHARED, Collections.>emptyList()), is(1)); - assertThat(provider.rankAuthority(new UnmatchedResourceType(), Collections.>emptyList()), is(0)); + assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, Collections.>emptyList()), is(1)); + assertThat(provider.rankAuthority(ClusteredResourceType.Types.SHARED, Collections.>emptyList()), is(1)); + assertThat(provider.rankAuthority(new UnmatchedResourceType(), Collections.>emptyList()), is(0)); } private void assertRank(final Store.Provider provider, final int expectedRank, final ResourceType... resources) { - final List> serviceConfigs = Collections.emptyList(); + final List> serviceConfigs = Collections.emptyList(); if (expectedRank == -1) { try { provider.rank(new HashSet<>(Arrays.asList(resources)), @@ -168,8 +174,8 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { - return Expirations.noExpiration(); + public ExpiryPolicy getExpiry() { + return ExpiryPolicyBuilder.noExpiration(); } @Override @@ -194,10 +200,15 @@ public Serializer getValueSerializer() { public int getDispatcherConcurrency() { return 1; } + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } }; } - private static class UnmatchedResourceType implements ResourceType { + public static class UnmatchedResourceType implements ResourceType { @Override public Class getResourcePoolClass() { return ResourcePool.class; diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java new file mode 100644 index 0000000000..0ce631c4a9 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -0,0 +1,732 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store; + +import com.google.common.base.Objects; +import org.assertj.core.api.ThrowableAssert; +import org.ehcache.Cache; +import org.ehcache.clustered.client.TestTimeSource; +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.client.internal.store.operations.EternalChainResolver; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.Ehcache; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.store.DefaultStoreEventDispatcher; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.impl.store.HashUtils; +import org.ehcache.impl.serialization.LongSerializer; +import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.serialization.Serializer; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.connection.Connection; + +import java.net.URI; +import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Properties; +import java.util.concurrent.TimeoutException; +import java.util.function.Function; + +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.ehcache.clustered.util.StatisticsTestUtils.validateStat; +import static org.ehcache.clustered.util.StatisticsTestUtils.validateStats; +import static org.ehcache.core.spi.store.Store.ValueHolder.NO_EXPIRE; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.CombinableMatcher.either; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.isNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.RETURNS_MOCKS; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.withSettings; + +public class ClusteredStoreTest { + + private static final String CACHE_IDENTIFIER = "testCache"; + private static final URI CLUSTER_URI = URI.create("terracotta://localhost"); + + private ClusteredStore store; + + private final Store.Configuration config = new Store.Configuration() { + + @Override + public Class getKeyType() { + return Long.class; + } + + @Override + public Class getValueType() { + return String.class; + } + + @Override + public EvictionAdvisor getEvictionAdvisor() { + return null; + } + + @Override + public ClassLoader getClassLoader() { + return null; + } + + @Override + public ExpiryPolicy getExpiry() { + return null; + } + + @Override + public ResourcePools getResourcePools() { + return null; + } + + @Override + public Serializer getKeySerializer() { + return null; + } + + @Override + public Serializer getValueSerializer() { + return null; + } + + @Override + public int getDispatcherConcurrency() { + return 0; + } + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }; + + @Before + public void setup() throws Exception { + UnitTestConnectionService.add( + CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder().resource("defaultResource", 8, MemoryUnit.MB).build() + ); + + Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); + ClusterTierManagerClientEntityFactory entityFactory = new ClusterTierManagerClientEntityFactory(connection, Runnable::run); + + ServerSideConfiguration serverConfig = + new ServerSideConfiguration("defaultResource", Collections.emptyMap()); + entityFactory.create("TestCacheManager", serverConfig); + + ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB); + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), + Long.class.getName(), String.class.getName(), LongSerializer.class.getName(), StringSerializer.class.getName(), null, false); + ClusterTierClientEntity clientEntity = entityFactory.fetchOrCreateClusteredStoreEntity("TestCacheManager", CACHE_IDENTIFIER, serverStoreConfiguration, ClusteringServiceConfiguration.ClientMode.AUTO_CREATE, false); + clientEntity.validate(serverStoreConfiguration); + ServerStoreProxy serverStoreProxy = new CommonServerStoreProxy(CACHE_IDENTIFIER, clientEntity, mock(ServerCallback.class)); + + TestTimeSource testTimeSource = new TestTimeSource(); + + OperationsCodec codec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); + EternalChainResolver resolver = new EternalChainResolver<>(codec); + store = new ClusteredStore<>(config, codec, resolver, serverStoreProxy, testTimeSource, new DefaultStoreEventDispatcher<>(8), new DefaultStatisticsService()); + } + + @After + public void tearDown() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + private void assertTimeoutOccurred(ThrowableAssert.ThrowingCallable throwingCallable) { + assertThatExceptionOfType(StoreAccessException.class) + .isThrownBy(throwingCallable) + .withCauseInstanceOf(TimeoutException.class); + } + + @Test + public void testPut() throws Exception { + assertThat(store.put(1L, "one"), is(Store.PutStatus.PUT)); + validateStats(store, EnumSet.of(StoreOperationOutcomes.PutOutcome.PUT)); + assertThat(store.put(1L, "another one"), is(Store.PutStatus.PUT)); + assertThat(store.put(1L, "yet another one"), is(Store.PutStatus.PUT)); + validateStat(store, StoreOperationOutcomes.PutOutcome.PUT, 3); + } + + @Test + @SuppressWarnings("unchecked") + public void testPutTimeout() throws Exception { + ServerStoreProxy proxy = mock(ServerStoreProxy.class); + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + doThrow(TimeoutException.class).when(proxy).append(anyLong(), isNull()); + ClusteredStore store = new ClusteredStore<>(config, codec, null, proxy, timeSource, null, new DefaultStatisticsService()); + + assertTimeoutOccurred(() -> store.put(1L, "one")); + } + + @Test + public void testGet() throws Exception { + assertThat(store.get(1L), nullValue()); + validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.MISS)); + store.put(1L, "one"); + assertThat(store.get(1L).get(), is("one")); + validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.HIT)); + } + + @Test(expected = StoreAccessException.class) + public void testGetThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + EternalChainResolver chainResolver = mock(EternalChainResolver.class); + ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); + when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); + TestTimeSource testTimeSource = mock(TestTimeSource.class); + ClusteredStore store = new ClusteredStore<>(config, codec, chainResolver, serverStoreProxy, testTimeSource, null, new DefaultStatisticsService()); + store.get(1L); + } + + @Test + @SuppressWarnings("unchecked") + public void testGetTimeout() throws Exception { + ServerStoreProxy proxy = mock(ServerStoreProxy.class); + long longKey = HashUtils.intHashToLong(new Long(1L).hashCode()); + when(proxy.get(longKey)).thenThrow(TimeoutException.class); + ClusteredStore store = new ClusteredStore<>(config,null, null, proxy, null, null, new DefaultStatisticsService()); + assertThat(store.get(1L), nullValue()); + validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.TIMEOUT)); + } + + @Test + public void testContainsKey() throws Exception { + assertThat(store.containsKey(1L), is(false)); + store.put(1L, "one"); + assertThat(store.containsKey(1L), is(true)); + validateStat(store, StoreOperationOutcomes.GetOutcome.HIT, 0); + validateStat(store, StoreOperationOutcomes.GetOutcome.MISS, 0); + } + + @Test(expected = StoreAccessException.class) + public void testContainsKeyThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + EternalChainResolver chainResolver = mock(EternalChainResolver.class); + ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); + when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); + TestTimeSource testTimeSource = mock(TestTimeSource.class); + ClusteredStore store = new ClusteredStore<>(config, codec, chainResolver, serverStoreProxy, testTimeSource, null, new DefaultStatisticsService()); + store.containsKey(1L); + } + + @Test + public void testRemove() throws Exception { + assertThat(store.remove(1L), is(false)); + validateStats(store, EnumSet.of(StoreOperationOutcomes.RemoveOutcome.MISS)); + store.put(1L, "one"); + assertThat(store.remove(1L), is(true)); + assertThat(store.containsKey(1L), is(false)); + validateStats(store, EnumSet.of(StoreOperationOutcomes.RemoveOutcome.MISS, StoreOperationOutcomes.RemoveOutcome.REMOVED)); + } + + @Test + public void testRemoveThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + EternalChainResolver chainResolver = mock(EternalChainResolver.class); + ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); + RuntimeException theException = new RuntimeException(); + when(serverStoreProxy.getAndAppend(anyLong(), any())).thenThrow(theException); + TestTimeSource testTimeSource = new TestTimeSource(); + + ClusteredStore store = new ClusteredStore<>(config, codec, chainResolver, serverStoreProxy, testTimeSource, null, new DefaultStatisticsService()); + assertThatExceptionOfType(StoreAccessException.class) + .isThrownBy(() -> store.remove(1L)) + .withCause(theException); + } + + @Test + @SuppressWarnings("unchecked") + public void testRemoveTimeout() throws Exception { + ServerStoreProxy proxy = mock(ServerStoreProxy.class); + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); + ClusteredStore store = new ClusteredStore<>(config, codec, null, proxy, timeSource, null, new DefaultStatisticsService()); + + assertTimeoutOccurred(() -> store.remove(1L)); + } + + @Test + public void testClear() throws Exception { + assertThat(store.containsKey(1L), is(false)); + store.clear(); + assertThat(store.containsKey(1L), is(false)); + + store.put(1L, "one"); + store.put(2L, "two"); + store.put(3L, "three"); + assertThat(store.containsKey(1L), is(true)); + + store.clear(); + + assertThat(store.containsKey(1L), is(false)); + assertThat(store.containsKey(2L), is(false)); + assertThat(store.containsKey(3L), is(false)); + } + + @Test(expected = StoreAccessException.class) + public void testClearThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + EternalChainResolver chainResolver = mock(EternalChainResolver.class); + ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); + doThrow(new RuntimeException()).when(serverStoreProxy).clear(); + TestTimeSource testTimeSource = mock(TestTimeSource.class); + ClusteredStore store = new ClusteredStore<>(config, codec, chainResolver, serverStoreProxy, testTimeSource, null, new DefaultStatisticsService()); + store.clear(); + } + + @Test + public void testClearTimeout() throws Exception { + ServerStoreProxy proxy = mock(ServerStoreProxy.class); + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + doThrow(TimeoutException.class).when(proxy).clear(); + ClusteredStore store = new ClusteredStore<>(config, codec, null, proxy, timeSource, null, new DefaultStatisticsService()); + + assertTimeoutOccurred(() -> store.clear()); + } + + @Test + public void testPutIfAbsent() throws Exception { + assertThat(store.putIfAbsent(1L, "one", b -> {}), nullValue()); + validateStats(store, EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)); + assertThat(store.putIfAbsent(1L, "another one", b -> {}).get(), is("one")); + validateStats(store, EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT, StoreOperationOutcomes.PutIfAbsentOutcome.HIT)); + } + + @Test(expected = StoreAccessException.class) + public void testPutIfAbsentThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + EternalChainResolver chainResolver = mock(EternalChainResolver.class); + ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); + when(serverStoreProxy.getAndAppend(anyLong(), any())).thenThrow(new RuntimeException()); + TestTimeSource testTimeSource = mock(TestTimeSource.class); + ClusteredStore store = new ClusteredStore<>(config, codec, chainResolver, serverStoreProxy, testTimeSource, null, new DefaultStatisticsService()); + store.putIfAbsent(1L, "one", b -> {}); + } + + @Test + @SuppressWarnings("unchecked") + public void testPutIfAbsentTimeout() throws Exception { + ServerStoreProxy proxy = mock(ServerStoreProxy.class); + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); + ClusteredStore store = new ClusteredStore<>(config, codec, null, proxy, timeSource, null, new DefaultStatisticsService()); + + assertTimeoutOccurred(() -> store.putIfAbsent(1L, "one", b -> {})); + } + + @Test + public void testConditionalRemove() throws Exception { + assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.KEY_MISSING)); + validateStats(store, EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS)); + store.put(1L, "one"); + assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.REMOVED)); + validateStats(store, EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS, StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED)); + store.put(1L, "another one"); + assertThat(store.remove(1L, "one"), is(Store.RemoveStatus.KEY_PRESENT)); + validateStat(store, StoreOperationOutcomes.ConditionalRemoveOutcome.MISS, 2); + } + + @Test(expected = StoreAccessException.class) + public void testConditionalRemoveThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + EternalChainResolver chainResolver = mock(EternalChainResolver.class); + ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); + when(serverStoreProxy.getAndAppend(anyLong(), any())).thenThrow(new RuntimeException()); + TestTimeSource testTimeSource = mock(TestTimeSource.class); + ClusteredStore store = new ClusteredStore<>(config, codec, chainResolver, serverStoreProxy, testTimeSource, null, new DefaultStatisticsService()); + store.remove(1L, "one"); + } + + @Test + @SuppressWarnings("unchecked") + public void testConditionalRemoveTimeout() throws Exception { + ServerStoreProxy proxy = mock(ServerStoreProxy.class); + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); + ClusteredStore store = new ClusteredStore<>(config, codec, null, proxy, timeSource, null, new DefaultStatisticsService()); + + assertTimeoutOccurred(() -> store.remove(1L, "one")); + } + + @Test + public void testReplace() throws Exception { + assertThat(store.replace(1L, "one"), nullValue()); + validateStats(store, EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.MISS)); + store.put(1L, "one"); + assertThat(store.replace(1L, "another one").get(), is("one")); + validateStats(store, EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.MISS, StoreOperationOutcomes.ReplaceOutcome.REPLACED)); + } + + @Test(expected = StoreAccessException.class) + public void testReplaceThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + EternalChainResolver chainResolver = mock(EternalChainResolver.class); + ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); + when(serverStoreProxy.getAndAppend(anyLong(), any())).thenThrow(new RuntimeException()); + TestTimeSource testTimeSource = mock(TestTimeSource.class); + ClusteredStore store = new ClusteredStore<>(config, codec, chainResolver, serverStoreProxy, testTimeSource, null, new DefaultStatisticsService()); + store.replace(1L, "one"); + } + + @Test + @SuppressWarnings("unchecked") + public void testReplaceTimeout() throws Exception { + ServerStoreProxy proxy = mock(ServerStoreProxy.class); + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); + ClusteredStore store = new ClusteredStore<>(config, codec, null, proxy, timeSource, null, new DefaultStatisticsService()); + + assertTimeoutOccurred(() -> store.replace(1L, "one")); + } + + @Test + public void testConditionalReplace() throws Exception { + assertThat(store.replace(1L, "one" , "another one"), is(Store.ReplaceStatus.MISS_NOT_PRESENT)); + validateStats(store, EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS)); + store.put(1L, "some other one"); + assertThat(store.replace(1L, "one" , "another one"), is(Store.ReplaceStatus.MISS_PRESENT)); + validateStat(store, StoreOperationOutcomes.ConditionalReplaceOutcome.MISS, 2); + validateStat(store, StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED, 0); + assertThat(store.replace(1L, "some other one" , "another one"), is(Store.ReplaceStatus.HIT)); + validateStat(store, StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED, 1); + validateStat(store, StoreOperationOutcomes.ConditionalReplaceOutcome.MISS, 2); + } + + @Test(expected = StoreAccessException.class) + public void testConditionalReplaceThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + EternalChainResolver chainResolver = mock(EternalChainResolver.class); + ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); + when(serverStoreProxy.getAndAppend(anyLong(), any())).thenThrow(new RuntimeException()); + TestTimeSource testTimeSource = mock(TestTimeSource.class); + ClusteredStore store = new ClusteredStore<>(config, codec, chainResolver, serverStoreProxy, testTimeSource, null, new DefaultStatisticsService()); + store.replace(1L, "one", "another one"); + } + + @Test + @SuppressWarnings("unchecked") + public void testConditionalReplaceTimeout() throws Exception { + ServerStoreProxy proxy = mock(ServerStoreProxy.class); + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + when(proxy.getAndAppend(anyLong(), isNull())).thenThrow(TimeoutException.class); + ClusteredStore store = new ClusteredStore<>(config, codec, null, proxy, timeSource, null, new DefaultStatisticsService()); + + assertTimeoutOccurred(() -> store.replace(1L, "one", "another one")); + } + + @Test + public void testBulkComputePutAll() throws Exception { + store.put(1L, "another one"); + Map map = new HashMap<>(); + map.put(1L, "one"); + map.put(2L, "two"); + Ehcache.PutAllFunction putAllFunction = new Ehcache.PutAllFunction<>(null, map, null); + Map> valueHolderMap = store.bulkCompute(new HashSet<>(Arrays.asList(1L, 2L)), putAllFunction); + + assertThat(valueHolderMap.get(1L).get(), is(map.get(1L))); + assertThat(store.get(1L).get(), is(map.get(1L))); + assertThat(valueHolderMap.get(2L).get(), is(map.get(2L))); + assertThat(store.get(2L).get(), is(map.get(2L))); + assertThat(putAllFunction.getActualPutCount().get(), is(2)); + validateStats(store, EnumSet.of(StoreOperationOutcomes.PutOutcome.PUT)); //outcome of the initial store put + } + + @Test + public void testBulkComputeRemoveAll() throws Exception { + store.put(1L, "one"); + store.put(2L, "two"); + store.put(3L, "three"); + Ehcache.RemoveAllFunction removeAllFunction = new Ehcache.RemoveAllFunction<>(); + Map> valueHolderMap = store.bulkCompute(new HashSet<>(Arrays.asList(1L, 2L, 4L)), removeAllFunction); + + assertThat(valueHolderMap.get(1L), nullValue()); + assertThat(store.get(1L), nullValue()); + assertThat(valueHolderMap.get(2L), nullValue()); + assertThat(store.get(2L), nullValue()); + assertThat(valueHolderMap.get(4L), nullValue()); + assertThat(store.get(4L), nullValue()); + validateStats(store, EnumSet.noneOf(StoreOperationOutcomes.RemoveOutcome.class)); + } + + @Test(expected = UnsupportedOperationException.class) + public void testBulkComputeThrowsForGenericFunction() throws Exception { + @SuppressWarnings("unchecked") + Function>, Iterable>> remappingFunction + = mock(Function.class); + store.bulkCompute(new HashSet<>(Arrays.asList(1L, 2L)), remappingFunction); + } + + @Test + public void testBulkComputeIfAbsentGetAll() throws Exception { + store.put(1L, "one"); + store.put(2L, "two"); + Ehcache.GetAllFunction getAllAllFunction = new Ehcache.GetAllFunction<>(); + Map> valueHolderMap = store.bulkComputeIfAbsent(new HashSet<>(Arrays.asList(1L, 2L)), getAllAllFunction); + + assertThat(valueHolderMap.get(1L).get(), is("one")); + assertThat(store.get(1L).get(), is("one")); + assertThat(valueHolderMap.get(2L).get(), is("two")); + assertThat(store.get(2L).get(), is("two")); + } + + @Test(expected = UnsupportedOperationException.class) + public void testBulkComputeIfAbsentThrowsForGenericFunction() throws Exception { + @SuppressWarnings("unchecked") + Function, Iterable>> mappingFunction + = mock(Function.class); + store.bulkComputeIfAbsent(new HashSet<>(Arrays.asList(1L, 2L)), mappingFunction); + } + + @Test + public void testExpirationIsSentToHigherTiers() throws Exception { + @SuppressWarnings("unchecked") + Store.ValueHolder valueHolder = mock(Store.ValueHolder.class, withSettings().defaultAnswer(RETURNS_MOCKS)); + when(valueHolder.get()).thenReturn("bar"); + when(valueHolder.expirationTime()).thenReturn(1000L); + + @SuppressWarnings("unchecked") + EternalChainResolver resolver = mock(EternalChainResolver.class); + when(resolver.resolve(any(ServerStoreProxy.ChainEntry.class), anyLong(), anyLong())).thenReturn(valueHolder); + + ServerStoreProxy proxy = mock(ServerStoreProxy.class, withSettings().defaultAnswer(RETURNS_MOCKS)); + + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + + ClusteredStore store = new ClusteredStore<>(config, codec, resolver, proxy, timeSource, null, new DefaultStatisticsService()); + + Store.ValueHolder vh = store.get(1L); + + long expirationTime = vh.expirationTime(); + assertThat(expirationTime, is(1000L)); + } + + @Test + public void testNoExpireIsSentToHigherTiers() throws Exception { + @SuppressWarnings("unchecked") + Store.ValueHolder valueHolder = mock(Store.ValueHolder.class, withSettings().defaultAnswer(RETURNS_MOCKS)); + when(valueHolder.get()).thenReturn("bar"); + when(valueHolder.expirationTime()).thenReturn(NO_EXPIRE); + + @SuppressWarnings("unchecked") + EternalChainResolver resolver = mock(EternalChainResolver.class); + when(resolver.resolve(any(ServerStoreProxy.ChainEntry.class), anyLong(), anyLong())).thenReturn(valueHolder); + + ServerStoreProxy proxy = mock(ServerStoreProxy.class, withSettings().defaultAnswer(RETURNS_MOCKS)); + + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); + TimeSource timeSource = mock(TimeSource.class); + + ClusteredStore store = new ClusteredStore<>(config, codec, resolver, proxy, timeSource, null, new DefaultStatisticsService()); + + Store.ValueHolder vh = store.get(1L); + + long expirationTime = vh.expirationTime(); + assertThat(expirationTime, is(NO_EXPIRE)); + } + + @Test + public void testEmptyChainIteratorIsEmpty() throws StoreAccessException { + + Store.Iterator>> iterator = store.iterator(); + + assertThat(iterator.hasNext(), is(false)); + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testSingleChainSingleValue() throws StoreAccessException { + store.put(1L, "foo"); + + Store.Iterator>> iterator = store.iterator(); + + assertThat(iterator.hasNext(), is(true)); + assertThat(iterator.next(), isEntry(1L, "foo")); + assertThat(iterator.hasNext(), is(false)); + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testSingleChainMultipleValues() throws StoreAccessException { + assertThat(Long.hashCode(1L), is(Long.hashCode(~1L))); + + store.put(1L, "foo"); + store.put(~1L, "bar"); + + Store.Iterator>> iterator = store.iterator(); + + Matcher>> entryOne = isEntry(1L, "foo"); + Matcher>> entryTwo = isEntry(~1L, "bar"); + + assertThat(iterator.hasNext(), is(true)); + + Cache.Entry> next = iterator.next(); + assertThat(next, either(entryOne).or(entryTwo)); + + if (entryOne.matches(next)) { + assertThat(iterator.hasNext(), is(true)); + assertThat(iterator.next(), is(entryTwo)); + assertThat(iterator.hasNext(), is(false)); + } else { + assertThat(iterator.hasNext(), is(true)); + assertThat(iterator.next(), is(entryOne)); + assertThat(iterator.hasNext(), is(false)); + } + + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testSingleChainRequiresResolution() throws StoreAccessException { + + store.put(~1L, "bar"); + store.put(1L, "foo"); + store.remove(~1L); + + Store.Iterator>> iterator = store.iterator(); + + assertThat(iterator.hasNext(), is(true)); + assertThat(iterator.next(), isEntry(1L, "foo")); + assertThat(iterator.hasNext(), is(false)); + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testMultipleChains() throws StoreAccessException { + + store.put(1L, "foo"); + store.put(2L, "bar"); + + Store.Iterator>> iterator = store.iterator(); + + Matcher>> entryOne = isEntry(1L, "foo"); + Matcher>> entryTwo = isEntry(2L, "bar"); + + assertThat(iterator.hasNext(), is(true)); + + Cache.Entry> next = iterator.next(); + assertThat(next, either(entryOne).or(entryTwo)); + + if (entryOne.matches(next)) { + assertThat(iterator.hasNext(), is(true)); + assertThat(iterator.next(), is(entryTwo)); + assertThat(iterator.hasNext(), is(false)); + } else { + assertThat(iterator.hasNext(), is(true)); + assertThat(iterator.next(), is(entryOne)); + assertThat(iterator.hasNext(), is(false)); + } + + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + private Matcher>> isEntry(K key, V value) { + return new TypeSafeMatcher>>() { + @Override + public void describeTo(Description description) { + description.appendText(" the cache entry { ").appendValue(key).appendText(": ").appendValue(value).appendText(" }"); + } + + @Override + protected boolean matchesSafely(Cache.Entry> item) { + return Objects.equal(key, item.getKey()) && Objects.equal(value, item.getValue().get()); + } + }; + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java new file mode 100644 index 0000000000..3677e8781c --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java @@ -0,0 +1,493 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.Matchers; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.server.store.ObservableClusterTierServerEntityService; +import org.hamcrest.CoreMatchers; +import org.hamcrest.Matcher; +import org.hamcrest.core.Is; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.Matchers.hasPayloads; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.CombinableMatcher.either; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +public class CommonServerStoreProxyTest extends AbstractServerStoreProxyTest { + + @Test + public void testInvalidationsContainChains() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testInvalidationsContainChains", Consistency.EVENTUAL, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testInvalidationsContainChains", Consistency.EVENTUAL, false); + + final List store1InvalidatedHashes = new CopyOnWriteArrayList<>(); + final List store1InvalidatedChains = new CopyOnWriteArrayList<>(); + final AtomicBoolean store1InvalidatedAll = new AtomicBoolean(); + final List store2InvalidatedHashes = new CopyOnWriteArrayList<>(); + final List store2InvalidatedChains = new CopyOnWriteArrayList<>(); + final AtomicBoolean store2InvalidatedAll = new AtomicBoolean(); + + EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testInvalidationsContainChains", clientEntity1, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + store1InvalidatedHashes.add(hash); + if (evictedChain != null) { + // make sure the chain's elements' buffers are correctly sized + for (Element element : evictedChain) { + assertThat(element.getPayload().limit(), is(512 * 1024)); + } + store1InvalidatedChains.add(evictedChain); + } + } + + @Override + public void onInvalidateAll() { + store1InvalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + // make sure the appended buffer is correctly sized + assertThat(appended.limit(), is(512 * 1024)); + // make sure the chain's elements' buffers are correctly sized + for (Element element : beforeAppend) { + assertThat(element.getPayload().limit(), is(512 * 1024)); + } + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + fail("should not be called"); + } + }); + serverStoreProxy1.enableEvents(true); + EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testInvalidationsContainChains", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + store2InvalidatedHashes.add(hash); + if (evictedChain != null) { + // make sure the chain's elements' buffers are correctly sized + for (Element element : evictedChain) { + assertThat(element.getPayload().limit(), is(512 * 1024)); + } + store2InvalidatedChains.add(evictedChain); + } + } + + @Override + public void onInvalidateAll() { + store2InvalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + // make sure the appended buffer is correctly sized + assertThat(appended.limit(), is(512 * 1024)); + // make sure the chain's elements' buffers are correctly sized + for (Element element : beforeAppend) { + assertThat(element.getPayload().limit(), is(512 * 1024)); + } + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + fail("should not be called"); + } + }); + serverStoreProxy2.enableEvents(true); + + final int ITERATIONS = 40; + for (int i = 0; i < ITERATIONS; i++) { + serverStoreProxy1.append(i, createPayload(i, 512 * 1024)); + } + + int evictionCount = 0; + int entryCount = 0; + for (int i = 0; i < ITERATIONS; i++) { + Chain elements1 = serverStoreProxy1.get(i); + Chain elements2 = serverStoreProxy2.get(i); + assertThat(elements1, Matchers.matchesChain(elements2)); + if (!elements1.isEmpty()) { + entryCount++; + } else { + evictionCount++; + } + } + + // there has to be server-side evictions, otherwise this test is useless + assertThat(store1InvalidatedHashes.size(), greaterThan(0)); + // test that each time the server evicted, the originating client got notified + assertThat(store1InvalidatedHashes.size(), Is.is(ITERATIONS - entryCount)); + // test that each time the server evicted, the other client got notified on top of normal invalidations + assertThat(store2InvalidatedHashes.size(), Is.is(ITERATIONS + evictionCount)); + // test that we got evicted chains + assertThat(store1InvalidatedChains.size(), greaterThan(0)); + assertThat(store2InvalidatedChains.size(), is(store1InvalidatedChains.size())); + + assertThatClientsWaitingForInvalidationIsEmpty("testInvalidationsContainChains"); + assertThat(store1InvalidatedAll.get(), is(false)); + assertThat(store2InvalidatedAll.get(), is(false)); + } + + @Test + public void testAppendFireEvents() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAppendFireEvents", Consistency.EVENTUAL, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAppendFireEvents", Consistency.EVENTUAL, false); + + final List store1AppendedBuffers = new CopyOnWriteArrayList<>(); + final List store1Chains = new CopyOnWriteArrayList<>(); + final AtomicBoolean store1InvalidatedAll = new AtomicBoolean(); + final List store2AppendedBuffers = new CopyOnWriteArrayList<>(); + final List store2Chains = new CopyOnWriteArrayList<>(); + final AtomicBoolean store2InvalidatedAll = new AtomicBoolean(); + + EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testAppendFireEvents", clientEntity1, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + fail("should not be called"); + } + + @Override + public void onInvalidateAll() { + store1InvalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + // make sure the appended buffer is correctly sized + assertThat(appended.limit(), is(512)); + // make sure the chain's elements' buffers are correctly sized + for (Element element : beforeAppend) { + assertThat(element.getPayload().limit(), is(512)); + } + store1AppendedBuffers.add(appended); + store1Chains.add(beforeAppend); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + fail("should not be called"); + } + }); + serverStoreProxy1.enableEvents(true); + EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testAppendFireEvents", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + // make sure those only are cross-client invalidations and not server evictions + assertThat(evictedChain, is(nullValue())); + } + + @Override + public void onInvalidateAll() { + store2InvalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + // make sure the appended buffer is correctly sized + assertThat(appended.limit(), is(512)); + // make sure the chain's elements' buffers are correctly sized + for (Element element : beforeAppend) { + assertThat(element.getPayload().limit(), is(512)); + } + store2AppendedBuffers.add(appended); + store2Chains.add(beforeAppend); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + fail("should not be called"); + } + }); + serverStoreProxy2.enableEvents(true); + + serverStoreProxy1.append(1L, createPayload(1L, 512)); + Chain c = serverStoreProxy1.getAndAppend(1L, createPayload(2L, 512)); + assertThat(c.length(), is(1)); + + assertThatClientsWaitingForInvalidationIsEmpty("testAppendFireEvents"); + + assertThat(store1AppendedBuffers.size(), is(2)); + assertThat(store1AppendedBuffers.get(0).asLongBuffer().get(), is(1L)); + assertThat(store1AppendedBuffers.get(1).asLongBuffer().get(), is(2L)); + assertThat(store1Chains.size(), is(2)); + assertThat(store1Chains.get(0).length(), is(0)); + assertThat(store1Chains.get(1).length(), is(1)); + assertThat(store1InvalidatedAll.get(), is(false)); + assertThat(store2AppendedBuffers.size(), is(2)); + assertThat(store2AppendedBuffers.get(0).asLongBuffer().get(), is(1L)); + assertThat(store2AppendedBuffers.get(1).asLongBuffer().get(), is(2L)); + assertThat(store2Chains.size(), is(2)); + assertThat(store2Chains.get(0).length(), is(0)); + assertThat(store2Chains.get(1).length(), is(1)); + assertThat(store2InvalidatedAll.get(), is(false)); + } + + private static void assertThatClientsWaitingForInvalidationIsEmpty(String name) throws Exception { + ObservableClusterTierServerEntityService.ObservableClusterTierActiveEntity activeEntity = observableClusterTierService.getServedActiveEntitiesFor(name).get(0); + long now = System.currentTimeMillis(); + while (System.currentTimeMillis() < now + 5000 && activeEntity.getClientsWaitingForInvalidation().size() != 0); + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), Is.is(0)); + } + + @Test + public void testGetKeyNotPresent() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testGetKeyNotPresent", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testGetKeyNotPresent", clientEntity, mock(ServerCallback.class)); + + Chain chain = serverStoreProxy.get(1); + + assertThat(chain.isEmpty(), is(true)); + } + + @Test + public void testAppendKeyNotPresent() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testAppendKeyNotPresent", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testAppendKeyNotPresent", clientEntity, mock(ServerCallback.class)); + + serverStoreProxy.append(2, createPayload(2)); + + Chain chain = serverStoreProxy.get(2); + assertThat(chain.isEmpty(), is(false)); + assertThat(chain, hasPayloads(2L)); + } + + @Test + public void testGetAfterMultipleAppendsOnSameKey() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testGetAfterMultipleAppendsOnSameKey", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testGetAfterMultipleAppendsOnSameKey", clientEntity, mock(ServerCallback.class)); + + serverStoreProxy.append(3L, createPayload(3L)); + serverStoreProxy.append(3L, createPayload(33L)); + serverStoreProxy.append(3L, createPayload(333L)); + + Chain chain = serverStoreProxy.get(3L); + + assertThat(chain.isEmpty(), is(false)); + + assertThat(chain, hasPayloads(3L, 33L, 333l)); + } + + @Test + public void testGetAndAppendKeyNotPresent() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testGetAndAppendKeyNotPresent", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testGetAndAppendKeyNotPresent", clientEntity, mock(ServerCallback.class)); + Chain chain = serverStoreProxy.getAndAppend(4L, createPayload(4L)); + + assertThat(chain.isEmpty(), is(true)); + + chain = serverStoreProxy.get(4L); + + assertThat(chain.isEmpty(), is(false)); + assertThat(chain, hasPayloads(4L)); + } + + @Test + public void testGetAndAppendMultipleTimesOnSameKey() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testGetAndAppendMultipleTimesOnSameKey", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testGetAndAppendMultipleTimesOnSameKey", clientEntity, mock(ServerCallback.class)); + serverStoreProxy.getAndAppend(5L, createPayload(5L)); + serverStoreProxy.getAndAppend(5L, createPayload(55L)); + serverStoreProxy.getAndAppend(5L, createPayload(555L)); + Chain chain = serverStoreProxy.getAndAppend(5l, createPayload(5555L)); + + assertThat(chain.isEmpty(), is(false)); + assertThat(chain, hasPayloads(5L, 55L, 555L)); + } + + @Test + public void testReplaceAtHeadSuccessFull() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testReplaceAtHeadSuccessFull", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testReplaceAtHeadSuccessFull", clientEntity, mock(ServerCallback.class)); + serverStoreProxy.append(20L, createPayload(200L)); + serverStoreProxy.append(20L, createPayload(2000L)); + serverStoreProxy.append(20L, createPayload(20000L)); + + Chain expect = serverStoreProxy.get(20L); + Chain update = chainOf(createPayload(400L)); + + serverStoreProxy.replaceAtHead(20l, expect, update); + + Chain afterReplace = serverStoreProxy.get(20L); + assertThat(afterReplace, hasPayloads(400L)); + + serverStoreProxy.append(20L, createPayload(4000L)); + serverStoreProxy.append(20L, createPayload(40000L)); + + serverStoreProxy.replaceAtHead(20L, afterReplace, chainOf(createPayload(800L))); + + Chain anotherReplace = serverStoreProxy.get(20L); + + assertThat(anotherReplace, hasPayloads(800L, 4000L, 40000L)); + } + + @Test + public void testClear() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testClear", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testClear", clientEntity, mock(ServerCallback.class)); + serverStoreProxy.append(1L, createPayload(100L)); + + serverStoreProxy.clear(); + Chain chain = serverStoreProxy.get(1); + assertThat(chain.isEmpty(), is(true)); + } + + @Test + public void testResolveRequestIsProcessedAtThreshold() throws Exception { + ByteBuffer buffer = createPayload(42L); + + ClusterTierClientEntity clientEntity = createClientEntity("testResolveRequestIsProcessed", Consistency.EVENTUAL, true); + ServerCallback serverCallback = mock(ServerCallback.class); + doAnswer(inv -> { + ServerStoreProxy.ChainEntry entry = inv.getArgument(0); + entry.replaceAtHead(chainOf(buffer.duplicate())); + return null; + }).when(serverCallback).compact(any(ServerStoreProxy.ChainEntry.class), any(long.class)); + + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testResolveRequestIsProcessed", clientEntity, serverCallback); + + for (int i = 0; i < 8; i++) { + serverStoreProxy.append(1L, buffer.duplicate()); + } + verify(serverCallback, never()).compact(any(ServerStoreProxy.ChainEntry.class)); + assertThat(serverStoreProxy.get(1L), hasPayloads(42L, 42L, 42L, 42L, 42L, 42L, 42L, 42L)); + + //trigger compaction at > 8 entries + serverStoreProxy.append(1L, buffer.duplicate()); + verify(serverCallback).compact(any(ServerStoreProxy.ChainEntry.class), any(long.class)); + assertThat(serverStoreProxy.get(1L), hasPayloads(42L)); + } + + @Test + public void testEmptyStoreIteratorIsEmpty() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testEmptyStoreIteratorIsEmpty", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testEmptyStoreIteratorIsEmpty", clientEntity, mock(ServerCallback.class)); + + Iterator> iterator = serverStoreProxy.iterator(); + + assertThat(iterator.hasNext(), is(false)); + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testSingleChainIterator() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testSingleChainIterator", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testSingleChainIterator", clientEntity, mock(ServerCallback.class)); + + serverStoreProxy.append(1L, createPayload(42L)); + + Iterator> iterator = serverStoreProxy.iterator(); + + assertThat(iterator.hasNext(), is(true)); + Map.Entry next = iterator.next(); + assertThat(next.getKey(), is(1L)); + assertThat(next.getValue(), hasPayloads(42L)); + assertThat(iterator.hasNext(), is(false)); + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testSingleChainMultipleElements() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testSingleChainMultipleElements", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testSingleChainMultipleElements", clientEntity, mock(ServerCallback.class)); + + serverStoreProxy.append(1L, createPayload(42L)); + serverStoreProxy.append(1L, createPayload(43L)); + + Iterator> iterator = serverStoreProxy.iterator(); + + assertThat(iterator.hasNext(), is(true)); + Map.Entry next = iterator.next(); + assertThat(next.getKey(), is(1L)); + assertThat(next.getValue(), hasPayloads(42L, 43L)); + assertThat(iterator.hasNext(), CoreMatchers.is(false)); + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testMultipleChains() throws Exception { + ClusterTierClientEntity clientEntity = createClientEntity("testMultipleChains", Consistency.EVENTUAL, true); + CommonServerStoreProxy serverStoreProxy = new CommonServerStoreProxy("testMultipleChains", clientEntity, mock(ServerCallback.class)); + + serverStoreProxy.append(1L, createPayload(42L)); + serverStoreProxy.append(2L, createPayload(43L)); + + Iterator> iterator = serverStoreProxy.iterator(); + + Matcher chainOne = hasPayloads(42L); + Matcher chainTwo = hasPayloads(43L); + + assertThat(iterator.hasNext(), CoreMatchers.is(true)); + + Chain next = iterator.next().getValue(); + assertThat(next, either(chainOne).or(chainTwo)); + + if (chainOne.matches(next)) { + assertThat(iterator.hasNext(), is(true)); + assertThat(iterator.next().getValue(), is(chainTwo)); + assertThat(iterator.hasNext(), is(false)); + } else { + assertThat(iterator.hasNext(), is(true)); + assertThat(iterator.next().getValue(), is(chainOne)); + assertThat(iterator.hasNext(), is(false)); + } + + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java new file mode 100644 index 0000000000..776fe2c93a --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java @@ -0,0 +1,255 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.Matchers; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.server.store.ObservableClusterTierServerEntityService.ObservableClusterTierActiveEntity; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; + +public class EventualServerStoreProxyTest extends AbstractServerStoreProxyTest { + + @Test + public void testServerSideEvictionFiresInvalidations() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testServerSideEvictionFiresInvalidations", Consistency.EVENTUAL, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testServerSideEvictionFiresInvalidations", Consistency.EVENTUAL, false); + + final List store1InvalidatedHashes = new CopyOnWriteArrayList<>(); + final AtomicBoolean store1InvalidatedAll = new AtomicBoolean(); + final List store2InvalidatedHashes = new CopyOnWriteArrayList<>(); + final AtomicBoolean store2InvalidatedAll = new AtomicBoolean(); + + EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity1, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + store1InvalidatedHashes.add(hash); + } + + @Override + public void onInvalidateAll() { + store1InvalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + store2InvalidatedHashes.add(hash); + } + + @Override + public void onInvalidateAll() { + store2InvalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + } + }); + + final int ITERATIONS = 40; + for (int i = 0; i < ITERATIONS; i++) { + serverStoreProxy1.append(i, createPayload(i, 512 * 1024)); + } + + int evictionCount = 0; + int entryCount = 0; + for (int i = 0; i < ITERATIONS; i++) { + Chain elements1 = serverStoreProxy1.get(i); + Chain elements2 = serverStoreProxy2.get(i); + assertThat(elements1, Matchers.matchesChain(elements2)); + if (!elements1.isEmpty()) { + entryCount++; + } else { + evictionCount++; + } + } + + // there has to be server-side evictions, otherwise this test is useless + assertThat(store1InvalidatedHashes.size(), greaterThan(0)); + // test that each time the server evicted, the originating client got notified + assertThat(store1InvalidatedHashes.size(), is(ITERATIONS - entryCount)); + // test that each time the server evicted, the other client got notified on top of normal invalidations + assertThat(store2InvalidatedHashes.size(), is(ITERATIONS + evictionCount)); + + assertThatClientsWaitingForInvalidationIsEmpty("testServerSideEvictionFiresInvalidations"); + + assertThat(store1InvalidatedAll.get(), is(false)); + assertThat(store2InvalidatedAll.get(), is(false)); + } + + @Test + public void testHashInvalidationListenerWithAppend() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithAppend", Consistency.EVENTUAL, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithAppend", Consistency.EVENTUAL, false); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference invalidatedHash = new AtomicReference<>(); + final AtomicBoolean invalidatedAll = new AtomicBoolean(); + + + EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testHashInvalidationListenerWithAppend", clientEntity1, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + invalidatedHash.set(hash); + latch.countDown(); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity2, mock(ServerCallback.class)); + + serverStoreProxy2.append(1L, createPayload(1L)); + + latch.await(5, TimeUnit.SECONDS); + assertThat(invalidatedHash.get(), is(1L)); + assertThatClientsWaitingForInvalidationIsEmpty("testHashInvalidationListenerWithAppend"); + assertThat(invalidatedAll.get(), is(false)); + } + + @Test + public void testHashInvalidationListenerWithGetAndAppend() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", Consistency.EVENTUAL, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", Consistency.EVENTUAL, false); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference invalidatedHash = new AtomicReference<>(); + final AtomicBoolean invalidatedAll = new AtomicBoolean(); + + + EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity1, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + invalidatedHash.set(hash); + latch.countDown(); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity2, mock(ServerCallback.class)); + + serverStoreProxy2.getAndAppend(1L, createPayload(1L)); + + latch.await(5, TimeUnit.SECONDS); + assertThat(invalidatedHash.get(), is(1L)); + assertThatClientsWaitingForInvalidationIsEmpty("testHashInvalidationListenerWithGetAndAppend"); + assertThat(invalidatedAll.get(), is(false)); + } + + @Test + public void testAllInvalidationListener() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAllInvalidationListener", Consistency.EVENTUAL, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAllInvalidationListener", Consistency.EVENTUAL, false); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicBoolean invalidatedAll = new AtomicBoolean(); + + EventualServerStoreProxy serverStoreProxy1 = new EventualServerStoreProxy("testAllInvalidationListener", clientEntity1, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + throw new AssertionError("Should not be called"); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + latch.countDown(); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + EventualServerStoreProxy serverStoreProxy2 = new EventualServerStoreProxy("testAllInvalidationListener", clientEntity2, mock(ServerCallback.class)); + + serverStoreProxy2.clear(); + + latch.await(5, TimeUnit.SECONDS); + assertThat(invalidatedAll.get(), is(true)); + assertThatClientsWaitingForInvalidationIsEmpty("testAllInvalidationListener"); + } + + private static void assertThatClientsWaitingForInvalidationIsEmpty(String name) throws Exception { + ObservableClusterTierActiveEntity activeEntity = observableClusterTierService.getServedActiveEntitiesFor(name).get(0); + long now = System.currentTimeMillis(); + while (System.currentTimeMillis() < now + 5000 && activeEntity.getClientsWaitingForInvalidation().size() != 0); + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/MultiThreadedStrongServerStoreProxyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/MultiThreadedStrongServerStoreProxyTest.java new file mode 100644 index 0000000000..344567685c --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/MultiThreadedStrongServerStoreProxyTest.java @@ -0,0 +1,111 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.serialization.LongSerializer; +import org.junit.Assert; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; + +public class MultiThreadedStrongServerStoreProxyTest extends AbstractServerStoreProxyTest { + + private static final String ENTITY_NAME = "testConcurrentHashInvalidationWithAppend"; + private static final int MAX_WAIT_TIME_SECONDS = 30; + + private static ServerStoreConfiguration getServerStoreConfiguration() throws Exception { + ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4L, MemoryUnit.MB); + + return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), + Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class + .getName(), Consistency.STRONG, false); + } + + @Test + public void testConcurrentHashInvalidationListenerWithAppend() throws Exception { + final AtomicReference invalidatedHash = new AtomicReference<>(); + SimpleClusterTierClientEntity clientEntity1 = createClientEntity(ENTITY_NAME, getServerStoreConfiguration(), true, true); + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy(ENTITY_NAME, clientEntity1, mock(ServerCallback.class)); + AtomicBoolean invalidatedAll = new AtomicBoolean(); + + ExecutorService executor = Executors.newSingleThreadExecutor(); + CountDownLatch beforeValidationLatch = new CountDownLatch(1); + CountDownLatch afterValidationLatch = new CountDownLatch(1); + executor.submit(() -> { + try { + SimpleClusterTierClientEntity clientEntity2 = createClientEntity(ENTITY_NAME, getServerStoreConfiguration(), false, false); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy(ENTITY_NAME, clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + invalidatedHash.set(hash); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + throw new AssertionError("Should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + // avoid a warning + assertNotNull(serverStoreProxy2); + assertTrue(beforeValidationLatch.await(MAX_WAIT_TIME_SECONDS, TimeUnit.SECONDS)); + clientEntity2.validate(getServerStoreConfiguration()); + afterValidationLatch.countDown(); + } catch (Exception e) { + Assert.fail("Unexpected Exception " + e.getMessage()); + } + }); + + serverStoreProxy1.append(1L, createPayload(1L)); + assertNull(invalidatedHash.get()); + beforeValidationLatch.countDown(); + assertTrue(afterValidationLatch.await(MAX_WAIT_TIME_SECONDS, TimeUnit.SECONDS)); + serverStoreProxy1.append(1L, createPayload(1L)); + assertThat(invalidatedHash.get(), is(1L)); + assertThat(invalidatedAll.get(), is(false)); + executor.shutdownNow(); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ReconnectingServerStoreProxyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ReconnectingServerStoreProxyTest.java new file mode 100644 index 0000000000..7ab175942c --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/ReconnectingServerStoreProxyTest.java @@ -0,0 +1,77 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.terracotta.exception.ConnectionClosedException; + +import java.nio.ByteBuffer; + +import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.doThrow; + +public class ReconnectingServerStoreProxyTest { + + @Rule + public MockitoRule mockitoRule = MockitoJUnit.rule(); + + @Mock + ServerStoreProxy proxy; + + @Mock + Runnable runnable; + + private final ServerStoreProxyException storeProxyException = new ServerStoreProxyException(new ConnectionClosedException("Connection Closed")); + + @InjectMocks + ReconnectingServerStoreProxy serverStoreProxy; + + @Test + public void testAppend() throws Exception { + doThrow(storeProxyException).when(proxy).append(anyLong(), any(ByteBuffer.class)); + + assertThrows(ReconnectInProgressException.class, () -> serverStoreProxy.append(0, ByteBuffer.allocate(2))); + } + + @Test + public void testGetAndAppend() throws Exception { + doThrow(storeProxyException).when(proxy).getAndAppend(anyLong(), any(ByteBuffer.class)); + + assertThrows(ReconnectInProgressException.class, () -> serverStoreProxy.getAndAppend(0, ByteBuffer.allocate(2))); + } + + @Test + public void testGet() throws Exception { + + doThrow(storeProxyException).when(proxy).get(anyLong()); + + assertThrows(ReconnectInProgressException.class, () -> serverStoreProxy.get(0)); + } + + @Test + public void testIterator() throws Exception { + doThrow(storeProxyException).when(proxy).iterator(); + + assertThrows(ReconnectInProgressException.class, () -> serverStoreProxy.iterator()); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java new file mode 100644 index 0000000000..feb10da5b5 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java @@ -0,0 +1,466 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.client.config.Timeouts; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.store.Chain; +import org.junit.Test; +import org.terracotta.exception.ConnectionClosedException; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.Matchers.matchesChain; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class StrongServerStoreProxyTest extends AbstractServerStoreProxyTest { + + @Test + public void testServerSideEvictionFiresInvalidations() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testServerSideEvictionFiresInvalidations", Consistency.STRONG, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testServerSideEvictionFiresInvalidations", Consistency.STRONG, false); + + final List store1InvalidatedHashes = new CopyOnWriteArrayList<>(); + final AtomicBoolean store1InvalidatedAll = new AtomicBoolean(); + final List store2InvalidatedHashes = new CopyOnWriteArrayList<>(); + final AtomicBoolean store2InvalidatedAll = new AtomicBoolean(); + + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity1, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + store1InvalidatedHashes.add(hash); + } + + @Override + public void onInvalidateAll() { + store1InvalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + store2InvalidatedHashes.add(hash); + } + + @Override + public void onInvalidateAll() { + store2InvalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + } + }); + + final int ITERATIONS = 40; + for (int i = 0; i < ITERATIONS; i++) { + serverStoreProxy1.append(i, createPayload(i, 512 * 1024)); + } + + int evictionCount = 0; + int entryCount = 0; + for (int i = 0; i < ITERATIONS; i++) { + Chain elements1 = serverStoreProxy1.get(i); + Chain elements2 = serverStoreProxy2.get(i); + assertThat(elements2, matchesChain(elements2)); + if (!elements1.isEmpty()) { + entryCount++; + } else { + evictionCount++; + } + } + + // there has to be server-side evictions, otherwise this test is useless + assertThat(store1InvalidatedHashes.size(), greaterThan(0)); + // test that each time the server evicted, the originating client got notified + assertThat(store1InvalidatedHashes.size(), is(ITERATIONS - entryCount)); + // test that each time the server evicted, the other client got notified on top of normal invalidations + assertThat(store2InvalidatedHashes.size(), is(ITERATIONS + evictionCount)); + assertThat(store1InvalidatedAll.get(), is(false)); + assertThat(store2InvalidatedAll.get(), is(false)); + } + + @Test + public void testHashInvalidationListenerWithAppend() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithAppend", Consistency.STRONG, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithAppend", Consistency.STRONG, false); + + final AtomicReference invalidatedHash = new AtomicReference<>(); + final AtomicBoolean invalidatedAll = new AtomicBoolean(); + + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testHashInvalidationListenerWithAppend", clientEntity1, mock(ServerCallback.class)); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testHashInvalidationListenerWithAppend", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + invalidatedHash.set(hash); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + + serverStoreProxy1.append(1L, createPayload(1L)); + + assertThat(invalidatedHash.get(), is(1L)); + assertThat(invalidatedAll.get(), is(false)); + } + + @Test + public void testConcurrentHashInvalidationListenerWithAppend() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testConcurrentHashInvalidationListenerWithAppend", Consistency.STRONG, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testConcurrentHashInvalidationListenerWithAppend", Consistency.STRONG, false); + + final AtomicBoolean invalidating = new AtomicBoolean(); + final AtomicBoolean invalidatedAll = new AtomicBoolean(); + final CountDownLatch latch = new CountDownLatch(2); + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testConcurrentHashInvalidationListenerWithAppend", clientEntity1, mock(ServerCallback.class)); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testConcurrentHashInvalidationListenerWithAppend", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + if (!invalidating.compareAndSet(false, true)) { + fail("Both threads entered the listener concurrently"); + } + try { + Thread.sleep(100); + } catch (InterruptedException ie) { + throw new AssertionError(ie); + } + invalidating.set(false); + latch.countDown(); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + + ExecutorService executor = Executors.newCachedThreadPool(); + try { + executor.submit(() -> { + serverStoreProxy1.append(1L, createPayload(1L)); + return null; + }); + executor.submit(() -> { + serverStoreProxy1.append(1L, createPayload(1L)); + return null; + }); + + if (!latch.await(5, TimeUnit.SECONDS)) { + fail("Both listeners were not called"); + } + } finally { + executor.shutdown(); + } + assertThat(invalidatedAll.get(), is(false)); + } + + @Test + public void testHashInvalidationListenerWithGetAndAppend() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", Consistency.STRONG, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", Consistency.STRONG, false); + + final AtomicReference invalidatedHash = new AtomicReference<>(); + final AtomicBoolean invalidatedAll = new AtomicBoolean(); + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity1, mock(ServerCallback.class)); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + invalidatedHash.set(hash); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + + serverStoreProxy1.getAndAppend(1L, createPayload(1L)); + + assertThat(invalidatedHash.get(), is(1L)); + assertThat(invalidatedAll.get(), is(false)); + } + + @Test + public void testAllInvalidationListener() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAllInvalidationListener", Consistency.STRONG, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAllInvalidationListener", Consistency.STRONG, false); + + final AtomicBoolean invalidatedAll = new AtomicBoolean(); + + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testAllInvalidationListener", clientEntity1, mock(ServerCallback.class)); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testAllInvalidationListener", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + throw new AssertionError("Should not be called"); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + + serverStoreProxy1.clear(); + + assertThat(invalidatedAll.get(), is(true)); + } + + @Test + public void testConcurrentAllInvalidationListener() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testConcurrentAllInvalidationListener", Consistency.STRONG, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testConcurrentAllInvalidationListener", Consistency.STRONG, false); + + final AtomicBoolean invalidating = new AtomicBoolean(); + final CountDownLatch latch = new CountDownLatch(2); + + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testConcurrentAllInvalidationListener", clientEntity1, mock(ServerCallback.class)); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testConcurrentAllInvalidationListener", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + throw new AssertionError("Should not be called"); + } + + @Override + public void onInvalidateAll() { + if (!invalidating.compareAndSet(false, true)) { + fail("Both threads entered the listener concurrently"); + } + try { + Thread.sleep(100); + } catch (InterruptedException ie) { + throw new AssertionError(ie); + } + invalidating.set(false); + latch.countDown(); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + + ExecutorService executor = Executors.newCachedThreadPool(); + try { + executor.submit(() -> { + serverStoreProxy1.clear(); + return null; + }); + executor.submit(() -> { + serverStoreProxy1.clear(); + return null; + }); + + if (!latch.await(5, TimeUnit.SECONDS)) { + fail("Both listeners were not called"); + } + } finally { + executor.shutdown(); + } + } + + @Test + public void testAppendInvalidationUnblockedByDisconnection() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAppendInvalidationUnblockedByDisconnection", Consistency.STRONG, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAppendInvalidationUnblockedByDisconnection", Consistency.STRONG, false); + + final AtomicBoolean invalidatedAll = new AtomicBoolean(); + + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testAppendInvalidationUnblockedByDisconnection", clientEntity1, mock(ServerCallback.class)); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testAppendInvalidationUnblockedByDisconnection", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + clientEntity1.fireDisconnectionEvent(); + } + + @Override + public void onInvalidateAll() { + invalidatedAll.set(true); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + + try { + serverStoreProxy1.append(1L, createPayload(1L)); + fail("expected RuntimeException"); + } catch (RuntimeException re) { + assertThat(re.getCause(), instanceOf(IllegalStateException.class)); + } + assertThat(invalidatedAll.get(), is(false)); + } + + @Test + public void testClearInvalidationUnblockedByDisconnection() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testClearInvalidationUnblockedByDisconnection", Consistency.STRONG, true); + SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testClearInvalidationUnblockedByDisconnection", Consistency.STRONG, false); + + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testClearInvalidationUnblockedByDisconnection", clientEntity1, mock(ServerCallback.class)); + StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testClearInvalidationUnblockedByDisconnection", clientEntity2, new ServerCallback() { + @Override + public void onInvalidateHash(long hash, Chain evictedChain) { + throw new AssertionError("Should not be called"); + } + + @Override + public void onInvalidateAll() { + clientEntity1.fireDisconnectionEvent(); + } + + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + fail("should not be called"); + } + + @Override + public void compact(ServerStoreProxy.ChainEntry chain) { + throw new AssertionError(); + } + }); + + try { + serverStoreProxy1.clear(); + fail("expected RuntimeException"); + } catch (RuntimeException re) { + assertThat(re.getCause(), instanceOf(IllegalStateException.class)); + } + } + + @Test + public void testAppendThrowsConnectionClosedExceptionDuringHashInvalidation() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = mock(SimpleClusterTierClientEntity.class); + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testAppendThrowsConnectionClosedExceptionDuringHashInvalidation", clientEntity1, mock(ServerCallback.class)); + doThrow(new ConnectionClosedException("Test")).when(clientEntity1).invokeAndWaitForReceive(any(), anyBoolean()); + when(clientEntity1.getTimeouts()).thenReturn(Timeouts.DEFAULT); + when(clientEntity1.isConnected()).thenReturn(true); + try { + serverStoreProxy1.append(1L, createPayload(1L)); + fail("Expected ServerStoreProxyException"); + } catch (ServerStoreProxyException e) { + assertThat(e.getCause(), instanceOf(ConnectionClosedException.class)); + } catch (RuntimeException e) { + fail("Expected ServerStoreProxyException"); + } + } + + @Test + public void testClearThrowsConnectionClosedExceptionDuringAllInvaildation() throws Exception { + SimpleClusterTierClientEntity clientEntity1 = mock(SimpleClusterTierClientEntity.class); + StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testClearThrowsConnectionClosedExceptionDuringAllInvaildation", clientEntity1, mock(ServerCallback.class)); + doThrow(new ConnectionClosedException("Test")).when(clientEntity1).invokeAndWaitForRetired(any(), anyBoolean()); + when(clientEntity1.getTimeouts()).thenReturn(Timeouts.DEFAULT); + when(clientEntity1.isConnected()).thenReturn(true); + try { + serverStoreProxy1.clear(); + fail("Expected ServerStoreProxyException"); + } catch (ServerStoreProxyException e) { + assertThat(e.getCause(), instanceOf(ConnectionClosedException.class)); + } catch (RuntimeException e) { + fail("Expected ServerStoreProxyException"); + } + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/lock/LockManagerTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/lock/LockManagerTest.java new file mode 100644 index 0000000000..de51c9f1e3 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/lock/LockManagerTest.java @@ -0,0 +1,139 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store.lock; + +import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity; +import org.ehcache.clustered.client.internal.store.ServerStoreProxyException; +import org.ehcache.clustered.common.internal.exceptions.UnknownClusterException; +import org.ehcache.clustered.common.internal.messages.ClusterTierReconnectMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.LockSuccess; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.LockMessage; +import org.ehcache.clustered.common.internal.store.Chain; +import org.junit.Test; +import org.mockito.ArgumentCaptor; + +import java.nio.ByteBuffer; +import java.util.Set; +import java.util.concurrent.TimeoutException; + +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.lockFailure; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.Mockito.*; + +public class LockManagerTest { + + @Test + public void testLock() throws Exception { + ClusterTierClientEntity clusterTierClientEntity = mock(ClusterTierClientEntity.class); + + LockManager lockManager = new LockManager(clusterTierClientEntity); + + LockSuccess lockSuccess = getLockSuccessResponse(); + + when(clusterTierClientEntity.invokeAndWaitForComplete(any(LockMessage.class), anyBoolean())) + .thenReturn(lockSuccess); + + Chain lock = lockManager.lock(2L); + + assertThat(lock, notNullValue()); + assertThat(lock.length(), is(3)); + + } + + @Test + public void testLockWhenException() throws Exception { + ClusterTierClientEntity clusterTierClientEntity = mock(ClusterTierClientEntity.class); + + LockManager lockManager = new LockManager(clusterTierClientEntity); + + when(clusterTierClientEntity.invokeAndWaitForComplete(any(LockMessage.class), anyBoolean())) + .thenThrow(new UnknownClusterException(""), new TimeoutException("timed out test")); + + try { + lockManager.lock(2L); + fail(); + } catch (ServerStoreProxyException sspe) { + assertThat(sspe.getCause(), instanceOf(UnknownClusterException.class)); + } + + try { + lockManager.lock(2L); + fail(); + } catch (TimeoutException e) { + assertThat(e.getMessage(), is("timed out test")); + } + + } + + @Test + public void testLockWhenFailure() throws Exception { + ClusterTierClientEntity clusterTierClientEntity = mock(ClusterTierClientEntity.class); + + LockManager lockManager = new LockManager(clusterTierClientEntity); + + LockSuccess lockSuccess = getLockSuccessResponse(); + + when(clusterTierClientEntity.invokeAndWaitForComplete(any(LockMessage.class), anyBoolean())) + .thenReturn(lockFailure(), lockFailure(), lockFailure(), lockSuccess); + + Chain lock = lockManager.lock(2L); + + assertThat(lock, notNullValue()); + assertThat(lock.length(), is(3)); + } + + @SuppressWarnings("unchecked") + @Test + public void testUnlockClearsLocksHeldState() throws Exception { + ClusterTierClientEntity clusterTierClientEntity = mock(ClusterTierClientEntity.class); + LockManager lockManager = new LockManager(clusterTierClientEntity); + + LockSuccess lockSuccess = getLockSuccessResponse(); + when(clusterTierClientEntity.invokeAndWaitForComplete(any(LockMessage.class), anyBoolean())) + .thenReturn(lockSuccess); + + Chain lock = lockManager.lock(2L); + lockManager.unlock(2L, false); + + ClusterTierReconnectMessage reconnectMessage = mock(ClusterTierReconnectMessage.class); + ArgumentCaptor> locks = ArgumentCaptor.forClass(Set.class); + doNothing().when(reconnectMessage).addLocksHeld(locks.capture()); + lockManager.reconnectListener(reconnectMessage); + assertThat(locks.getValue().size(), is(0)); + + } + + private LockSuccess getLockSuccessResponse() { + ByteBuffer[] buffers = new ByteBuffer[3]; + for (int i = 0; i < 3; i++) { + buffers[i] = createPayload(i + 1); + } + + Chain chain = chainOf(buffers); + + return EhcacheEntityResponse.lockSuccess(chain); + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/lock/LockRetentionDuringFailoverTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/lock/LockRetentionDuringFailoverTest.java new file mode 100644 index 0000000000..c443c8776f --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/internal/store/lock/LockRetentionDuringFailoverTest.java @@ -0,0 +1,159 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.client.internal.store.lock; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityService; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; +import org.ehcache.clustered.client.internal.store.ClusterTierClientEntityService; +import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; +import org.ehcache.clustered.server.ObservableEhcacheServerEntityService; +import org.ehcache.clustered.server.store.ObservableClusterTierServerEntityService; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.offheapresource.OffHeapResourcesProvider; +import org.terracotta.offheapresource.config.MemoryUnit; +import org.terracotta.passthrough.PassthroughClusterControl; +import org.terracotta.passthrough.PassthroughTestHelpers; + +import java.net.URI; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class LockRetentionDuringFailoverTest { + + private static final String STRIPENAME = "stripe"; + private static final String STRIPE_URI = "passthrough://" + STRIPENAME; + + private PassthroughClusterControl clusterControl; + + private CountDownLatch latch; + private LatchedLoaderWriter loaderWriter; + private CacheManager cacheManager; + private Cache cache; + + @Before + public void setUp() throws Exception { + this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, + server -> { + server.registerServerEntityService(new ObservableEhcacheServerEntityService()); + server.registerClientEntityService(new ClusterTierManagerClientEntityService()); + server.registerServerEntityService(new ObservableClusterTierServerEntityService()); + server.registerClientEntityService(new ClusterTierClientEntityService()); + server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); + server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); + server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + + UnitTestConnectionService.addServerToStripe(STRIPENAME, server); + } + ); + + clusterControl.waitForActive(); + clusterControl.waitForRunningPassivesInStandby(); + + this.latch = new CountDownLatch(1); + this.loaderWriter = new LatchedLoaderWriter(latch); + + CacheConfiguration config = CacheConfigurationBuilder + .newCacheConfigurationBuilder(Long.class, String.class, + newResourcePoolsBuilder() + .with(clusteredDedicated("test", 2, org.ehcache.config.units.MemoryUnit.MB))) + .withLoaderWriter(loaderWriter) + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder().with(cluster(URI.create(STRIPE_URI)).autoCreate(c -> c)) + .withCache("cache-1", config) + .build(true); + + cache = cacheManager.getCache("cache-1", Long.class, String.class); + + } + + @After + public void tearDown() throws Exception { + try { + cacheManager.close(); + } finally { + UnitTestConnectionService.removeStripe(STRIPENAME); + clusterControl.tearDown(); + } + } + + @Test + public void testLockRetentionDuringFailover() throws Exception { + + ExecutorService executorService = Executors.newFixedThreadPool(1); + Future putFuture = executorService.submit(() -> cache.put(1L, "one")); + + clusterControl.terminateActive(); + clusterControl.waitForActive(); + + assertThat(loaderWriter.backingMap.isEmpty(), is(true)); + + latch.countDown(); + + putFuture.get(); + + assertThat(loaderWriter.backingMap.get(1L), is("one")); + + } + + private static class LatchedLoaderWriter implements CacheLoaderWriter { + + ConcurrentHashMap backingMap = new ConcurrentHashMap<>(); + private final CountDownLatch latch; + + LatchedLoaderWriter(CountDownLatch latch) { + this.latch = latch; + } + + @Override + public String load(Long key) throws Exception { + latch.await(); + return backingMap.get(key); + } + + @Override + public void write(Long key, String value) throws Exception { + latch.await(); + backingMap.put(key, value); + } + + @Override + public void delete(Long key) throws Exception { + latch.await(); + backingMap.remove(key); + } + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java similarity index 89% rename from clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java index 20b4d1249d..4094626ddd 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java @@ -29,13 +29,12 @@ import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.common.internal.store.Element; import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.ObservableEhcacheServerEntityService; import org.ehcache.clustered.server.store.ObservableClusterTierServerEntityService; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.core.store.StoreConfigurationImpl; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -46,18 +45,16 @@ import org.terracotta.passthrough.PassthroughTestHelpers; import java.net.URI; -import java.util.ArrayList; -import java.util.List; import java.util.Map; import java.util.function.Predicate; +import java.util.stream.Collectors; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.ehcache.clustered.common.internal.store.Util.getChain; -import static org.ehcache.clustered.common.internal.store.Util.getElement; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.mockito.Mockito.mock; @@ -105,7 +102,7 @@ public void setUp() throws Exception { ClusteringServiceConfiguration configuration = ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) - .autoCreate() + .autoCreate(c -> c) .build(); service = new ClusteringServiceFactory().create(configuration); @@ -142,8 +139,7 @@ public void messageTrackedAndRemovedWhenClientLeaves() throws Exception { storeProxy.getAndAppend(42L, createPayload(42L)); - Map responses = activeMessageHandler.getTrackedResponsesForSegment(KEY_ENDS_UP_IN_SEGMENT_11, activeMessageHandler.getTrackedClients().findFirst().get()); - assertThat(responses).hasSize(1); // should now track one message + assertThat(activeMessageHandler.getRecordedMessages().collect(Collectors.toList())).hasSize(1); // should now track one message assertThat(activeEntity.getConnectedClients()).hasSize(1); // make sure we currently have one client attached @@ -161,11 +157,8 @@ public void untrackedMessageAreNotStored() throws Exception { // Nothing tracked assertThat(activeMessageHandler.getTrackedClients().count()).isZero(); - List elements = new ArrayList<>(1); - elements.add(getElement(createPayload(44L))); - // Send a replace message, those are not tracked - storeProxy.replaceAtHead(44L, getChain(elements), getChain(new ArrayList<>(0))); + storeProxy.replaceAtHead(44L, chainOf(createPayload(44L)), chainOf()); // Not tracked as well storeProxy.get(42L); @@ -188,7 +181,8 @@ public void trackedMessagesReplicatedToPassive() throws Exception { assertThat(passiveMessageHandler.getTrackedClients().count()).isEqualTo(1L); // one client tracked - Map responses = passiveMessageHandler.getTrackedResponsesForSegment(KEY_ENDS_UP_IN_SEGMENT_11, passiveMessageHandler.getTrackedClients().findFirst().get()); + Map responses = activeMessageHandler.getRecordedMessages().filter(r->r.getClientSourceId().toLong() == activeMessageHandler.getTrackedClients().findFirst().get().toLong()) + .collect(Collectors.toMap(r->r.getTransactionId(), r->r.getResponse())); assertThat(responses).hasSize(1); // one message should have sync } @@ -198,7 +192,8 @@ public void messageTrackedAndRemovedByPassiveWhenClientLeaves() throws Exception storeProxy.getAndAppend(42L, createPayload(42L)); - Map responses = passiveMessageHandler.getTrackedResponsesForSegment(KEY_ENDS_UP_IN_SEGMENT_11, passiveMessageHandler.getTrackedClients().findFirst().get()); + Map responses = activeMessageHandler.getRecordedMessages().filter(r->r.getClientSourceId().toLong() == activeMessageHandler.getTrackedClients().findFirst().get().toLong()) + .collect(Collectors.toMap(r->r.getTransactionId(), r->r.getResponse())); assertThat(responses).hasSize(1); // should now track one message service.stop(); // stop the service. It will remove the client diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java similarity index 98% rename from clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java index 99ed6e65ab..c7f6ab26d0 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java @@ -52,6 +52,6 @@ public static ServerStoreConfiguration getServerStoreConfiguration(String resour ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 8, MB); return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), String.class.getName(), String.class.getName(), CompactJavaSerializer.class.getName(), CompactJavaSerializer.class - .getName(), Consistency.STRONG); + .getName(), Consistency.STRONG, false); } } diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/AbstractChainResolverTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/AbstractChainResolverTest.java new file mode 100644 index 0000000000..b9f1d55306 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/AbstractChainResolverTest.java @@ -0,0 +1,791 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.store.operations; + +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.common.internal.util.ChainBuilder; +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.spi.store.Store; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.serialization.LongSerializer; +import org.ehcache.impl.serialization.StringSerializer; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.junit.Test; +import org.mockito.ArgumentMatcher; +import org.mockito.ArgumentMatchers; + +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static java.util.Collections.emptyMap; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; +import static org.hamcrest.collection.IsIterableContainingInOrder.contains; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; + +public abstract class AbstractChainResolverTest { + + private static OperationsCodec codec = new OperationsCodec<>(new LongSerializer(), new StringSerializer()); + + protected abstract ChainResolver createChainResolver(ExpiryPolicy expiryPolicy, OperationsCodec codec); + + @Test + @SuppressWarnings("unchecked") + public void testResolveMaintainsOtherKeysInOrder() { + PutOperation expected = new PutOperation<>(1L, "Suresh", 0L); + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new PutOperation<>(2L, "Albin", 0L), + expected, + new PutOperation<>(2L, "Suresh", 0L), + new PutOperation<>(2L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder.get(), is(expected.getValue())); + verify(chain).replaceAtHead(argThat(contains( + operation(new PutOperation<>(2L, "Albin", 0L)), + operation(new PutOperation<>(2L, "Suresh", 0L)), + operation(new PutOperation<>(2L, "Matthew", 0L)), + operation(new PutOperation<>(1L, "Suresh", 0L))))); + } + + @Test + public void testResolveEmptyChain() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(); + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder, nullValue()); + verify(chain, never()).replaceAtHead(any()); + } + + @Test + public void testResolveChainWithNonExistentKey() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new PutOperation<>(2L, "Suresh", 0L), + new PutOperation<>(2L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 3L, 0L); + assertThat(valueHolder, nullValue()); + verify(chain, never()).replaceAtHead(any()); + } + + @Test + public void testResolveSinglePut() { + PutOperation expected = new PutOperation<>(1L, "Albin", 0L); + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(expected); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder.get(), is(expected.getValue())); + verify(chain, never()).replaceAtHead(any()); + } + + @Test + public void testResolvePutsOnly() { + PutOperation expected = new PutOperation<>(1L, "Matthew", 0L); + + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Suresh", 0L), + expected); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder.get(), is(expected.getValue())); + verify(chain).replaceAtHead(argThat(contains(operation(expected)))); + } + + @Test + public void testResolveSingleRemove() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder, nullValue()); + verify(chain).replaceAtHead(argThat(emptyIterable())); + } + + @Test + public void testResolveRemovesOnly() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new RemoveOperation<>(1L, 0L), + new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder, nullValue()); + verify(chain).replaceAtHead(argThat(emptyIterable())); + } + + @Test + public void testPutAndRemove() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder, nullValue()); + verify(chain).replaceAtHead(argThat(emptyIterable())); + } + + @Test + public void testResolvePutIfAbsentOnly() { + PutOperation expected = new PutOperation<>(1L, "Matthew", 0L); + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder.get(), is(expected.getValue())); + verify(chain, never()).replaceAtHead(any()); + } + + @Test + public void testResolvePutIfAbsentsOnly() { + PutOperation expected = new PutOperation<>(1L, "Albin", 0L); + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutIfAbsentOperation<>(1L, "Albin", 0L), + new PutIfAbsentOperation<>(1L, "Suresh", 0L), + new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder.get(), is(expected.getValue())); + verify(chain).replaceAtHead(argThat(contains(operation(expected)))); + } + + @Test + public void testResolvePutIfAbsentSucceeds() { + PutOperation expected = new PutOperation<>(1L, "Matthew", 0L); + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new RemoveOperation<>(1L, 0L), + new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder.get(), is(expected.getValue())); + verify(chain).replaceAtHead(argThat(contains(operation(expected)))); + } + + @Test + public void testResolveForSingleOperationDoesNotCompact() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(new PutOperation<>(1L, "Albin", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder.get(), is("Albin")); + verify(chain, never()).replaceAtHead(any()); + } + + @Test + @SuppressWarnings("unchecked") + public void testResolveForMultiplesOperationsAlwaysCompact() { + //create a random mix of operations + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutIfAbsentOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Suresh", 0L), + new PutOperation<>(1L, "Matthew", 0L), + new PutOperation<>(2L, "Melvin", 0L), + new ReplaceOperation<>(1L, "Joseph", 0L), + new RemoveOperation<>(2L, 0L), + new ConditionalRemoveOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Gregory", 0L), + new ConditionalReplaceOperation<>(1L, "Albin", "Abraham", 0L), + new RemoveOperation<>(1L, 0L), + new PutIfAbsentOperation<>(2L, "Albin", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 0L); + assertThat(valueHolder, nullValue()); + verify(chain).replaceAtHead(argThat(contains( + operation(new PutOperation<>(2L, "Melvin", 0L)), + operation(new RemoveOperation<>(2L, 0L)), + operation(new PutIfAbsentOperation<>(2L, "Albin", 0L)) + ))); + } + + @Test + public void testResolveDoesNotDecodeOtherKeyOperationValues() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(2L, "Albin", 0L), + new PutOperation<>(2L, "Suresh", 0L), + new PutOperation<>(2L, "Matthew", 0L)); + + CountingLongSerializer keySerializer = new CountingLongSerializer(); + CountingStringSerializer valueSerializer = new CountingStringSerializer(); + OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration(), customCodec); + resolver.resolve(chain, 1L, 0L); + + assertThat(keySerializer.decodeCount, is(3)); + assertThat(valueSerializer.decodeCount, is(0)); + assertThat(keySerializer.encodeCount, is(0)); + assertThat(valueSerializer.encodeCount, is(0)); //No operation to resolve + } + + @Test + public void testResolveDecodesOperationValueOnlyOnDemand() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 1), + new PutOperation<>(1L, "Suresh", 2), + new PutOperation<>(1L, "Matthew", 3)); + + CountingLongSerializer keySerializer = new CountingLongSerializer(); + CountingStringSerializer valueSerializer = new CountingStringSerializer(); + OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration(), customCodec); + resolver.resolve(chain, 1L, 0L); + + assertThat(keySerializer.decodeCount, is(3)); + assertThat(valueSerializer.decodeCount, is(1)); + assertThat(valueSerializer.encodeCount, is(0)); + assertThat(keySerializer.encodeCount, is(1)); //One encode from encoding the resolved operation's key + } + + @Test + @SuppressWarnings("unchecked") + public void testCompactingTwoKeys() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new PutOperation<>(2L, "Albin", 0L), + new PutOperation<>(1L, "Suresh", 0L), + new PutOperation<>(2L, "Suresh", 0L), + new PutOperation<>(2L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + + resolver.compact(chain); + + verify(chain).replaceAtHead(argThat(containsInAnyOrder( //@SuppressWarnings("unchecked") + operation(new PutOperation<>(2L, "Matthew", 0L)), + operation(new PutOperation<>(1L, "Suresh", 0L)) + ))); + } + + @Test + public void testCompactEmptyChain() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(); + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain, never()).replaceAtHead(any()); + } + + @Test + public void testCompactSinglePut() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L) + ); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + + verify(chain, never()).replaceAtHead(any()); + } + + @Test + public void testCompactMultiplePuts() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Suresh", 0L), + new PutOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "Matthew", 0L))))); + } + + @Test + public void testCompactSingleRemove() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain).replaceAtHead(argThat(emptyIterable())); + } + + @Test + public void testCompactMultipleRemoves() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new RemoveOperation<>(1L, 0L), + new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain).replaceAtHead(argThat(emptyIterable())); + } + + @Test + public void testCompactPutAndRemove() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain).replaceAtHead(argThat(emptyIterable())); + } + + @Test + public void testCompactSinglePutIfAbsent() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain, never()).replaceAtHead(any()); + } + + @Test + public void testCompactMultiplePutIfAbsents() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutIfAbsentOperation<>(1L, "Albin", 0L), + new PutIfAbsentOperation<>(1L, "Suresh", 0L), + new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "Albin", 0L))))); + } + + @Test + public void testCompactPutIfAbsentAfterRemove() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new RemoveOperation<>(1L, 0L), + new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "Matthew", 0L))))); + } + + @Test + public void testCompactForMultipleKeysAndOperations() { + //create a random mix of operations + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutIfAbsentOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Suresh", 0L), + new PutOperation<>(1L, "Matthew", 0L), + new PutOperation<>(2L, "Melvin", 0L), + new ReplaceOperation<>(1L, "Joseph", 0L), + new RemoveOperation<>(2L, 0L), + new ConditionalRemoveOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Gregory", 0L), + new ConditionalReplaceOperation<>(1L, "Albin", "Abraham", 0L), + new RemoveOperation<>(1L, 0L), + new PutIfAbsentOperation<>(2L, "Albin", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(2L, "Albin", 0L))))); + } + + @Test + public void testCompactHasCorrectTimeStamp() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0), + new PutOperation<>(1L, "Albin", 1), + new RemoveOperation<>(1L, 2), + new PutOperation<>(1L, "Albin", 3)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + resolver.compact(chain); + + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "Albin", 3))))); + } + + @Test + public void testCompactDecodesOperationValueOnlyOnDemand() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 1), + new PutOperation<>(1L, "Suresh", 2), + new PutOperation<>(1L, "Matthew", 3)); + + CountingLongSerializer keySerializer = new CountingLongSerializer(); + CountingStringSerializer valueSerializer = new CountingStringSerializer(); + OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration(), customCodec); + resolver.compact(chain); + + assertThat(keySerializer.decodeCount, is(3)); //Three decodes: one for each operation + assertThat(keySerializer.encodeCount, is(1)); //One encode from encoding the resolved operation's key + + assertThat(valueSerializer.decodeCount, is(0)); + assertThat(valueSerializer.encodeCount, is(0)); + } + + @Test + @SuppressWarnings("unchecked") + public void testResolvingTwoKeys() { + Chain chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new PutOperation<>(2L, "Albin", 0L), + new PutOperation<>(1L, "Suresh", 0L), + new PutOperation<>(2L, "Suresh", 0L), + new PutOperation<>(2L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + + Map> resolved = resolver.resolveAll(chain, 0L); + + assertThat(resolved.get(1L).get(), is("Suresh")); + assertThat(resolved.get(2L).get(), is("Matthew")); + } + + @Test + public void testFullResolveEmptyChain() { + Chain chain = (new ChainBuilder()).build(); + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved, is(emptyMap())); + } + + @Test + public void testFullResolveSinglePut() { + Chain chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L) + ); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + + assertThat(resolved.get(1L).get(), is("Albin")); + } + + @Test + public void testFullResolveMultiplePuts() { + Chain chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Suresh", 0L), + new PutOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved.get(1L).get(), is("Matthew")); + } + + @Test + public void testFullResolveSingleRemove() { + Chain chain = getEntryFromOperations(new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved, is(emptyMap())); + } + + @Test + public void testFullResolveMultipleRemoves() { + Chain chain = getEntryFromOperations( + new RemoveOperation<>(1L, 0L), + new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved, is(emptyMap())); + } + + @Test + public void testFullResolvePutAndRemove() { + Chain chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new RemoveOperation<>(1L, 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved, is(emptyMap())); + } + + @Test + public void testFullResolveSinglePutIfAbsent() { + Chain chain = getEntryFromOperations(new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved.get(1L).get(), is("Matthew")); + } + + @Test + public void testFullResolveMultiplePutIfAbsents() { + Chain chain = getEntryFromOperations( + new PutIfAbsentOperation<>(1L, "Albin", 0L), + new PutIfAbsentOperation<>(1L, "Suresh", 0L), + new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved.get(1L).get(), is("Albin")); + } + + @Test + public void testFullResolvePutIfAbsentAfterRemove() { + Chain chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0L), + new RemoveOperation<>(1L, 0L), + new PutIfAbsentOperation<>(1L, "Matthew", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved.get(1L).get(), is("Matthew")); + } + + @Test + public void testFullResolveForMultipleKeysAndOperations() { + //create a random mix of operations + Chain chain = getEntryFromOperations( + new PutIfAbsentOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Suresh", 0L), + new PutOperation<>(1L, "Matthew", 0L), + new PutOperation<>(2L, "Melvin", 0L), + new ReplaceOperation<>(1L, "Joseph", 0L), + new RemoveOperation<>(2L, 0L), + new ConditionalRemoveOperation<>(1L, "Albin", 0L), + new PutOperation<>(1L, "Gregory", 0L), + new ConditionalReplaceOperation<>(1L, "Albin", "Abraham", 0L), + new RemoveOperation<>(1L, 0L), + new PutIfAbsentOperation<>(2L, "Albin", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 0L); + assertThat(resolved.get(2L).get(), is("Albin")); + } + + @Test + public void testFullResolveHasCorrectTimeStamp() { + Chain chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0), + new PutOperation<>(1L, "Albin", 1), + new RemoveOperation<>(1L, 2), + new PutOperation<>(1L, "Albin", 3)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + Map> resolved = resolver.resolveAll(chain, 3); + + assertThat(resolved.get(1L).get(), is("Albin")); + } + + @Test + public void testResolveForMultipleOperationHasCorrectIsFirstAndTimeStamp() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin1", 0), + new PutOperation<>(1L, "Albin2", 1), + new RemoveOperation<>(1L, 2), + new PutOperation<>(1L, "AlbinAfterRemove", 3)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofHours(1))); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 3); + assertThat(valueHolder.get(), is("AlbinAfterRemove")); + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "AlbinAfterRemove", TimeUnit.HOURS.toMillis(1) + 3))))); + } + + @Test + public void testResolveForMultipleOperationHasCorrectIsFirstAndTimeStampWithExpiry() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin1", 0L), + new PutOperation<>(1L, "Albin2", 1L), + new PutOperation<>(1L, "Albin3", 2L), + new PutOperation<>(1L, "Albin4", 3L) + ); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1L))); + Store.ValueHolder valueHolder = resolver.resolve(chain, 1L, 3L); + + assertThat(valueHolder.get(), is("Albin4")); + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "Albin4", 4L))))); + } + + @Test + public void testCompactHasCorrectWithExpiry() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin1", 0L), + new PutOperation<>(1L, "Albin2", 1L), + new PutOperation<>(1L, "Albin3", 2L), + new PutOperation<>(1L, "Albin4", 3L) + ); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1L))); + resolver.compact(chain); + + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "Albin4", 3L))))); + } + + protected ChainResolver createChainResolver(ExpiryPolicy expiryPolicy) { + return createChainResolver(expiryPolicy, codec); + } + + @Test + public void testNonExpiringTimestampIsCleared() throws TimeoutException { + PutOperation expected = new PutOperation<>(1L, "Albin", 0L); + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(expected, + new TimestampOperation<>(1L, 1L) + ); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration()); + + assertThat(resolver.resolve(chain, 1L, 2L).get(), is("Albin")); + verify(chain).replaceAtHead(argThat(contains(operation(expected)))); + } + + + @SafeVarargs + protected final ServerStoreProxy.ChainEntry getEntryFromOperations(Operation ... operations) { + ChainBuilder chainBuilder = new ChainBuilder(); + for(Operation operation: operations) { + chainBuilder = chainBuilder.add(codec.encode(operation)); + } + Chain chain = chainBuilder.build(); + return spy(new ServerStoreProxy.ChainEntry(){ + + @Override + public Iterator iterator() { + return chain.iterator(); + } + + @Override + public void append(ByteBuffer payLoad) throws TimeoutException { + //nothing + } + + @Override + public void replaceAtHead(Chain equivalent) { + //nothing + } + + @Override + public boolean isEmpty() { + return chain.isEmpty(); + } + + @Override + public int length() { + return chain.length(); + } + }); + } + + protected List> getOperationsListFromChain(Chain chain) { + List> list = new ArrayList<>(); + for (Element element : chain) { + Operation operation = codec.decode(element.getPayload()); + list.add(operation); + } + return list; + } + + protected Matcher operation(Operation operation) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(Element item) { + return operation.equals(codec.decode(item.getPayload())); + } + + @Override + public void describeTo(Description description) { + description.appendText("is ").appendValue(operation); + } + }; + } + + protected Matcher binaryOperation(Operation operation) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(ByteBuffer item) { + return operation.equals(codec.decode(item.duplicate())); + } + + @Override + public void describeTo(Description description) { + description.appendText("is ").appendValue(operation); + } + }; + } + + protected static class CountingLongSerializer extends LongSerializer { + + protected int encodeCount = 0; + protected int decodeCount = 0; + + @Override + public ByteBuffer serialize(final Long object) { + encodeCount++; + return super.serialize(object); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException { + decodeCount++; + return super.read(binary); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException { + return super.equals(object, binary); + } + } + + protected static class CountingStringSerializer extends StringSerializer { + + protected int encodeCount = 0; + protected int decodeCount = 0; + + @Override + public ByteBuffer serialize(final String object) { + encodeCount++; + return super.serialize(object); + } + + @Override + public String read(final ByteBuffer binary) throws ClassNotFoundException { + decodeCount++; + return super.read(binary); + } + + @Override + public boolean equals(final String object, final ByteBuffer binary) throws ClassNotFoundException { + return super.equals(object, binary); + } + } + + T argThat(Matcher matches) { + return ArgumentMatchers.argThat(new ArgumentMatcher() { + @Override + public boolean matches(T argument) { + return matches.matches(argument); + } + }); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/BaseKeyValueOperationTest.java similarity index 93% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperationTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/BaseKeyValueOperationTest.java index f173613c4a..5d8d19ba98 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperationTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/BaseKeyValueOperationTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.clustered.client.TestTimeSource; import org.ehcache.impl.serialization.LongSerializer; @@ -24,9 +24,9 @@ import java.nio.ByteBuffer; -import static org.ehcache.clustered.client.internal.store.operations.Operation.BYTE_SIZE_BYTES; -import static org.ehcache.clustered.client.internal.store.operations.Operation.INT_SIZE_BYTES; -import static org.ehcache.clustered.client.internal.store.operations.Operation.LONG_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.BYTE_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.INT_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.LONG_SIZE_BYTES; import static org.junit.Assert.*; public abstract class BaseKeyValueOperationTest { @@ -98,4 +98,4 @@ public void testDecodeThrowsOnInvalidType() throws Exception { ByteBuffer buffer = ByteBuffer.wrap(new byte[] {2}); getNewOperation(buffer, keySerializer, valueSerializer); } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ConditionalRemoveOperationTest.java similarity index 96% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ConditionalRemoveOperationTest.java index b28879bc94..40c97f376a 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ConditionalRemoveOperationTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; import org.junit.Test; diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ConditionalReplaceOperationTest.java similarity index 90% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperationTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ConditionalReplaceOperationTest.java index f536417a86..c87b1ebd6f 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperationTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ConditionalReplaceOperationTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.clustered.client.TestTimeSource; import org.ehcache.impl.serialization.LongSerializer; @@ -24,11 +24,15 @@ import java.nio.ByteBuffer; -import static org.ehcache.clustered.client.internal.store.operations.Operation.BYTE_SIZE_BYTES; -import static org.ehcache.clustered.client.internal.store.operations.Operation.INT_SIZE_BYTES; -import static org.ehcache.clustered.client.internal.store.operations.Operation.LONG_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.BYTE_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.INT_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.LONG_SIZE_BYTES; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.*; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; public class ConditionalReplaceOperationTest { diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/EternalChainResolverTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/EternalChainResolverTest.java new file mode 100644 index 0000000000..e3ddfb8717 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/EternalChainResolverTest.java @@ -0,0 +1,35 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.store.operations; + +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.client.internal.store.operations.EternalChainResolver; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.expiry.ExpiryPolicy; + +import static org.ehcache.config.builders.ExpiryPolicyBuilder.noExpiration; +import static org.hamcrest.Matchers.is; +import static org.junit.Assume.assumeThat; + +public class EternalChainResolverTest extends AbstractChainResolverTest { + + @Override + protected ChainResolver createChainResolver(ExpiryPolicy expiryPolicy, OperationsCodec codec) { + assumeThat(expiryPolicy, is(noExpiration())); + return new EternalChainResolver<>(codec); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ExpiryChainResolverTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ExpiryChainResolverTest.java new file mode 100644 index 0000000000..e17bc673ef --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ExpiryChainResolverTest.java @@ -0,0 +1,364 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.store.operations; + +import org.ehcache.clustered.client.TestTimeSource; +import org.ehcache.clustered.client.internal.store.ServerStoreProxy; +import org.ehcache.clustered.client.internal.store.operations.ChainResolver; +import org.ehcache.clustered.client.internal.store.operations.ExpiryChainResolver; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.expiry.ExpiryPolicy; +import org.junit.Test; +import org.mockito.InOrder; + +import java.util.Map; +import java.util.concurrent.TimeoutException; + +import static java.time.Duration.ofMillis; +import static java.util.Collections.emptyMap; +import static org.ehcache.config.builders.ExpiryPolicyBuilder.timeToLiveExpiration; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.core.IsNull.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class ExpiryChainResolverTest extends AbstractChainResolverTest { + + @Override + protected ChainResolver createChainResolver(ExpiryPolicy expiryPolicy, OperationsCodec codec) { + return new ExpiryChainResolver<>(codec, expiryPolicy); + } + + @Test @Override + public void testCompactDecodesOperationValueOnlyOnDemand() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 1), + new PutOperation<>(1L, "Suresh", 2), + new PutOperation<>(1L, "Matthew", 3)); + + CountingLongSerializer keySerializer = new CountingLongSerializer(); + CountingStringSerializer valueSerializer = new CountingStringSerializer(); + OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration(), customCodec); + resolver.compact(chain); + + assertThat(keySerializer.decodeCount, is(3)); + assertThat(valueSerializer.decodeCount, is(3)); + assertThat(valueSerializer.encodeCount, is(0)); + assertThat(keySerializer.encodeCount, is(1)); //One encode from encoding the resolved operation's key + } + + @Test @Override + public void testResolveDecodesOperationValueOnlyOnDemand() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 1), + new PutOperation<>(1L, "Suresh", 2), + new PutOperation<>(1L, "Matthew", 3)); + + CountingLongSerializer keySerializer = new CountingLongSerializer(); + CountingStringSerializer valueSerializer = new CountingStringSerializer(); + OperationsCodec customCodec = new OperationsCodec<>(keySerializer, valueSerializer); + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.noExpiration(), customCodec); + resolver.resolve(chain, 1L, 0L); + + assertThat(keySerializer.decodeCount, is(3)); + assertThat(valueSerializer.decodeCount, is(3)); + assertThat(valueSerializer.encodeCount, is(0)); + assertThat(keySerializer.encodeCount, is(1)); //One encode from encoding the resolved operation's key + } + + @Test + @SuppressWarnings("unchecked") + public void testGetExpiryForAccessIsIgnored() { + TimeSource timeSource = new TestTimeSource(); + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ChainResolver chainResolver = createChainResolver(expiry); + + when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(ExpiryPolicy.INFINITE); + + + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "One", timeSource.getTimeMillis()), + new PutOperation<>(1L, "Second", timeSource.getTimeMillis()) + ); + + Store.ValueHolder valueHolder = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + + verify(expiry, times(0)).getExpiryForAccess(anyLong(), any()); + verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); + verify(expiry, times(1)).getExpiryForUpdate(anyLong(), any(), anyString()); + + verify(chain).replaceAtHead(any()); + } + + @Test + @SuppressWarnings("unchecked") + public void testGetExpiryForCreationIsInvokedOnlyOnce() { + TimeSource timeSource = new TestTimeSource(); + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ChainResolver chainResolver = createChainResolver(expiry); + + when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(ExpiryPolicy.INFINITE); + + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "One", timeSource.getTimeMillis()), + new PutOperation<>(1L, "Second", timeSource.getTimeMillis()), + new PutOperation<>(1L, "Three", timeSource.getTimeMillis()), + new PutOperation<>(1L, "Four", timeSource.getTimeMillis()) + ); + + Store.ValueHolder valueHolder = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + + InOrder inOrder = inOrder(expiry); + + inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); + inOrder.verify(expiry, times(3)).getExpiryForUpdate(anyLong(), any(), anyString()); + + verify(chain).replaceAtHead(any()); + } + + @Test + @SuppressWarnings("unchecked") + public void testGetExpiryForCreationIsNotInvokedForReplacedChains() { + TimeSource timeSource = new TestTimeSource(); + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ChainResolver chainResolver = createChainResolver(expiry); + + when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(ExpiryPolicy.INFINITE); + + + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Replaced", -10L), + new PutOperation<>(1L, "SecondAfterReplace", timeSource.getTimeMillis()), + new PutOperation<>(1L, "ThirdAfterReplace", timeSource.getTimeMillis()), + new PutOperation<>(1L, "FourthAfterReplace", timeSource.getTimeMillis()) + ); + + Store.ValueHolder valueHolder = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + verify(expiry, times(0)).getExpiryForCreation(anyLong(), anyString()); + verify(expiry, times(3)).getExpiryForUpdate(anyLong(), any(), anyString()); + + verify(chain).replaceAtHead(any()); + } + + @Test + @SuppressWarnings("unchecked") + public void testGetExpiryForCreationIsInvokedAfterRemoveOperations() { + TimeSource timeSource = new TestTimeSource(); + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ChainResolver chainResolver = createChainResolver(expiry); + + when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(ExpiryPolicy.INFINITE); + + + ServerStoreProxy.ChainEntry chainA = getEntryFromOperations( + new PutOperation<>(1L, "Replaced", 10L), + new PutOperation<>(1L, "SecondAfterReplace", 3L), + new RemoveOperation<>(1L, 4L), + new PutOperation<>(1L, "FourthAfterReplace", 5L) + ); + + Store.ValueHolder valueHolder = chainResolver.resolve(chainA, 1L, timeSource.getTimeMillis()); + + InOrder inOrder = inOrder(expiry); + + verify(expiry, times(0)).getExpiryForAccess(anyLong(), any()); + inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); + inOrder.verify(expiry, times(1)).getExpiryForUpdate(anyLong(), any(), anyString()); + inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); + + verify(chainA).replaceAtHead(any()); + + reset(expiry); + + when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(ExpiryPolicy.INFINITE); + + + ServerStoreProxy.ChainEntry chainB = getEntryFromOperations( + new PutOperation<>(1L, "One", timeSource.getTimeMillis()), + new PutOperation<>(1L, "Second", timeSource.getTimeMillis()), + new RemoveOperation<>(1L, timeSource.getTimeMillis()), + new PutOperation<>(1L, "Four", timeSource.getTimeMillis()) + ); + + chainResolver.resolve(chainB, 1L, timeSource.getTimeMillis()); + + inOrder = inOrder(expiry); + + verify(expiry, times(0)).getExpiryForAccess(anyLong(), any()); + inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); + inOrder.verify(expiry, times(1)).getExpiryForUpdate(anyLong(), any(), anyString()); + inOrder.verify(expiry, times(1)).getExpiryForCreation(anyLong(), anyString()); + + verify(chainB).replaceAtHead(any()); + } + + @Test + @SuppressWarnings("unchecked") + public void testNullGetExpiryForCreation() { + TimeSource timeSource = new TestTimeSource(); + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ChainResolver chainResolver = createChainResolver(expiry); + + when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(null); + + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(new PutOperation<>(1L, "Replaced", 10L)); + + Store.ValueHolder valueHolder = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + + assertThat(valueHolder, nullValue()); + verify(chain, never()).replaceAtHead(any()); + } + + @Test + @SuppressWarnings("unchecked") + public void testNullGetExpiryForUpdate() { + TimeSource timeSource = new TestTimeSource(); + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ChainResolver chainResolver = createChainResolver(expiry); + + when(expiry.getExpiryForUpdate(anyLong(), any(), anyString())).thenReturn(null); + + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Replaced", -10L), + new PutOperation<>(1L, "New", timeSource.getTimeMillis()) + ); + + Store.ValueHolder resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + + assertThat(resolvedChain.get(), is("New")); + + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "New", -10L))))); + } + + @Test + @SuppressWarnings("unchecked") + public void testGetExpiryForUpdateUpdatesExpirationTimeStamp() { + TimeSource timeSource = new TestTimeSource(); + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ChainResolver chainResolver = createChainResolver(expiry); + + when(expiry.getExpiryForUpdate(anyLong(), any(), anyString())).thenReturn(ofMillis(2L)); + + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Replaced", -10L), + new PutOperation<>(1L, "New", timeSource.getTimeMillis()) + ); + + Store.ValueHolder valueHolder = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + + assertThat(valueHolder.get(), is("New")); + verify(chain).replaceAtHead(argThat(contains(operation(new PutOperation<>(1L, "New", -2L))))); + } + + @Test + @SuppressWarnings("unchecked") + public void testExpiryThrowsException() { + TimeSource timeSource = new TestTimeSource(); + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ChainResolver chainResolver = createChainResolver(expiry); + + when(expiry.getExpiryForUpdate(anyLong(), any(), anyString())).thenThrow(new RuntimeException("Test Update Expiry")); + when(expiry.getExpiryForCreation(anyLong(), anyString())).thenThrow(new RuntimeException("Test Create Expiry")); + + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "One", -10L), + new PutOperation<>(1L, "Two", timeSource.getTimeMillis()) + ); + + Store.ValueHolder valueHolder = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + + assertThat(valueHolder, nullValue()); + + chain = getEntryFromOperations( + new PutOperation<>(1L, "One", timeSource.getTimeMillis()), + new PutOperation<>(1L, "Two", timeSource.getTimeMillis()) + ); + + valueHolder = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + + assertThat(valueHolder, nullValue()); + + verify(chain).replaceAtHead(any()); + } + + @Test + public void testResolveExpiresUsingOperationTime() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation(1L, "Albin", 0), + new PutIfAbsentOperation(1L, "Chris", 900) + ); + + ChainResolver resolver = createChainResolver(timeToLiveExpiration(ofMillis(1000))); + + Store.ValueHolder result = resolver.resolve(chain, 1L, 1500); + assertThat(result, nullValue()); + } + + @Test + public void testResolveAllExpiresUsingOperationTime() { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations( + new PutOperation<>(1L, "Albin", 0), + new PutIfAbsentOperation<>(1L, "Chris", 900) + ); + + ChainResolver resolver = createChainResolver(timeToLiveExpiration(ofMillis(1000))); + + Map> result = resolver.resolveAll(chain, 1500); + + assertThat(result, is(emptyMap())); + } + + @Test + public void testExpiredResolvedValueAddsTimestamp() throws TimeoutException { + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(new PutOperation<>(1L, "Albin", 0L)); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.timeToLiveExpiration(ofMillis(1000))); + + assertThat(resolver.resolve(chain, 1L, 1001L), nullValue()); + verify(chain).append(argThat(binaryOperation(new TimestampOperation<>(1L, 1001L)))); + verify(chain, never()).replaceAtHead(any()); + + } + + @Test + public void testExpiredTimestampClearsChain() { + PutOperation expected = new PutOperation<>(1L, "Albin", 0L); + ServerStoreProxy.ChainEntry chain = getEntryFromOperations(expected, + new TimestampOperation<>(1L, 1000L) + ); + + ChainResolver resolver = createChainResolver(ExpiryPolicyBuilder.timeToLiveExpiration(ofMillis(1000))); + + assertThat(resolver.resolve(chain, 1L, 999L), nullValue()); + verify(chain).replaceAtHead(argThat(emptyIterable())); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/LazyValueHolderTest.java similarity index 96% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/LazyValueHolderTest.java index a89f2bf8d3..d9d0a9f163 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/LazyValueHolderTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; import org.junit.Before; @@ -25,8 +25,8 @@ import java.nio.ByteBuffer; import java.util.Date; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.sameInstance; -import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/PutIfAbsentOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutIfAbsentOperationTest.java similarity index 96% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/PutIfAbsentOperationTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutIfAbsentOperationTest.java index 7843d24a19..84cbc04566 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/PutIfAbsentOperationTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutIfAbsentOperationTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; import org.junit.Test; diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/PutOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutOperationTest.java similarity index 96% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/PutOperationTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutOperationTest.java index 9d4deb8bf5..8eca0e0dc4 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/PutOperationTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutOperationTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; import org.junit.Test; diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutWithWriterOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutWithWriterOperationTest.java new file mode 100644 index 0000000000..b4f332da79 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/PutWithWriterOperationTest.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.common.internal.store.operations; + +import org.ehcache.spi.serialization.Serializer; +import org.junit.Test; + +import java.nio.ByteBuffer; + +import static org.junit.Assert.*; + +public class PutWithWriterOperationTest extends BaseKeyValueOperationTest { + + @Override + protected BaseKeyValueOperation getNewOperation(K key, V value, long timestamp) { + return new PutWithWriterOperation<>(key, value, timestamp); + } + + @Override + protected BaseKeyValueOperation getNewOperation(ByteBuffer buffer, Serializer keySerializer, Serializer valueSerializer) { + return new PutWithWriterOperation<>(buffer, keySerializer, valueSerializer); + } + + @Override + protected OperationCode getOperationCode() { + return OperationCode.PUT_WITH_WRITER; + } + + @Test + public void testApply() { + PutWithWriterOperation putOperation = new PutWithWriterOperation<>(1L, "one", System.currentTimeMillis()); + Result result = putOperation.apply(null); + assertSame(putOperation, result); + PutWithWriterOperation anotherOperation = new PutWithWriterOperation<>(1L, "two", System.currentTimeMillis()); + result = anotherOperation.apply(putOperation); + assertSame(anotherOperation, result); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/RemoveOperationTest.java similarity index 94% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperationTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/RemoveOperationTest.java index b1d6b14c75..533cd0e4e8 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperationTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/RemoveOperationTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.clustered.client.TestTimeSource; import org.ehcache.impl.serialization.LongSerializer; @@ -24,8 +24,8 @@ import java.nio.ByteBuffer; -import static org.ehcache.clustered.client.internal.store.operations.Operation.BYTE_SIZE_BYTES; -import static org.ehcache.clustered.client.internal.store.operations.Operation.LONG_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.BYTE_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.LONG_SIZE_BYTES; import static org.junit.Assert.*; public class RemoveOperationTest { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ReplaceOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ReplaceOperationTest.java similarity index 96% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ReplaceOperationTest.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ReplaceOperationTest.java index 94787b7948..621a3f43aa 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ReplaceOperationTest.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/ReplaceOperationTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; import org.junit.Test; diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/TimestampOperationTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/TimestampOperationTest.java new file mode 100644 index 0000000000..80a990ade7 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/common/internal/store/operations/TimestampOperationTest.java @@ -0,0 +1,96 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.store.operations; + +import org.ehcache.clustered.client.TestTimeSource; +import org.ehcache.impl.serialization.LongSerializer; +import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.serialization.Serializer; +import org.junit.Test; + +import java.nio.ByteBuffer; + +import static org.ehcache.clustered.common.internal.store.operations.Operation.BYTE_SIZE_BYTES; +import static org.ehcache.clustered.common.internal.store.operations.Operation.LONG_SIZE_BYTES; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsNull.nullValue; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +public class TimestampOperationTest { + + private static final Serializer keySerializer = new LongSerializer(); + private static final Serializer valueSerializer = new StringSerializer(); + + private static final TestTimeSource TIME_SOURCE = new TestTimeSource(); + + @Test + public void testEncode() throws Exception { + Long key = 12L; + TimestampOperation operation = new TimestampOperation<>(key, TIME_SOURCE.getTimeMillis()); + ByteBuffer byteBuffer = operation.encode(keySerializer, valueSerializer); + + ByteBuffer expected = ByteBuffer.allocate(BYTE_SIZE_BYTES + 2 * LONG_SIZE_BYTES); + expected.put(OperationCode.TIMESTAMP.getValue()); + expected.putLong(TIME_SOURCE.getTimeMillis()); + expected.putLong(key); + expected.flip(); + assertArrayEquals(expected.array(), byteBuffer.array()); + } + + @Test + public void testDecode() throws Exception { + Long key = 12L; + ByteBuffer blob = ByteBuffer.allocate(BYTE_SIZE_BYTES + 2 * LONG_SIZE_BYTES); + blob.put(OperationCode.TIMESTAMP.getValue()); + blob.putLong(TIME_SOURCE.getTimeMillis()); + blob.putLong(key); + blob.flip(); + + TimestampOperation operation = new TimestampOperation<>(blob, keySerializer); + assertEquals(key, operation.getKey()); + } + + @Test + public void testEncodeDecodeInvariant() throws Exception { + Long key = 12L; + TimestampOperation operation = new TimestampOperation<>(key, System.currentTimeMillis()); + + TimestampOperation decodedOperation = + new TimestampOperation<>(operation.encode(keySerializer, valueSerializer), keySerializer); + assertEquals(key, decodedOperation.getKey()); + } + + @Test(expected = IllegalArgumentException.class) + public void testDecodeThrowsOnInvalidType() throws Exception { + ByteBuffer buffer = ByteBuffer.wrap(new byte[] {10}); + new TimestampOperation(buffer, keySerializer); + } + + @Test + public void testApply() throws Exception { + TimestampOperation operation = new TimestampOperation<>(1L, System.currentTimeMillis()); + + Result result = operation.apply(null); + assertThat(result, nullValue()); + + PutOperation anotherOperation = new PutOperation<>(1L, "another one", System.currentTimeMillis()); + result = operation.apply(anotherOperation); + assertThat(result, sameInstance(anotherOperation)); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/BasicClusteredLoaderWriterTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/BasicClusteredLoaderWriterTest.java new file mode 100644 index 0000000000..83baff2609 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/BasicClusteredLoaderWriterTest.java @@ -0,0 +1,285 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.loaderWriter; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.CachePersistenceException; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.service.ClusterTierValidationException; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.resilience.ThrowingResilienceStrategy; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ThreadLocalRandom; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.*; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.*; + +public class BasicClusteredLoaderWriterTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/clustered-loader-writer"); + + @Before + public void definePassthroughServer() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 4, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test @SuppressWarnings("try") + public void testAllClientsNeedToHaveLoaderWriterConfigured() { + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + CacheConfiguration cacheConfiguration = getCacheConfiguration(loaderWriter); + + try (CacheManager cacheManager = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + CacheConfiguration withoutLoaderWriter = newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder + .newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(1, MemoryUnit.MB) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withResilienceStrategy(new ThrowingResilienceStrategy<>()) + .build(); + + try (CacheManager anotherManager = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", withoutLoaderWriter) + .build(true)) { + } catch (RuntimeException e) { + assertThat(e.getCause().getCause().getCause().getCause(), instanceOf(CachePersistenceException.class)); + assertThat(e.getCause().getCause().getCause().getCause().getCause(), instanceOf(ClusterTierValidationException.class)); + } + } + } + + @Test + public void testBasicClusteredCacheLoaderWriter() { + + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + CacheConfiguration cacheConfiguration = getCacheConfiguration(loaderWriter); + + try (CacheManager cacheManager = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + Cache cache = cacheManager.getCache("cache-1", Long.class, String.class); + + cache.put(1L, "1"); + + assertThat(cache.get(1L), is("1")); + + assertThat(loaderWriter.storeMap.get(1L), is("1")); + } + } + + @Test + public void testLoaderWriterMultipleClients() { + + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + + CacheConfiguration cacheConfiguration = getCacheConfiguration(loaderWriter); + + try (CacheManager cacheManager1 = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + try (CacheManager cacheManager2 = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + Cache client1 = cacheManager1.getCache("cache-1", Long.class, String.class); + Cache client2 = cacheManager2.getCache("cache-1", Long.class, String.class); + + client1.put(1L, "1"); + client2.put(1L, "2"); + + assertThat(client1.get(1L), is("2")); + assertThat(loaderWriter.storeMap.get(1L), is("2")); + + client1.remove(1L); + + assertThat(client2.get(1L), nullValue()); + assertThat(loaderWriter.storeMap.get(1L), nullValue()); + } + } + } + + @Test + public void testCASOpsMultipleClients() { + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + + CacheConfiguration cacheConfiguration = getCacheConfiguration(loaderWriter); + + try (CacheManager cacheManager1 = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + try (CacheManager cacheManager2 = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + Cache client1 = cacheManager1.getCache("cache-1", Long.class, String.class); + Cache client2 = cacheManager2.getCache("cache-1", Long.class, String.class); + + assertThat(client1.putIfAbsent(1L, "1"), nullValue()); + assertThat(client2.putIfAbsent(1L, "2"), is("1")); + + assertThat(client1.get(1L), is("1")); + assertThat(loaderWriter.storeMap.get(1L), is("1")); + + assertThat(client1.replace(1L, "2"), is("1")); + assertThat(client2.replace(1L, "3"), is("2")); + + assertThat(client1.get(1L), is("3")); + assertThat(loaderWriter.storeMap.get(1L), is("3")); + + assertThat(client1.replace(1L, "2", "4"), is(false)); + assertThat(client2.replace(1L, "3", "4"), is(true)); + + assertThat(client1.get(1L), is("4")); + assertThat(loaderWriter.storeMap.get(1L), is("4")); + + assertThat(client1.remove(1L, "5"), is(false)); + assertThat(client2.remove(1L, "4"), is(true)); + } + } + } + + @Test + public void testBulkOps() { + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + CacheConfiguration cacheConfiguration = getCacheConfiguration(loaderWriter); + + try (CacheManager cacheManager = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + Cache cache = cacheManager.getCache("cache-1", Long.class, String.class); + + Map mappings = new HashMap<>(); + + for (int i = 1; i <= 5; i++) { + mappings.put((long) i, "" + i); + } + + cache.putAll(mappings); + + assertThat(loaderWriter.storeMap.keySet(), containsInAnyOrder(mappings.keySet().toArray())); + + cache.clear(); + + Map loadedData = cache.getAll(mappings.keySet()); + + assertThat(mappings.keySet(), containsInAnyOrder(loadedData.keySet().toArray())); + + cache.removeAll(mappings.keySet()); + + assertThat(loaderWriter.storeMap.isEmpty(), is(true)); + } + } + + @Test + public void testCASOps() { + TestCacheLoaderWriter loaderWriter = new TestCacheLoaderWriter(); + + CacheConfiguration cacheConfiguration = getCacheConfiguration(loaderWriter); + + try (CacheManager cacheManager1 = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + try (CacheManager cacheManager2 = CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache("cache-1", cacheConfiguration) + .build(true)) { + + Cache client1 = cacheManager1.getCache("cache-1", Long.class, String.class); + Cache client2 = cacheManager2.getCache("cache-1", Long.class, String.class); + + assertThat(loaderWriter.storeMap.isEmpty(), is(true)); + + Set keys = new HashSet<>(); + ThreadLocalRandom.current().longs(10).forEach(x -> { + keys.add(x); + client1.put(x, Long.toString(x)); + }); + assertThat(loaderWriter.storeMap.size(), is(10)); + + + keys.forEach(x -> assertThat(client2.putIfAbsent(x, "Again" + x), is(Long.toString(x)))); + + keys.stream().limit(5).forEach(x -> + assertThat(client2.replace(x, "Replaced" + x), is(Long.toString(x)))); + + keys.forEach(x -> client1.remove(x, Long.toString(x))); + + assertThat(loaderWriter.storeMap.size(), is(5)); + } + } + } + + private CacheConfiguration getCacheConfiguration(TestCacheLoaderWriter loaderWriter) { + return newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder + .newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(1, MemoryUnit.MB) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withLoaderWriter(loaderWriter) + .withResilienceStrategy(new ThrowingResilienceStrategy<>()) + .build(); + } + +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/TestCacheLoaderWriter.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/TestCacheLoaderWriter.java new file mode 100644 index 0000000000..36d1f10d7a --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/TestCacheLoaderWriter.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.loaderWriter; + +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public class TestCacheLoaderWriter implements CacheLoaderWriter { + + public final Map storeMap = new ConcurrentHashMap<>(); + + @Override + public String load(Long key) throws Exception { + return storeMap.get(key); + } + + @Override + public void write(Long key, String value) throws Exception { + storeMap.put(key, value); + } + + @Override + public void delete(Long key) throws Exception { + storeMap.remove(key); + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/writebehind/BasicClusteredWriteBehindPassthroughTest.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/writebehind/BasicClusteredWriteBehindPassthroughTest.java new file mode 100644 index 0000000000..9175f25a83 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/writebehind/BasicClusteredWriteBehindPassthroughTest.java @@ -0,0 +1,279 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.loaderWriter.writebehind; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.builders.WriteBehindConfigurationBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class BasicClusteredWriteBehindPassthroughTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/clustered-write-behind"); + + @Before + public void definePassthroughServer() { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 4, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + private RecordingLoaderWriter loaderWriter; + private final List cacheRecords = new ArrayList<>(); + + private static final String CACHE_NAME = "cache-1"; + private static final long KEY = 1L; + + @Before + public void setUp() { + loaderWriter = new RecordingLoaderWriter<>(); + } + + @Test + public void testBasicClusteredWriteBehind() { + try (PersistentCacheManager cacheManager = createCacheManager()) { + Cache cache = cacheManager.getCache(CACHE_NAME, Long.class, String.class); + + put(cache, String.valueOf(0)); + put(cache, String.valueOf(1)); + + assertValue(cache, String.valueOf(1)); + + verifyRecords(cache); + cache.clear(); + } + } + + @Test + public void testWriteBehindMultipleClients() { + try (PersistentCacheManager cacheManager1 = createCacheManager(); + PersistentCacheManager cacheManager2 = createCacheManager()) { + Cache client1 = cacheManager1.getCache(CACHE_NAME, Long.class, String.class); + Cache client2 = cacheManager2.getCache(CACHE_NAME, Long.class, String.class); + + put(client1, "The one from client1"); + put(client2, "The one one from client2"); + assertValue(client1, "The one one from client2"); + remove(client1); + put(client2, "The one from client2"); + put(client1, "The one one from client1"); + assertValue(client2, "The one one from client1"); + remove(client2); + assertValue(client1, null); + put(client1, "The one from client1"); + put(client1, "The one one from client1"); + remove(client2); + put(client2, "The one from client2"); + put(client2, "The one one from client2"); + remove(client1); + assertValue(client2, null); + + verifyRecords(client1); + client1.clear(); + } + } + + @Test + public void testClusteredWriteBehindCAS() { + try (PersistentCacheManager cacheManager = createCacheManager()) { + Cache cache = cacheManager.getCache(CACHE_NAME, Long.class, String.class); + putIfAbsent(cache, "First value", true); + assertValue(cache, "First value"); + putIfAbsent(cache, "Second value", false); + assertValue(cache, "First value"); + put(cache, "First value again"); + assertValue(cache, "First value again"); + replace(cache, "Replaced First value", true); + assertValue(cache, "Replaced First value"); + replace(cache, "Replaced First value", "Replaced First value again", true); + assertValue(cache, "Replaced First value again"); + replace(cache, "Replaced First", "Tried Replacing First value again", false); + assertValue(cache, "Replaced First value again"); + condRemove(cache, "Replaced First value again", true); + assertValue(cache, null); + replace(cache, "Trying to replace value", false); + assertValue(cache, null); + put(cache, "new value", true); + assertValue(cache, "new value"); + condRemove(cache, "new value", false); + + verifyRecords(cache); + cache.clear(); + } + } + + @Test + public void testClusteredWriteBehindLoading() { + try (CacheManager cacheManager = createCacheManager()) { + Cache cache = cacheManager.getCache(CACHE_NAME, Long.class, String.class); + + put(cache, "Some value"); + tryFlushingUpdatesToSOR(cache); + cache.clear(); + + assertThat(cache.get(KEY), notNullValue()); + + cache.clear(); + } + } + + private void assertValue(Cache cache, String value) { + assertThat(cache.get(KEY), is(value)); + } + + private void put(Cache cache, String value) { + put(cache, value, true); + } + + private void put(Cache cache, String value, boolean addToCacheRecords) { + cache.put(KEY, value); + if (addToCacheRecords) { + cacheRecords.add(new Record(KEY, cache.get(KEY))); + } + } + + private void putIfAbsent(Cache cache, String value, boolean addToCacheRecords) { + cache.putIfAbsent(KEY, value); + if (addToCacheRecords) { + cacheRecords.add(new Record(KEY, cache.get(KEY))); + } + } + + private void replace(Cache cache, String value, boolean addToCacheRecords) { + cache.replace(KEY, value); + if (addToCacheRecords) { + cacheRecords.add(new Record(KEY, cache.get(KEY))); + } + } + + private void replace(Cache cache, String oldValue, String newValue, boolean addToCacheRecords) { + cache.replace(KEY, oldValue, newValue); + if (addToCacheRecords) { + cacheRecords.add(new Record(KEY, cache.get(KEY))); + } + } + + private void remove(Cache cache) { + cache.remove(KEY); + cacheRecords.add(new Record(KEY, null)); + } + + private void condRemove(Cache cache, String value, boolean addToCacheRecords) { + cache.remove(KEY, value); + if (addToCacheRecords) { + cacheRecords.add(new Record(KEY, null)); + } + } + + private void verifyRecords(Cache cache) { + tryFlushingUpdatesToSOR(cache); + + Map> loaderWriterRecords = loaderWriter.getRecords(); + + Map track = new HashMap<>(); + for (Record cacheRecord : cacheRecords) { + Long key = cacheRecord.getKey(); + int next = track.compute(key, (k, v) -> v == null ? 0 : v + 1); + assertThat(loaderWriterRecords.get(key).get(next), is(cacheRecord.getValue())); + } + } + + private void tryFlushingUpdatesToSOR(Cache cache) { + int retryCount = 1000; + int i = 0; + while (true) { + String value = "flush_queue_" + i; + put(cache, value, false); + try { + Thread.sleep(100); + } catch (InterruptedException e) { + e.printStackTrace(); + } + if (value.equals(loaderWriter.load(KEY))) break; + if (i > retryCount) { + throw new RuntimeException("Couldn't flush updates to SOR after " + retryCount + " tries"); + } + i++; + } + } + + private PersistentCacheManager createCacheManager() { + CacheConfiguration cacheConfiguration = + newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withLoaderWriter(loaderWriter) + .withService(WriteBehindConfigurationBuilder.newUnBatchedWriteBehindConfiguration()) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG)) + .build(); + + return CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(CLUSTER_URI).autoCreate(c -> c)) + .withCache(CACHE_NAME, cacheConfiguration) + .build(true); + } + + private static final class Record { + private final Long key; + private final String value; + + private Record(Long key, String value) { + this.key = key; + this.value = value; + } + + Long getKey() { + return key; + } + + String getValue() { + return value; + } + } +} diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/writebehind/RecordingLoaderWriter.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/writebehind/RecordingLoaderWriter.java new file mode 100644 index 0000000000..c6a9334376 --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/loaderWriter/writebehind/RecordingLoaderWriter.java @@ -0,0 +1,68 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.loaderWriter.writebehind; + +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class RecordingLoaderWriter implements CacheLoaderWriter { + + private final Map> records = new HashMap<>(); + + @Override + public synchronized V load(K key) { + List list = records.get(key); + return list == null ? null : list.get(list.size() - 1); + } + + @Override + public synchronized void write(K key, V value) { + record(key, value); + } + + @Override + public synchronized void delete(K key) { + record(key, null); + } + + @Override + public synchronized Map loadAll(Iterable keys) throws Exception { + return CacheLoaderWriter.super.loadAll(keys); + } + + @Override + public void writeAll(Iterable> entries) throws Exception { + CacheLoaderWriter.super.writeAll(entries); + } + + @Override + public void deleteAll(Iterable keys) throws Exception { + CacheLoaderWriter.super.deleteAll(keys); + } + + private void record(K key, V value) { + records.computeIfAbsent(key, k -> new ArrayList<>()).add(value); + } + + public synchronized Map> getRecords() { + return Collections.unmodifiableMap(records); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java similarity index 94% rename from clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java index f9349df1c7..ea6957ebcf 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java @@ -18,15 +18,12 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.terracotta.entity.ActiveInvokeContext; -import org.terracotta.entity.ActiveServerEntity; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ClientSourceId; import org.terracotta.entity.ConfigurationException; import org.terracotta.entity.EntityServerService; import org.terracotta.entity.PassiveSynchronizationChannel; -import org.terracotta.entity.ReconnectRejectedException; import org.terracotta.entity.ServiceRegistry; import org.terracotta.entity.StateDumpCollector; @@ -35,6 +32,7 @@ import java.util.Collections; import java.util.List; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; /** * Provides an alternative to {@link ClusterTierManagerServerEntityService} for unit tests to enable observing @@ -50,7 +48,7 @@ public class ObservableEhcacheServerEntityService extends ClusterTierManagerServ * * @return an unmodifiable list of {@code ObservableEhcacheActiveEntity} instances */ - public List getServedActiveEntities() throws NoSuchFieldException, IllegalAccessException { + public List getServedActiveEntities() { return Collections.unmodifiableList(servedActiveEntities); } @@ -114,7 +112,7 @@ public void disconnected(ClientDescriptor clientDescriptor) { } @Override - public EhcacheEntityResponse invokeActive(ActiveInvokeContext invokeContext, EhcacheEntityMessage message) { + public EhcacheEntityResponse invokeActive(ActiveInvokeContext invokeContext, EhcacheEntityMessage message) { return activeEntity.invokeActive(invokeContext, message); } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/server/package-info.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/server/package-info.java similarity index 100% rename from clustered/client/src/test/java/org/ehcache/clustered/server/package-info.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/server/package-info.java diff --git a/clustered/client/src/test/java/org/ehcache/clustered/server/store/ObservableClusterTierServerEntityService.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/server/store/ObservableClusterTierServerEntityService.java similarity index 100% rename from clustered/client/src/test/java/org/ehcache/clustered/server/store/ObservableClusterTierServerEntityService.java rename to clustered/ehcache-client/src/test/java/org/ehcache/clustered/server/store/ObservableClusterTierServerEntityService.java diff --git a/clustered/ehcache-client/src/test/java/org/ehcache/clustered/util/StatisticsTestUtils.java b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/util/StatisticsTestUtils.java new file mode 100644 index 0000000000..e17f9f66fb --- /dev/null +++ b/clustered/ehcache-client/src/test/java/org/ehcache/clustered/util/StatisticsTestUtils.java @@ -0,0 +1,162 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.util; + +import org.ehcache.core.spi.store.Store; +import org.hamcrest.Description; +import org.hamcrest.Factory; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.terracotta.context.ContextManager; +import org.terracotta.context.TreeNode; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.ValueStatistic; + +import java.util.Arrays; +import java.util.EnumSet; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * StatisticsTestUtils + */ +public class StatisticsTestUtils { + /** + * Validates expected {@link OperationStatistic} updates for the + * indicated {@code Ehcache} instance. The statistics identified in {@code changed} are + * checked for a value of {@code 1}; all other statistics in the same enumeration class are + * checked for a value of {@code 0}. + * + * @param store the store instance to check + * @param changed the statistics values that should have updated values + * @param the statistics enumeration type + */ + public static > void validateStats(final Store store, final EnumSet changed) { + assert changed != null; + final EnumSet unchanged = EnumSet.complementOf(changed); + + @SuppressWarnings("unchecked") + final List> sets = Arrays.asList(changed, unchanged); + Class statsClass = null; + for (final EnumSet set : sets) { + if (!set.isEmpty()) { + statsClass = set.iterator().next().getDeclaringClass(); + break; + } + } + assert statsClass != null; + + final OperationStatistic operationStatistic = getOperationStatistic(store, statsClass); + for (final E statId : changed) { + assertThat(String.format("Value for %s.%s", statId.getDeclaringClass().getName(), statId.name()), + getStatistic(operationStatistic, statId), StatisticMatcher.equalTo(1L)); + } + for (final E statId : unchanged) { + assertThat(String.format("Value for %s.%s", statId.getDeclaringClass().getName(), statId.name()), + getStatistic(operationStatistic, statId), StatisticMatcher.equalTo(0L)); + } + } + + public static > void validateStat(final Store store, E outcome, long count) { + OperationStatistic operationStatistic = getOperationStatistic(store, outcome.getDeclaringClass()); + assertThat(getStatistic(operationStatistic, outcome), StatisticMatcher.equalTo(count)); + } + + /** + * Gets the value of the statistic indicated from an {@link OperationStatistic} + * instance. + * + * @param operationStatistic the {@code OperationStatistic} instance from which the statistic is to + * be obtained + * @param statId the {@code Enum} constant identifying the statistic for which the value must be obtained + * @param The {@code Enum} type for the statistics + * + * @return the value, possibly null, for {@code statId} about {@code ehcache} + */ + private static > Number getStatistic(final OperationStatistic operationStatistic, final E statId) { + if (operationStatistic != null) { + final ValueStatistic valueStatistic = operationStatistic.statistic(statId); + return (valueStatistic == null ? null : valueStatistic.value()); + } + return null; + } + + /** + * Gets a reference to the {@link OperationStatistic} instance holding the + * class of statistics specified for the {@code Ehcache} instance provided. + * + * @param store the store instance for which the {@code OperationStatistic} instance + * should be obtained + * @param statsClass the {@code Class} of statistics for which the {@code OperationStatistic} instance + * should be obtained + * @param the {@code Enum} type for the statistics + * + * @return a reference to the {@code OperationStatistic} instance holding the {@code statsClass} statistics; + * may be {@code null} if {@code statsClass} statistics do not exist for {@code ehcache} + */ + private static > OperationStatistic getOperationStatistic(final Store store, final Class statsClass) { + for (final TreeNode statNode : ContextManager.nodeFor(store).getChildren()) { + final Object statObj = statNode.getContext().attributes().get("this"); + if (statObj instanceof OperationStatistic) { + @SuppressWarnings("unchecked") + final OperationStatistic statistic = (OperationStatistic)statObj; + if (statistic.type().equals(statsClass)) { + return statistic; + } + } + } + return null; + } + + /** + * Local {@code org.hamcrest.TypeSafeMatcher} implementation for testing + * {@code org.terracotta.statistics.OperationStatistic} values. + */ + private static final class StatisticMatcher extends TypeSafeMatcher { + + final Number expected; + + private StatisticMatcher(final Class expectedType, final Number expected) { + super(expectedType); + this.expected = expected; + } + + @Override + protected boolean matchesSafely(final Number value) { + if (value != null) { + return (value.longValue() == this.expected.longValue()); + } else { + return this.expected.longValue() == 0L; + } + } + + @Override + public void describeTo(final Description description) { + if (this.expected.longValue() == 0L) { + description.appendText("zero or null"); + } else { + description.appendValue(this.expected); + } + } + + @Factory + public static Matcher equalTo(final Number expected) { + return new StatisticMatcher(Number.class, expected); + } + } +} diff --git a/clustered/client/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService b/clustered/ehcache-client/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService similarity index 100% rename from clustered/client/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService rename to clustered/ehcache-client/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService diff --git a/clustered/client/src/test/resources/configs/cluster-ha.xml b/clustered/ehcache-client/src/test/resources/configs/cluster-ha.xml similarity index 96% rename from clustered/client/src/test/resources/configs/cluster-ha.xml rename to clustered/ehcache-client/src/test/resources/configs/cluster-ha.xml index f3e8ba0fdb..1e5ef784b0 100644 --- a/clustered/client/src/test/resources/configs/cluster-ha.xml +++ b/clustered/ehcache-client/src/test/resources/configs/cluster-ha.xml @@ -23,7 +23,7 @@ 5 - + 8 diff --git a/clustered/client/src/test/resources/configs/cluster-invalid-uri.xml b/clustered/ehcache-client/src/test/resources/configs/cluster-invalid-uri.xml similarity index 100% rename from clustered/client/src/test/resources/configs/cluster-invalid-uri.xml rename to clustered/ehcache-client/src/test/resources/configs/cluster-invalid-uri.xml diff --git a/clustered/ehcache-client/src/test/resources/configs/clustered-cache.xml b/clustered/ehcache-client/src/test/resources/configs/clustered-cache.xml new file mode 100644 index 0000000000..33e84b2194 --- /dev/null +++ b/clustered/ehcache-client/src/test/resources/configs/clustered-cache.xml @@ -0,0 +1,71 @@ + + + + + + + + + 5 + 5 + 150 + + + 8388608 + + + + + + java.lang.Long + java.lang.String + + + + + 12 + + + + + + java.lang.Long + java.lang.String + + + + + + + + + + + java.lang.Long + java.lang.String + + + + + 12 + + + + + + diff --git a/clustered/client/src/test/resources/configs/consistency.xml b/clustered/ehcache-client/src/test/resources/configs/consistency.xml similarity index 86% rename from clustered/client/src/test/resources/configs/consistency.xml rename to clustered/ehcache-client/src/test/resources/configs/consistency.xml index a32851c490..4f69a9afbb 100644 --- a/clustered/client/src/test/resources/configs/consistency.xml +++ b/clustered/ehcache-client/src/test/resources/configs/consistency.xml @@ -16,12 +16,8 @@ --> + xmlns:tc='http://www.ehcache.org/v3/clustered'> diff --git a/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml b/clustered/ehcache-client/src/test/resources/configs/docs/ehcache-clustered.xml similarity index 82% rename from clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml rename to clustered/ehcache-client/src/test/resources/configs/docs/ehcache-clustered.xml index fc34ea80f5..84f263a0a7 100644 --- a/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml +++ b/clustered/ehcache-client/src/test/resources/configs/docs/ehcache-clustered.xml @@ -15,12 +15,8 @@ --> + xmlns:tc='http://www.ehcache.org/v3/clustered'> diff --git a/clustered/client/src/test/resources/configs/offheap-resource.xml b/clustered/ehcache-client/src/test/resources/configs/offheap-resource.xml similarity index 94% rename from clustered/client/src/test/resources/configs/offheap-resource.xml rename to clustered/ehcache-client/src/test/resources/configs/offheap-resource.xml index 5018f2226c..b805e5dca2 100644 --- a/clustered/client/src/test/resources/configs/offheap-resource.xml +++ b/clustered/ehcache-client/src/test/resources/configs/offheap-resource.xml @@ -17,7 +17,6 @@ --> 64 diff --git a/clustered/client/src/test/resources/configs/simple-cluster.xml b/clustered/ehcache-client/src/test/resources/configs/simple-cluster.xml similarity index 95% rename from clustered/client/src/test/resources/configs/simple-cluster.xml rename to clustered/ehcache-client/src/test/resources/configs/simple-cluster.xml index d1bb120854..e8016a8f54 100644 --- a/clustered/client/src/test/resources/configs/simple-cluster.xml +++ b/clustered/ehcache-client/src/test/resources/configs/simple-cluster.xml @@ -23,7 +23,7 @@ 5 - + 8 @@ -33,7 +33,7 @@ java.lang.Long java.lang.String - 16 + 8 diff --git a/clustered/client/src/test/resources/configs/unknown-cluster-cache-invalid-attribute.xml b/clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache-invalid-attribute.xml similarity index 90% rename from clustered/client/src/test/resources/configs/unknown-cluster-cache-invalid-attribute.xml rename to clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache-invalid-attribute.xml index 89295bc1bb..5b39fa2d8a 100644 --- a/clustered/client/src/test/resources/configs/unknown-cluster-cache-invalid-attribute.xml +++ b/clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache-invalid-attribute.xml @@ -16,14 +16,13 @@ --> - + diff --git a/clustered/client/src/test/resources/configs/unknown-cluster-cache-invalid-element.xml b/clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache-invalid-element.xml similarity index 90% rename from clustered/client/src/test/resources/configs/unknown-cluster-cache-invalid-element.xml rename to clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache-invalid-element.xml index ed0af06f8b..642b8a3d1f 100644 --- a/clustered/client/src/test/resources/configs/unknown-cluster-cache-invalid-element.xml +++ b/clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache-invalid-element.xml @@ -16,14 +16,13 @@ --> - + diff --git a/clustered/client/src/test/resources/configs/unknown-cluster-cache.xml b/clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache.xml similarity index 90% rename from clustered/client/src/test/resources/configs/unknown-cluster-cache.xml rename to clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache.xml index 63f9c7a278..0ee2f72f42 100644 --- a/clustered/client/src/test/resources/configs/unknown-cluster-cache.xml +++ b/clustered/ehcache-client/src/test/resources/configs/unknown-cluster-cache.xml @@ -16,14 +16,13 @@ --> - + diff --git a/clustered/ehcache-clustered/build.gradle b/clustered/ehcache-clustered/build.gradle new file mode 100644 index 0000000000..92ddb8616e --- /dev/null +++ b/clustered/ehcache-clustered/build.gradle @@ -0,0 +1,180 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import aQute.bnd.osgi.Constants + +/** + * NOTE: this directory had to be named clustered-dist instead of just dist + * because gradle creatively substitutes :dist for :clustered:dist or vice versa + * if groups are the same + * https://discuss.gradle.org/t/dependency-substitution-wrong-with-more-than-one-sub-project-with-same-name/7253/6 + */ + +plugins { + id 'org.ehcache.build.package' + id 'distribution' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Clustered Module' + description = 'Ehcache 3 Clustered: Defines the client jar and the kit containing the Terracotta server' + } +} + +ext { + docsFolder = "$buildDir/docs/asciidoc" +} + +configurations { + [apiElements, runtimeElements].each { + it.outgoing { + variants.removeIf { it.name == 'classes' || it.name == 'resources' } + capability "org.ehcache:ehcache-clustered:$version" + capability "org.ehcache.modules.clustered:ehcache-client:$version" + } + } + + contents { + exclude group:'org.ehcache.modules' + exclude group:'org.slf4j' + } +} + +dependencies { + contents project(':clustered:ehcache-client') + contents "org.terracotta.internal:client-runtime:$terracottaCoreVersion" + + implementation "org.slf4j:slf4j-api:$parent.slf4jVersion" + implementation project(':ehcache') +} + +def kitProvides = { Configuration c -> + c.exclude group:'org.slf4j', module:'slf4j-api' + c.exclude group:'org.terracotta', module:'entity-server-api' + c.exclude group:'org.terracotta', module:'entity-common-api' + c.exclude group:'org.terracotta', module:'packaging-support' + c.exclude group:'org.terracotta', module:'standard-cluster-services' + c.exclude group:'org.terracotta', module:'statistics' + c.exclude group:'org.terracotta', module:'runnel' + c.exclude group:'org.terracotta', module:'client-message-tracker' + c.exclude group:'org.terracotta.management', module:'monitoring-service-api' + c.exclude group:'org.terracotta.management', module:'management-model' + c.exclude group:'org.terracotta.management', module:'management-registry' + c.exclude group:'org.terracotta.management', module:'cluster-topology' + c.exclude group:'org.terracotta.management', module:'sequence-generator' +} + +configurations { + kit + serverApis(kitProvides) + serverLibs(kitProvides) +} + +dependencies { + serverApis project(':clustered:server:ehcache-service-api') + serverLibs project(':clustered:server:ehcache-entity') + serverLibs project(':clustered:server:ehcache-service') + + kit "org.terracotta:platform-kit:$terracottaPlatformVersion@tar.gz" +} + +task copyDocs(type: Sync) { + dependsOn asciidocZip + from zipTree(asciidocZip.archivePath) + into docsFolder +} + +javadoc { + exclude '**/core/**', '**/impl/**', '**/xml/**', '**/jsr107/**', '**/transactions/**', '**/management/**', '**/tck/**' +} + +tasks.named('jar') { + osgi { + instruction Constants.BUNDLE_SYMBOLICNAME, 'org.ehcache.clustered' + instruction Constants.EXPORT_PACKAGE, '!com.tc.*, !com.terracotta.*, !org.terracotta.*, !org.ehcache.*.internal.*, !sun.misc, org.ehcache.clustered.client.*, org.ehcache.clustered.common.*' + instruction Constants.IMPORT_PACKAGE, '!sun.misc.*, org.ehcache.xml.*;resolution:=optional, jdk.jfr.*;resolution:=optional, !com.fasterxml.jackson.*, !org.terracotta.json, javax.xml.bind*;version="[2.2,3)", *' + } +} + +distributions { + main { + distributionBaseName = archivesBaseName + contents { + filesMatching('**/*.jar') { + // We can safely exclude JAR duplicates as our dependency strategy is fail on conflict + duplicatesStrategy DuplicatesStrategy.EXCLUDE + } + //tc kit + into ('') { + from configurations.kit.elements.map { + files -> files.collect { tarTree(it) } + } + eachFile { f -> + // remove top level directory from the kit + f.path = f.path.replace("platform-kit-$terracottaPlatformVersion/", "") + } + exclude { f -> + // Exclude tc's README.txt - Issue 1273 + f.path.contains('README.txt') || f.path.contains('server/conf') + } + includeEmptyDirs = false + } + into ("server/plugins/api") { + from configurations.serverApis + } + into ("server/plugins/lib") { + from (configurations.serverLibs - configurations.serverApis) + } + into ('client/ehcache') { + from jar + from project(':ehcache').jar + exclude { f -> + !f.path.contains('ehcache') // do not add any transitives in this directory + } + } + into ('client/ehcache/documentation') { + from "$docsFolder/user" + } + into ('client/ehcache/javadoc') { + from tasks.named('javadocJar') + from project(':ehcache').javadocJar + } + into ('client/lib') { + from configurations.runtimeClasspath + } + into ('') { + from 'src/assemble' + } + } + } +} + +distTar { + archiveClassifier = 'kit' + compression = Compression.GZIP +} + +distZip { + archiveClassifier = 'kit' +} + +publishing.publications.withType(MavenPublication) { + artifact distZip + artifact distTar +} + +[distTar, distZip, installDist]*.dependsOn copyDocs, javadocJar, project(':ehcache').jar, project(':ehcache').javadocJar diff --git a/clustered/clustered-dist/src/assemble/README.txt b/clustered/ehcache-clustered/src/assemble/README.txt similarity index 100% rename from clustered/clustered-dist/src/assemble/README.txt rename to clustered/ehcache-clustered/src/assemble/README.txt diff --git a/clustered/clustered-dist/src/assemble/legal/APACHE_PUBLIC_LICENSE.txt b/clustered/ehcache-clustered/src/assemble/legal/APACHE_PUBLIC_LICENSE.txt similarity index 100% rename from clustered/clustered-dist/src/assemble/legal/APACHE_PUBLIC_LICENSE.txt rename to clustered/ehcache-clustered/src/assemble/legal/APACHE_PUBLIC_LICENSE.txt diff --git a/clustered/clustered-dist/src/assemble/legal/LICENSE b/clustered/ehcache-clustered/src/assemble/legal/LICENSE similarity index 100% rename from clustered/clustered-dist/src/assemble/legal/LICENSE rename to clustered/ehcache-clustered/src/assemble/legal/LICENSE diff --git a/clustered/ehcache-clustered/src/assemble/server/conf/cluster.cfg b/clustered/ehcache-clustered/src/assemble/server/conf/cluster.cfg new file mode 100644 index 0000000000..230113261a --- /dev/null +++ b/clustered/ehcache-clustered/src/assemble/server/conf/cluster.cfg @@ -0,0 +1,13 @@ +client-lease-duration=150s +client-reconnect-window=120s +cluster-name=default-cluster +failover-priority=availability +offheap-resources=main:512MB +stripe-names=default-stripe +default-stripe:node-names=default-node +default-node:bind-address=0.0.0.0 +default-node:group-bind-address=0.0.0.0 +default-node:group-port=9430 +default-node:hostname=localhost +default-node:log-dir=%H/terracotta/logs +default-node:port=9410 diff --git a/clustered/ehcache-common-api/build.gradle b/clustered/ehcache-common-api/build.gradle new file mode 100644 index 0000000000..e748bfe3be --- /dev/null +++ b/clustered/ehcache-common-api/build.gradle @@ -0,0 +1,30 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.clustered-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Common Clustering API module' + description = 'The Common Clustering API module of Ehcache 3' + } +} + +dependencies { + api "org.terracotta:entity-common-api:$terracottaApisVersion" +} diff --git a/clustered/common/config/checkstyle-suppressions.xml b/clustered/ehcache-common-api/config/checkstyle-suppressions.xml similarity index 100% rename from clustered/common/config/checkstyle-suppressions.xml rename to clustered/ehcache-common-api/config/checkstyle-suppressions.xml diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/Consistency.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/Consistency.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/Consistency.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/Consistency.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/PoolAllocation.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/PoolAllocation.java similarity index 96% rename from clustered/common/src/main/java/org/ehcache/clustered/common/PoolAllocation.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/PoolAllocation.java index 9a60d37aaa..c8250fd31a 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/PoolAllocation.java +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/PoolAllocation.java @@ -17,6 +17,7 @@ package org.ehcache.clustered.common; import java.io.Serializable; +import java.util.Objects; /** * PoolAllocation @@ -61,6 +62,7 @@ public Dedicated(String resourceName, long size) { * * @return the dedicated allocation size */ + @Override public long getSize() { return size; } @@ -71,6 +73,7 @@ public long getSize() { * * @return the server-side resource name */ + @Override public String getResourceName() { return resourceName; } @@ -84,8 +87,7 @@ public boolean isCompatible(PoolAllocation other) { final Dedicated dedicated = (Dedicated)other; - if (size != dedicated.size) return false; - return resourceName != null ? resourceName.equals(dedicated.resourceName) : dedicated.resourceName == null; + return Objects.equals(resourceName, dedicated.resourceName); } @Override @@ -117,6 +119,7 @@ public Shared(String resourcePoolName) { * * @return the server-side resource pool name */ + @Override public String getResourcePoolName() { return resourcePoolName; } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/ServerSideConfiguration.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/ServerSideConfiguration.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/ServerSideConfiguration.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/ServerSideConfiguration.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ClusterTierManagerConfiguration.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/ClusterTierManagerConfiguration.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/ClusterTierManagerConfiguration.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/ClusterTierManagerConfiguration.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java similarity index 77% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java index 2ce6ad708f..0779729877 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java @@ -37,20 +37,37 @@ public class ServerStoreConfiguration implements Serializable { private final String keySerializerType; private final String valueSerializerType; private final Consistency consistency; - // TODO: Loader/Writer configuration ... + private final boolean loaderWriterConfigured; + private final boolean writeBehindConfigured; public ServerStoreConfiguration(PoolAllocation poolAllocation, String storedKeyType, String storedValueType, String keySerializerType, String valueSerializerType, - Consistency consistency) { + Consistency consistency, + boolean loaderWriterConfigured) { + this(poolAllocation, storedKeyType, storedValueType, keySerializerType, valueSerializerType, consistency, + loaderWriterConfigured, false); + } + + + public ServerStoreConfiguration(PoolAllocation poolAllocation, + String storedKeyType, + String storedValueType, + String keySerializerType, + String valueSerializerType, + Consistency consistency, + boolean loaderWriterConfigured, + boolean writeBehindConfigured) { this.poolAllocation = poolAllocation; this.storedKeyType = storedKeyType; this.storedValueType = storedValueType; this.keySerializerType = keySerializerType; this.valueSerializerType = valueSerializerType; this.consistency = consistency; + this.loaderWriterConfigured = loaderWriterConfigured; + this.writeBehindConfigured = writeBehindConfigured; } public PoolAllocation getPoolAllocation() { @@ -77,6 +94,14 @@ public Consistency getConsistency() { return consistency; } + public boolean isLoaderWriterConfigured() { + return loaderWriterConfigured; + } + + public boolean isWriteBehindConfigured() { + return writeBehindConfigured; + } + public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringBuilder sb) { boolean isCompatible = true; @@ -86,6 +111,8 @@ public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringB isCompatible = isCompatible && compareField(sb, "valueSerializerType", valueSerializerType, otherConfiguration.getValueSerializerType()); isCompatible = isCompatible && compareConsistencyField(sb, consistency, otherConfiguration.getConsistency()); isCompatible = isCompatible && comparePoolAllocation(sb, otherConfiguration.getPoolAllocation()); + isCompatible = isCompatible && (otherConfiguration.isLoaderWriterConfigured() == loaderWriterConfigured); + isCompatible = isCompatible && (otherConfiguration.isWriteBehindConfigured() == writeBehindConfigured); return isCompatible; } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/ClusterException.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/ClusterException.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/ClusterException.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/ClusterException.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/DestroyInProgressException.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/DestroyInProgressException.java similarity index 80% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/DestroyInProgressException.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/DestroyInProgressException.java index 8a183d81d5..ae6c63c4f4 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/DestroyInProgressException.java +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/DestroyInProgressException.java @@ -21,7 +21,14 @@ */ public class DestroyInProgressException extends LifecycleException { + private static final long serialVersionUID = 1917543049279158303L; + public DestroyInProgressException(String message) { super(message); } + + @Override + public DestroyInProgressException withClientStackTrace() { + return new DestroyInProgressException(this.getMessage()); + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerSideConfigurationException.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerSideConfigurationException.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerSideConfigurationException.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerSideConfigurationException.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidStoreException.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidStoreException.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidStoreException.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidStoreException.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/LifecycleException.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/LifecycleException.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/LifecycleException.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/exceptions/LifecycleException.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java diff --git a/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java new file mode 100644 index 0000000000..61dbfcb597 --- /dev/null +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java @@ -0,0 +1,379 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + + +import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.store.Chain; +import org.terracotta.entity.EntityResponse; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +public abstract class EhcacheEntityResponse implements EntityResponse { + + public abstract EhcacheResponseType getResponseType(); + + public static Success success() { + return Success.INSTANCE; + } + + public static class Success extends EhcacheEntityResponse { + + private static final Success INSTANCE = new Success(); + + private Success() { + //singleton + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.SUCCESS; + } + } + + public static Failure failure(ClusterException cause) { + return new Failure(cause); + } + + public static class Failure extends EhcacheEntityResponse { + + private final ClusterException cause; + + private Failure(ClusterException cause) { + this.cause = cause; + } + + public ClusterException getCause() { + return cause; + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.FAILURE; + } + } + + public static GetResponse getResponse(Chain chain) { + return new GetResponse(chain); + } + + public static class GetResponse extends EhcacheEntityResponse { + + private final Chain chain; + + private GetResponse(Chain chain) { + this.chain = chain; + } + + public Chain getChain() { + return chain; + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.GET_RESPONSE; + } + } + + public static HashInvalidationDone hashInvalidationDone(long key) { + return new HashInvalidationDone(key); + } + + public static class HashInvalidationDone extends EhcacheEntityResponse { + private final long key; + + private HashInvalidationDone(long key) { + this.key = key; + } + + public long getKey() { + return key; + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.HASH_INVALIDATION_DONE; + } + } + + public static AllInvalidationDone allInvalidationDone() { + return new AllInvalidationDone(); + } + + public static class AllInvalidationDone extends EhcacheEntityResponse { + + private AllInvalidationDone() { + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.ALL_INVALIDATION_DONE; + } + } + + public static ServerAppend serverAppend(ByteBuffer appended, Chain beforeAppend) { + return new ServerAppend(appended, beforeAppend); + } + + public static class ServerAppend extends EhcacheEntityResponse { + private final ByteBuffer appended; + private final Chain beforeAppend; + + ServerAppend(ByteBuffer appended, Chain beforeAppend) { + this.appended = appended; + this.beforeAppend = beforeAppend; + } + + public ByteBuffer getAppended() { + return appended; + } + + public Chain getBeforeAppend() { + return beforeAppend; + } + + @Override + public EhcacheResponseType getResponseType() { + return EhcacheResponseType.SERVER_APPEND; + } + } + + public static ServerInvalidateHash serverInvalidateHash(long key, Chain evictedChain) { + return new ServerInvalidateHash(key, evictedChain); + } + + // this is fired when the server evicts a chain + public static class ServerInvalidateHash extends EhcacheEntityResponse { + private final long key; + private final Chain evictedChain; + + private ServerInvalidateHash(long key, Chain evictedChain) { + this.key = key; + this.evictedChain = evictedChain; + } + + public long getKey() { + return key; + } + + public Chain getEvictedChain() { + return evictedChain; + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.SERVER_INVALIDATE_HASH; + } + } + + public static ClientInvalidateHash clientInvalidateHash(long key, int invalidationId) { + return new ClientInvalidateHash(key, invalidationId); + } + + // this is fired when a client modifies a chain (i.e.: on append) + public static class ClientInvalidateHash extends EhcacheEntityResponse { + private final long key; + private final int invalidationId; + + private ClientInvalidateHash(long key, int invalidationId) { + this.key = key; + this.invalidationId = invalidationId; + } + + public long getKey() { + return key; + } + + public int getInvalidationId() { + return invalidationId; + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.CLIENT_INVALIDATE_HASH; + } + } + + public static ClientInvalidateAll clientInvalidateAll(int invalidationId) { + return new ClientInvalidateAll(invalidationId); + } + + public static class ClientInvalidateAll extends EhcacheEntityResponse { + private final int invalidationId; + + private ClientInvalidateAll(int invalidationId) { + this.invalidationId = invalidationId; + } + + public int getInvalidationId() { + return invalidationId; + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.CLIENT_INVALIDATE_ALL; + } + } + + public static MapValue mapValue(Object value) { + return new MapValue(value); + } + + public static class MapValue extends EhcacheEntityResponse { + + private final Object value; + + private MapValue(Object value) { + this.value = value; + } + + public Object getValue() { + return this.value; + } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.MAP_VALUE; + } + } + + public static PrepareForDestroy prepareForDestroy(Set stores) { + return new PrepareForDestroy(stores); + } + + public static class PrepareForDestroy extends EhcacheEntityResponse { + + private final Set stores; + + private PrepareForDestroy(Set stores) { + this.stores = stores; + } + + @Override + public EhcacheResponseType getResponseType() { + return EhcacheResponseType.PREPARE_FOR_DESTROY; + } + + public Set getStores() { + return stores; + } + } + + public static ResolveRequest resolveRequest(long key, Chain chain) { + return new ResolveRequest(key, chain); + } + + public static class ResolveRequest extends EhcacheEntityResponse { + + private final long key; + private final Chain chain; + + ResolveRequest(long key, Chain chain) { + this.key = key; + this.chain = chain; + } + + @Override + public EhcacheResponseType getResponseType() { + return EhcacheResponseType.RESOLVE_REQUEST; + } + + public long getKey() { + return key; + } + + public Chain getChain() { + return chain; + } + } + + public static LockSuccess lockSuccess(Chain chain) { + return new LockSuccess(chain); + } + + public static class LockSuccess extends EhcacheEntityResponse { + + private final Chain chain; + + LockSuccess(Chain chain) { + this.chain = chain; + } + + public Chain getChain() { + return chain; + } + + @Override + public EhcacheResponseType getResponseType() { + return EhcacheResponseType.LOCK_SUCCESS; + } + } + + public static LockFailure lockFailure() { + return new LockFailure(); + } + + public static class LockFailure extends EhcacheEntityResponse { + + private LockFailure() { + + } + + @Override + public EhcacheResponseType getResponseType() { + return EhcacheResponseType.LOCK_FAILURE; + } + } + + public static IteratorBatch iteratorBatchResponse(UUID id, List> chains, boolean last) { + return new IteratorBatch(id, chains, last); + } + + public static class IteratorBatch extends EhcacheEntityResponse { + + private final UUID id; + private final List> chains; + private final boolean last; + + private IteratorBatch(UUID id, List> chains, boolean last) { + this.id = id; + this.chains = chains; + this.last = last; + } + + @Override + public EhcacheResponseType getResponseType() { + return EhcacheResponseType.ITERATOR_BATCH; + } + + public boolean isLast() { + return last; + } + + public List> getChains() { + return chains; + } + + public UUID getIdentity() { + return id; + } + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java similarity index 76% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java index cc32993831..06963dd68b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java @@ -16,12 +16,9 @@ package org.ehcache.clustered.common.internal.messages; -import org.terracotta.runnel.EnumMapping; - import java.util.EnumSet; import static java.util.EnumSet.of; -import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; /** * EhcacheMessageType @@ -42,6 +39,12 @@ public enum EhcacheMessageType { CLIENT_INVALIDATION_ALL_ACK, CLEAR, GET_STORE, + LOCK, + UNLOCK, + ITERATOR_OPEN, + ITERATOR_CLOSE, + ITERATOR_ADVANCE, + ENABLE_EVENT_LISTENER, // StateRepository operation messages GET_STATE_REPO, @@ -51,38 +54,16 @@ public enum EhcacheMessageType { // Passive replication messages CHAIN_REPLICATION_OP, CLEAR_INVALIDATION_COMPLETE, - INVALIDATION_COMPLETE; - - public static final String MESSAGE_TYPE_FIELD_NAME = "opCode"; - public static final int MESSAGE_TYPE_FIELD_INDEX = 10; - public static final EnumMapping EHCACHE_MESSAGE_TYPES_ENUM_MAPPING = newEnumMappingBuilder(EhcacheMessageType.class) - .mapping(VALIDATE, 1) - .mapping(VALIDATE_SERVER_STORE, 2) - .mapping(PREPARE_FOR_DESTROY, 3) - - .mapping(GET_AND_APPEND, 21) - .mapping(APPEND, 22) - .mapping(REPLACE, 23) - .mapping(CLIENT_INVALIDATION_ACK, 24) - .mapping(CLIENT_INVALIDATION_ALL_ACK, 25) - .mapping(CLEAR, 26) - .mapping(GET_STORE, 27) - - .mapping(GET_STATE_REPO, 41) - .mapping(PUT_IF_ABSENT, 42) - .mapping(ENTRY_SET, 43) - - .mapping(CHAIN_REPLICATION_OP, 61) - .mapping(CLEAR_INVALIDATION_COMPLETE, 63) - .mapping(INVALIDATION_COMPLETE, 64) - .build(); + INVALIDATION_COMPLETE, + MESSAGE_CATCHUP; public static final EnumSet LIFECYCLE_MESSAGES = of(VALIDATE, VALIDATE_SERVER_STORE, PREPARE_FOR_DESTROY); public static boolean isLifecycleMessage(EhcacheMessageType value) { return LIFECYCLE_MESSAGES.contains(value); } - public static final EnumSet STORE_OPERATION_MESSAGES = of(GET_AND_APPEND, APPEND, REPLACE, CLIENT_INVALIDATION_ACK, CLIENT_INVALIDATION_ALL_ACK, CLEAR, GET_STORE); + public static final EnumSet STORE_OPERATION_MESSAGES = of(GET_AND_APPEND, APPEND, + REPLACE, CLIENT_INVALIDATION_ACK, CLIENT_INVALIDATION_ALL_ACK, CLEAR, GET_STORE, LOCK, UNLOCK, ITERATOR_OPEN, ITERATOR_CLOSE, ITERATOR_ADVANCE, ENABLE_EVENT_LISTENER); public static boolean isStoreOperationMessage(EhcacheMessageType value) { return STORE_OPERATION_MESSAGES.contains(value); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java diff --git a/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java new file mode 100644 index 0000000000..90b683b994 --- /dev/null +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +/** + * EhcacheResponseType + */ +public enum EhcacheResponseType { + SUCCESS, + FAILURE, + GET_RESPONSE, + HASH_INVALIDATION_DONE, + CLIENT_INVALIDATE_HASH, + CLIENT_INVALIDATE_ALL, + SERVER_INVALIDATE_HASH, + MAP_VALUE, + ALL_INVALIDATION_DONE, + PREPARE_FOR_DESTROY, + RESOLVE_REQUEST, + LOCK_SUCCESS, + LOCK_FAILURE, + ITERATOR_BATCH, + SERVER_APPEND, + ; +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java similarity index 87% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java index 203f874985..953ac5c7fa 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java @@ -20,6 +20,8 @@ public abstract class StateRepositoryOpMessage extends EhcacheOperationMessage implements Serializable { + private static final long serialVersionUID = -6701802926010996981L; + private final String cacheId; private final String mapId; @@ -38,6 +40,8 @@ public String getMapId() { private static abstract class KeyBasedMessage extends StateRepositoryOpMessage { + private static final long serialVersionUID = 2338704755924839309L; + private final Object key; private KeyBasedMessage(final String cacheId, final String mapId, final Object key) { @@ -53,6 +57,8 @@ public Object getKey() { public static class GetMessage extends KeyBasedMessage { + private static final long serialVersionUID = 7263513962868446470L; + public GetMessage(final String cacheId, final String mapId, final Object key) { super(cacheId, mapId, key); } @@ -65,6 +71,8 @@ public EhcacheMessageType getMessageType() { public static class PutIfAbsentMessage extends KeyBasedMessage { + private static final long serialVersionUID = 2743653481411126124L; + private final Object value; public PutIfAbsentMessage(final String cacheId, final String mapId, final Object key, final Object value) { @@ -84,6 +92,8 @@ public EhcacheMessageType getMessageType() { public static class EntrySetMessage extends StateRepositoryOpMessage { + private static final long serialVersionUID = 5230634750732779978L; + public EntrySetMessage(final String cacheId, final String mapId) { super(cacheId, mapId); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Chain.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/Chain.java similarity index 87% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Chain.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/Chain.java index 2919628a38..836d7193cd 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Chain.java +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/Chain.java @@ -34,15 +34,6 @@ */ public interface Chain extends Iterable { - /** - * Returns the iterator to iterate the {@link Chain} of - * {@link Element}s in backwards direction i.e. starting - * from last one. - * - * @return an Iterator. - */ - Iterator reverseIterator(); - /** * Returns true if Chain is empty else false. * diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Element.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/Element.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Element.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/Element.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/ServerStore.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/ServerStore.java similarity index 95% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/ServerStore.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/ServerStore.java index e59bea92ac..ab62818b89 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/ServerStore.java +++ b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/ServerStore.java @@ -17,6 +17,8 @@ package org.ehcache.clustered.common.internal.store; import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.Map; import java.util.concurrent.TimeoutException; /** @@ -128,4 +130,11 @@ public interface ServerStore { * @throws TimeoutException if the get exceeds the timeout configured for write operations */ void clear() throws TimeoutException; + + /** + * Returns an iterator over the chains. + * + * @return an chain iterator. + */ + Iterator> iterator() throws TimeoutException; } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/ValueWrapper.java b/clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/ValueWrapper.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/ValueWrapper.java rename to clustered/ehcache-common-api/src/main/java/org/ehcache/clustered/common/internal/store/ValueWrapper.java diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/ServerSideConfigurationTest.java b/clustered/ehcache-common-api/src/test/java/org/ehcache/clustered/common/ServerSideConfigurationTest.java similarity index 100% rename from clustered/common/src/test/java/org/ehcache/clustered/common/ServerSideConfigurationTest.java rename to clustered/ehcache-common-api/src/test/java/org/ehcache/clustered/common/ServerSideConfigurationTest.java diff --git a/clustered/ehcache-common/build.gradle b/clustered/ehcache-common/build.gradle new file mode 100644 index 0000000000..0a5f41937d --- /dev/null +++ b/clustered/ehcache-common/build.gradle @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.clustered-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Common Clustering module' + description = 'The Common Clustering module of Ehcache 3' + } +} + +dependencies { + api project(':ehcache-api') + api project(':clustered:ehcache-common-api') + + implementation "org.terracotta:entity-common-api:$terracottaApisVersion" + implementation "org.terracotta:runnel:$terracottaPlatformVersion" + + testImplementation project(':clustered:test-utils') +} diff --git a/clustered/server/config/checkstyle-suppressions.xml b/clustered/ehcache-common/config/checkstyle-suppressions.xml similarity index 100% rename from clustered/server/config/checkstyle-suppressions.xml rename to clustered/ehcache-common/config/checkstyle-suppressions.xml diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/EhcacheEntityVersion.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/EhcacheEntityVersion.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/EhcacheEntityVersion.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/EhcacheEntityVersion.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/IllegalMessageException.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/IllegalMessageException.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/IllegalMessageException.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/IllegalMessageException.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidOperationException.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidOperationException.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidOperationException.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidOperationException.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java similarity index 87% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java index da21efeddb..14746b151f 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java @@ -16,7 +16,9 @@ package org.ehcache.clustered.common.internal.exceptions; -public class UnknownClusterException extends ClusterException{ +public class UnknownClusterException extends ClusterException { + + private static final long serialVersionUID = -2612856483315331382L; public UnknownClusterException(String message) { super(message); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java similarity index 91% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java index 544edee519..8f5b23d4f3 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java @@ -25,7 +25,7 @@ public class LockMessaging { private static final MessageCodec CODEC = new MessageCodec() { @Override - public byte[] encodeMessage(LockOperation message) throws MessageCodecException { + public byte[] encodeMessage(LockOperation message) { return new byte[] { (byte) message.getOperation().ordinal(), (byte) message.getHoldType().ordinal() @@ -33,12 +33,12 @@ public byte[] encodeMessage(LockOperation message) throws MessageCodecException } @Override - public LockOperation decodeMessage(byte[] bytes) throws MessageCodecException { + public LockOperation decodeMessage(byte[] bytes) { return new LockOperation(Operation.values()[bytes[0]], HoldType.values()[bytes[1]]); } @Override - public byte[] encodeResponse(LockTransition response) throws MessageCodecException { + public byte[] encodeResponse(LockTransition response) { if (response.isAcquired()) { return new byte[] {0x00}; } else if (response.isReleased()) { @@ -126,10 +126,10 @@ public boolean isReleased() { } public enum HoldType { - WRITE, READ; + WRITE, READ } public enum Operation { - ACQUIRE, TRY_ACQUIRE, RELEASE; + ACQUIRE, TRY_ACQUIRE, RELEASE } } diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/BaseCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/BaseCodec.java new file mode 100644 index 0000000000..b85360cb2e --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/BaseCodec.java @@ -0,0 +1,113 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.terracotta.runnel.EnumMapping; + +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.APPEND; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.CHAIN_REPLICATION_OP; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.CLEAR; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.CLEAR_INVALIDATION_COMPLETE; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.CLIENT_INVALIDATION_ACK; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.CLIENT_INVALIDATION_ALL_ACK; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.ENABLE_EVENT_LISTENER; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.ENTRY_SET; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.GET_AND_APPEND; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.GET_STATE_REPO; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.GET_STORE; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.INVALIDATION_COMPLETE; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.ITERATOR_ADVANCE; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.ITERATOR_CLOSE; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.ITERATOR_OPEN; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.LOCK; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_CATCHUP; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.PUT_IF_ABSENT; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.REPLACE; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.UNLOCK; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.VALIDATE; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.VALIDATE_SERVER_STORE; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.ALL_INVALIDATION_DONE; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.CLIENT_INVALIDATE_ALL; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.CLIENT_INVALIDATE_HASH; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.FAILURE; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.GET_RESPONSE; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.HASH_INVALIDATION_DONE; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.ITERATOR_BATCH; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.LOCK_FAILURE; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.LOCK_SUCCESS; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.MAP_VALUE; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.RESOLVE_REQUEST; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.SERVER_APPEND; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.SERVER_INVALIDATE_HASH; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.SUCCESS; +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +public class BaseCodec { + + public static final String MESSAGE_TYPE_FIELD_NAME = "opCode"; + public static final int MESSAGE_TYPE_FIELD_INDEX = 10; + public static final EnumMapping EHCACHE_MESSAGE_TYPES_ENUM_MAPPING = newEnumMappingBuilder(EhcacheMessageType.class) + .mapping(VALIDATE, 1) + .mapping(VALIDATE_SERVER_STORE, 2) + .mapping(EhcacheMessageType.PREPARE_FOR_DESTROY, 3) + + .mapping(GET_AND_APPEND, 21) + .mapping(APPEND, 22) + .mapping(REPLACE, 23) + .mapping(CLIENT_INVALIDATION_ACK, 24) + .mapping(CLIENT_INVALIDATION_ALL_ACK, 25) + .mapping(CLEAR, 26) + .mapping(GET_STORE, 27) + .mapping(LOCK, 28) + .mapping(UNLOCK, 29) + .mapping(ITERATOR_OPEN, 30) + .mapping(ITERATOR_CLOSE, 31) + .mapping(ITERATOR_ADVANCE, 32) + .mapping(ENABLE_EVENT_LISTENER, 33) + + .mapping(GET_STATE_REPO, 41) + .mapping(PUT_IF_ABSENT, 42) + .mapping(ENTRY_SET, 43) + + .mapping(CHAIN_REPLICATION_OP, 61) + .mapping(CLEAR_INVALIDATION_COMPLETE, 63) + .mapping(INVALIDATION_COMPLETE, 64) + + .mapping(MESSAGE_CATCHUP, 71) + .build(); + + public static final String RESPONSE_TYPE_FIELD_NAME = "opCode"; + public static final int RESPONSE_TYPE_FIELD_INDEX = 10; + public static final EnumMapping EHCACHE_RESPONSE_TYPES_ENUM_MAPPING = newEnumMappingBuilder(EhcacheResponseType.class) + .mapping(SUCCESS, 80) + .mapping(FAILURE, 81) + .mapping(GET_RESPONSE, 82) + .mapping(HASH_INVALIDATION_DONE, 83) + .mapping(ALL_INVALIDATION_DONE, 84) + .mapping(CLIENT_INVALIDATE_HASH, 85) + .mapping(CLIENT_INVALIDATE_ALL, 86) + .mapping(SERVER_INVALIDATE_HASH, 87) + .mapping(MAP_VALUE, 88) + .mapping(EhcacheResponseType.PREPARE_FOR_DESTROY, 89) + .mapping(RESOLVE_REQUEST, 90) + .mapping(LOCK_SUCCESS, 91) + .mapping(LOCK_FAILURE, 92) + .mapping(ITERATOR_BATCH, 93) + .mapping(SERVER_APPEND, 94) + .build(); + +} diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java new file mode 100644 index 0000000000..b66ba41de7 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java @@ -0,0 +1,131 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.SequencedElement; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.nio.ByteBuffer; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.ehcache.clustered.common.internal.util.ChainBuilder.chainFromList; + +public final class ChainCodec { + + private ChainCodec() { + //no implementations please + } + + private static final Struct ELEMENT_STRUCT = StructBuilder.newStructBuilder() + .int64("sequence", 10) + .byteBuffer("payload", 20) + .build(); + + public static final Struct CHAIN_STRUCT = StructBuilder.newStructBuilder() + .structs("elements", 10, ELEMENT_STRUCT) + .build(); + + public static final Struct CHAIN_ENTRY_STRUCT = StructBuilder.newStructBuilder() + .int64("key", 5) + .structs("elements", 10, ELEMENT_STRUCT) + .build(); + + public static byte[] encodeChain(Chain chain) { + StructEncoder encoder = CHAIN_STRUCT.encoder(); + + encodeChain(encoder, chain); + + ByteBuffer byteBuffer = encoder.encode(); + return byteBuffer.array(); + } + + public static void encodeChain(StructEncoder encoder, Chain chain) { + StructArrayEncoder> elementsEncoder = encoder.structs("elements"); + for (Element element : chain) { + StructEncoder elementEncoder = elementsEncoder.add(); + if (element instanceof SequencedElement) { + elementEncoder.int64("sequence", ((SequencedElement) element).getSequenceNumber()); + } + elementEncoder.byteBuffer("payload", element.getPayload()); + elementEncoder.end(); + } + elementsEncoder.end(); + } + + public static void encodeChainEntry(StructEncoder encoder, Map.Entry chain) { + encoder.int64("key", chain.getKey()); + encodeChain(encoder, chain.getValue()); + } + + public static Chain decodeChain(byte[] payload) { + StructDecoder decoder = CHAIN_STRUCT.decoder(ByteBuffer.wrap(payload)); + return decodeChain(decoder); + } + + public static Chain decodeChain(StructDecoder decoder) { + StructArrayDecoder> elementsDecoder = decoder.structs("elements"); + + final List elements = new ArrayList<>(); + for (int i = 0; i < elementsDecoder.length(); i++) { + StructDecoder elementDecoder = elementsDecoder.next(); + Long sequence = elementDecoder.int64("sequence"); + ByteBuffer byteBuffer = elementDecoder.byteBuffer("payload"); + elementDecoder.end(); + + if (sequence == null) { + elements.add(byteBuffer::asReadOnlyBuffer); + } else { + elements.add(new SequencedElement() { + @Override + public long getSequenceNumber() { + return sequence; + } + + @Override + public ByteBuffer getPayload() { + return byteBuffer.asReadOnlyBuffer(); + } + + @Override + public String toString() { + return "SequencedElement{sequence=" + sequence + " size=" + byteBuffer.capacity() + "}"; + } + }); + } + } + + elementsDecoder.end(); + + return chainFromList(elements); + } + + public static Map.Entry decodeChainEntry(StructDecoder decoder) { + Long key = decoder.int64("key"); + Chain elements = decodeChain(decoder); + return new AbstractMap.SimpleImmutableEntry<>(key, elements); + } +} diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ClusterTierReconnectMessage.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ClusterTierReconnectMessage.java new file mode 100644 index 0000000000..129cca97ea --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ClusterTierReconnectMessage.java @@ -0,0 +1,69 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import java.util.HashSet; +import java.util.Set; + +public class ClusterTierReconnectMessage { + + private final Set hashInvalidationsInProgress; + private boolean clearInProgress = false; + private final Set locksHeld; + private boolean eventsEnabled; + + public ClusterTierReconnectMessage(boolean eventsEnabled) { + this.eventsEnabled = eventsEnabled; + hashInvalidationsInProgress = new HashSet<>(); + locksHeld = new HashSet<>(); + } + + public ClusterTierReconnectMessage(Set hashInvalidationsInProgress, Set locksHeld, boolean clearInProgress, boolean eventsEnabled) { + this.hashInvalidationsInProgress = hashInvalidationsInProgress; + this.locksHeld = locksHeld; + this.clearInProgress = clearInProgress; + this.eventsEnabled = eventsEnabled; + } + + public void addInvalidationsInProgress(Set hashInvalidationsInProgress) { + this.hashInvalidationsInProgress.addAll(hashInvalidationsInProgress); + } + + public void addLocksHeld(Set locksHeld) { + this.locksHeld.addAll(locksHeld); + } + + public Set getInvalidationsInProgress() { + return hashInvalidationsInProgress; + } + + public void clearInProgress() { + clearInProgress = true; + } + + public boolean isClearInProgress() { + return clearInProgress; + } + + public Set getLocksHeld() { + return locksHeld; + } + + public boolean isEventsEnabled() { + return eventsEnabled; + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java similarity index 85% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java index b03c732fc8..1175b5b38d 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java @@ -32,6 +32,7 @@ import java.util.HashMap; import java.util.Map; +import java.util.Optional; import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; import static org.terracotta.runnel.StructBuilder.newStructBuilder; @@ -49,11 +50,13 @@ public class CommonConfigCodec implements ConfigCodec { private static final String STORE_CONFIG_VALUE_TYPE_FIELD = "valueType"; private static final String STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD = "valueSerializerType"; private static final String STORE_CONFIG_CONSISTENCY_FIELD = "consistency"; - private static final String POOL_SIZE_FIELD = "poolSize"; - private static final String POOL_RESOURCE_NAME_FIELD = "resourceName"; + public static final String POOL_SIZE_FIELD = "poolSize"; + public static final String POOL_RESOURCE_NAME_FIELD = "resourceName"; private static final String DEFAULT_RESOURCE_FIELD = "defaultResource"; private static final String POOLS_SUB_STRUCT = "pools"; private static final String POOL_NAME_FIELD = "poolName"; + private static final String LOADER_WRITER_CONFIGURED_FIELD = "loaderWriterConfigured"; + private static final String WRITE_BEHIND_CONFIGURED_FIELD = "writeBehindConfigured"; private static final EnumMapping CONSISTENCY_ENUM_MAPPING = newEnumMappingBuilder(Consistency.class) .mapping(Consistency.EVENTUAL, 1) @@ -67,18 +70,25 @@ public class CommonConfigCodec implements ConfigCodec { @Override public InjectTuple injectServerStoreConfiguration(StructBuilder baseBuilder, final int index) { + //this needs to be returned whenever the index for builder is changed, so that + //other injecting places get the correct last index for adding structs to codec + int lastIndexToReturn = index + 30; final StructBuilder structBuilder = baseBuilder.string(STORE_CONFIG_KEY_TYPE_FIELD, index) .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, index + 10) .string(STORE_CONFIG_VALUE_TYPE_FIELD, index + 11) .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, index + 15) .enm(STORE_CONFIG_CONSISTENCY_FIELD, index + 16, CONSISTENCY_ENUM_MAPPING) + .bool(LOADER_WRITER_CONFIGURED_FIELD, index + 17) + .bool(WRITE_BEHIND_CONFIGURED_FIELD, index + 18) + // keep poolsize and resource name last .int64(POOL_SIZE_FIELD, index + 20) - .string(POOL_RESOURCE_NAME_FIELD, index + 30); + .string(POOL_RESOURCE_NAME_FIELD, lastIndexToReturn); + return new InjectTuple() { @Override public int getLastIndex() { - return index + 30; + return lastIndexToReturn; } @Override @@ -116,6 +126,9 @@ public void encodeServerStoreConfiguration(PrimitiveEncodingSupport encoder, encoder.enm(STORE_CONFIG_CONSISTENCY_FIELD, configuration.getConsistency()); } + encoder.bool(LOADER_WRITER_CONFIGURED_FIELD, configuration.isLoaderWriterConfigured()); + encoder.bool(WRITE_BEHIND_CONFIGURED_FIELD, configuration.isWriteBehindConfigured()); + PoolAllocation poolAllocation = configuration.getPoolAllocation(); if (poolAllocation instanceof PoolAllocation.Dedicated) { PoolAllocation.Dedicated dedicatedPool = (PoolAllocation.Dedicated) poolAllocation; @@ -139,6 +152,9 @@ public ServerStoreConfiguration decodeServerStoreConfiguration(PrimitiveDecoding if (consistencyEnm.isValid()) { consistency = consistencyEnm.get(); } + Boolean loaderWriterConfigured = decoder.bool(LOADER_WRITER_CONFIGURED_FIELD); + Boolean writeBehindConfigured = decoder.bool(WRITE_BEHIND_CONFIGURED_FIELD); + Long poolSize = decoder.int64(POOL_SIZE_FIELD); String poolResource = decoder.string(POOL_RESOURCE_NAME_FIELD); PoolAllocation poolAllocation = new PoolAllocation.Unknown(); @@ -147,7 +163,13 @@ public ServerStoreConfiguration decodeServerStoreConfiguration(PrimitiveDecoding } else if (poolResource != null) { poolAllocation = new PoolAllocation.Shared(poolResource); } - return new ServerStoreConfiguration(poolAllocation, keyType, valueType, keySerializer, valueSerializer, consistency); + + return new ServerStoreConfiguration(poolAllocation, keyType, valueType, keySerializer, valueSerializer, consistency, + getNonNullBoolean(loaderWriterConfigured), getNonNullBoolean(writeBehindConfigured)); + } + + private static Boolean getNonNullBoolean(Boolean loaderWriterConfigured) { + return Optional.ofNullable(loaderWriterConfigured).orElse(false); } @Override diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java similarity index 87% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java index 0bd585dcdc..932735aee1 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java @@ -26,9 +26,9 @@ import java.nio.ByteBuffer; import static java.nio.ByteBuffer.wrap; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_NAME; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; @@ -70,7 +70,7 @@ public byte[] encodeMessage(EhcacheEntityMessage message) { } @Override - public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecException { + public EhcacheEntityMessage decodeMessage(byte[] payload) { ByteBuffer byteBuffer = wrap(payload); Enm opCodeEnm = OP_CODE_DECODER.decoder(byteBuffer).enm("opCode"); @@ -101,12 +101,12 @@ public EhcacheEntityMessage decodeMessage(ByteBuffer byteBuffer, EhcacheMessageT } @Override - public byte[] encodeResponse(EhcacheEntityResponse response) throws MessageCodecException { + public byte[] encodeResponse(EhcacheEntityResponse response) { return responseCodec.encode(response); } @Override - public EhcacheEntityResponse decodeResponse(byte[] payload) throws MessageCodecException { + public EhcacheEntityResponse decodeResponse(byte[] payload) { return responseCodec.decode(payload); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EntityConfigurationCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/EntityConfigurationCodec.java similarity index 82% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EntityConfigurationCodec.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/EntityConfigurationCodec.java index 150457fbbc..e9edfc9478 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EntityConfigurationCodec.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/EntityConfigurationCodec.java @@ -25,6 +25,8 @@ import org.terracotta.runnel.decoding.StructDecoder; import org.terracotta.runnel.encoding.StructEncoder; +import java.nio.ByteBuffer; + import static java.nio.ByteBuffer.wrap; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; import static org.terracotta.runnel.StructBuilder.newStructBuilder; @@ -36,28 +38,31 @@ public class EntityConfigurationCodec { private static final String IDENTIFIER = "identifier"; - private final StructBuilder tierManagerConfigurationBaseStruct = newStructBuilder() - .string(IDENTIFIER, 10); - private final StructBuilder clusteredStoreConfigurationBaseStruct = newStructBuilder() - .string(IDENTIFIER, 10) - .string(SERVER_STORE_NAME_FIELD, 20); - private final ConfigCodec configCodec; private final Struct tierManagerConfigurationStruct; private final Struct clusteredStoreConfigurationStruct; public EntityConfigurationCodec(ConfigCodec configCodec) { this.configCodec = configCodec; + + StructBuilder tierManagerConfigurationBaseStruct = newStructBuilder() + .string(IDENTIFIER, 10); + tierManagerConfigurationStruct = configCodec.injectServerSideConfiguration(tierManagerConfigurationBaseStruct, 10) .getUpdatedBuilder() .build(); + + StructBuilder clusteredStoreConfigurationBaseStruct = newStructBuilder() + .string(IDENTIFIER, 10) + .string(SERVER_STORE_NAME_FIELD, 20); + clusteredStoreConfigurationStruct = configCodec.injectServerStoreConfiguration(clusteredStoreConfigurationBaseStruct, 30) .getUpdatedBuilder() .build(); } public byte[] encode(ClusterTierEntityConfiguration configuration) { - StructEncoder encoder = clusteredStoreConfigurationStruct.encoder(); + StructEncoder encoder = clusteredStoreConfigurationStruct.encoder(); encoder.string(IDENTIFIER, configuration.getManagerIdentifier()) .string(SERVER_STORE_NAME_FIELD, configuration.getStoreIdentifier()); configCodec.encodeServerStoreConfiguration(encoder, configuration.getConfiguration()); @@ -65,7 +70,11 @@ public byte[] encode(ClusterTierEntityConfiguration configuration) { } public ClusterTierEntityConfiguration decodeClusteredStoreConfiguration(byte[] configuration) { - StructDecoder decoder = clusteredStoreConfigurationStruct.decoder(wrap(configuration)); + return decodeClusteredStoreConfiguration(wrap(configuration)); + } + + public ClusterTierEntityConfiguration decodeClusteredStoreConfiguration(ByteBuffer buffer) { + StructDecoder decoder = clusteredStoreConfigurationStruct.decoder(buffer); String managerIdentifier = decoder.string(IDENTIFIER); if (managerIdentifier == null) { throw new IllegalArgumentException("Payload is an invalid content"); @@ -75,14 +84,14 @@ public ClusterTierEntityConfiguration decodeClusteredStoreConfiguration(byte[] c return new ClusterTierEntityConfiguration(managerIdentifier, storeIdentifier, serverStoreConfiguration); } public byte[] encode(ClusterTierManagerConfiguration configuration) { - StructEncoder encoder = tierManagerConfigurationStruct.encoder(); + StructEncoder encoder = tierManagerConfigurationStruct.encoder(); encoder.string(IDENTIFIER, configuration.getIdentifier()); configCodec.encodeServerSideConfiguration(encoder, configuration.getConfiguration()); return encoder.encode().array(); } public ClusterTierManagerConfiguration decodeClusterTierManagerConfiguration(byte[] payload) { - StructDecoder decoder = tierManagerConfigurationStruct.decoder(wrap(payload)); + StructDecoder decoder = tierManagerConfigurationStruct.decoder(wrap(payload)); String identifier = decoder.string(IDENTIFIER); if (identifier == null) { throw new IllegalArgumentException("Payload is an invalid content"); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java similarity index 92% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java index 7499986ed7..8d4b3dc83d 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java @@ -26,7 +26,6 @@ import org.terracotta.runnel.decoding.StructDecoder; import org.terracotta.runnel.encoding.StructArrayEncoder; import org.terracotta.runnel.encoding.StructEncoder; -import org.terracotta.runnel.encoding.StructEncoderFunction; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; @@ -92,10 +91,10 @@ public static ClusterException decode(StructDecoder> decoder element.end(); } arrayDecoder.end(); - Class clazz = null; - ClusterException exception = null; + Class clazz = null; + ClusterException exception; try { - clazz = Class.forName(exceptionClassName); + clazz = Class.forName(exceptionClassName).asSubclass(ClusterException.class); } catch (ClassNotFoundException e) { LOGGER.error("Exception type not found", e); } @@ -108,12 +107,12 @@ public static ClusterException decode(StructDecoder> decoder } @SuppressWarnings("unchecked") - private static ClusterException getClusterException(String message, Class clazz) { + private static ClusterException getClusterException(String message, Class clazz) { ClusterException exception = null; if (clazz != null) { try { - Constructor declaredConstructor = clazz.getDeclaredConstructor(String.class); - exception = (ClusterException)declaredConstructor.newInstance(message); + Constructor declaredConstructor = clazz.getDeclaredConstructor(String.class); + exception = declaredConstructor.newInstance(message); } catch (NoSuchMethodException | InvocationTargetException | InstantiationException | IllegalAccessException e) { LOGGER.error("Failed to instantiate exception object.", e); } diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java new file mode 100644 index 0000000000..0841cfb534 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java @@ -0,0 +1,145 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.nio.ByteBuffer; + +import static org.ehcache.clustered.common.internal.messages.BaseCodec.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.encodeMandatoryFields; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +public class LifeCycleMessageCodec { + + private static final String CONFIG_PRESENT_FIELD = "configPresent"; + + private static final int CONFIGURE_MESSAGE_NEXT_INDEX = 40; + private static final int VALIDATE_STORE_NEXT_INDEX = 40; + + private final Struct PREPARE_FOR_DESTROY_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .build(); + + private final Struct validateMessageStruct; + private final Struct validateStoreMessageStruct; + + private final ConfigCodec configCodec; + + public LifeCycleMessageCodec(ConfigCodec configCodec) { + this.configCodec = configCodec; + + StructBuilder validateMessageStructBuilderPrefix = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .bool(CONFIG_PRESENT_FIELD, 30); + + validateMessageStruct = this.configCodec.injectServerSideConfiguration( + validateMessageStructBuilderPrefix, CONFIGURE_MESSAGE_NEXT_INDEX).getUpdatedBuilder().build(); + + StructBuilder validateStoreMessageStructBuilderPrefix = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 30); + + validateStoreMessageStruct = this.configCodec.injectServerStoreConfiguration( + validateStoreMessageStructBuilderPrefix, VALIDATE_STORE_NEXT_INDEX).getUpdatedBuilder().build(); + } + + public byte[] encode(LifecycleMessage message) { + switch (message.getMessageType()) { + case VALIDATE: + return encodeTierManagerValidateMessage((LifecycleMessage.ValidateStoreManager) message); + case VALIDATE_SERVER_STORE: + return encodeValidateStoreMessage((LifecycleMessage.ValidateServerStore) message); + case PREPARE_FOR_DESTROY: + return encodePrepareForDestroyMessage(message); + default: + throw new IllegalArgumentException("Unknown lifecycle message: " + message.getClass()); + } + } + + private byte[] encodePrepareForDestroyMessage(LifecycleMessage message) { + return encodeMandatoryFields(PREPARE_FOR_DESTROY_STRUCT, message).encode().array(); + } + + private byte[] encodeValidateStoreMessage(LifecycleMessage.ValidateServerStore message) { + StructEncoder encoder = encodeMandatoryFields(validateStoreMessageStruct, message); + + encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); + configCodec.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); + return encoder.encode().array(); + } + + private byte[] encodeTierManagerValidateMessage(LifecycleMessage.ValidateStoreManager message) { + StructEncoder encoder = encodeMandatoryFields(validateMessageStruct, message); + ServerSideConfiguration config = message.getConfiguration(); + if (config == null) { + encoder.bool(CONFIG_PRESENT_FIELD, false); + } else { + encoder.bool(CONFIG_PRESENT_FIELD, true); + configCodec.encodeServerSideConfiguration(encoder, config); + } + return encoder.encode().array(); + } + + public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { + + switch (messageType) { + case VALIDATE: + return decodeValidateMessage(messageBuffer); + case VALIDATE_SERVER_STORE: + return decodeValidateServerStoreMessage(messageBuffer); + case PREPARE_FOR_DESTROY: + return decodePrepareForDestroyMessage(); + default: + throw new IllegalArgumentException("LifeCycleMessage operation not defined for : " + messageType); + } + } + + private LifecycleMessage.PrepareForDestroy decodePrepareForDestroyMessage() { + return new LifecycleMessage.PrepareForDestroy(); + } + + private LifecycleMessage.ValidateServerStore decodeValidateServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = validateStoreMessageStruct.decoder(messageBuffer); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + ServerStoreConfiguration config = configCodec.decodeServerStoreConfiguration(decoder); + + return new LifecycleMessage.ValidateServerStore(storeName, config); + } + + private LifecycleMessage.ValidateStoreManager decodeValidateMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = validateMessageStruct.decoder(messageBuffer); + + boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); + + ServerSideConfiguration config = null; + if (configPresent) { + config = configCodec.decodeServerSideConfiguration(decoder); + } + + return new LifecycleMessage.ValidateStoreManager(config); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java similarity index 88% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java index 174800b8f1..b479a02cd8 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java @@ -23,8 +23,12 @@ public abstract class LifecycleMessage extends EhcacheOperationMessage implements Serializable { + private static final long serialVersionUID = -5877907682623164227L; + public static class ValidateStoreManager extends LifecycleMessage { + private static final long serialVersionUID = -7459333332357106170L; + private final ServerSideConfiguration configuration; ValidateStoreManager(ServerSideConfiguration config) { @@ -46,6 +50,8 @@ public ServerSideConfiguration getConfiguration() { */ public static class ValidateServerStore extends LifecycleMessage { + private static final long serialVersionUID = -7271460156539083757L; + private final String name; private final ServerStoreConfiguration storeConfiguration; @@ -69,6 +75,9 @@ public EhcacheMessageType getMessageType() { } public static class PrepareForDestroy extends LifecycleMessage { + + private static final long serialVersionUID = -680257947889507297L; + @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.PREPARE_FOR_DESTROY; diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java new file mode 100644 index 0000000000..4b37ece7e1 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.encoding.StructEncoder; + +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_NAME; + +/** + * MessageCodecUtils + */ +public final class MessageCodecUtils { + + public static final String SERVER_STORE_NAME_FIELD = "serverStoreName"; + public static final String KEY_FIELD = "key"; + + private MessageCodecUtils() {} + + public static StructEncoder encodeMandatoryFields(Struct struct, EhcacheOperationMessage message) { + return struct.encoder().enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()); + } +} diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java new file mode 100644 index 0000000000..d21fbb6ce6 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java @@ -0,0 +1,84 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.ArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.ArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.util.HashSet; +import java.util.Set; + +import static java.nio.ByteBuffer.wrap; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +public class ReconnectMessageCodec { + + private static final String HASH_INVALIDATION_IN_PROGRESS_FIELD = "hashInvalidationInProgress"; + private static final String CLEAR_IN_PROGRESS_FIELD = "clearInProgress"; + private static final String LOCKS_HELD_FIELD = "locksHeld"; + private static final String EVENTS_ENABLED_FIELD = "eventsEnabled"; + + private static final Struct CLUSTER_TIER_RECONNECT_MESSAGE_STRUCT = newStructBuilder() + .int64s(HASH_INVALIDATION_IN_PROGRESS_FIELD, 20) + .bool(CLEAR_IN_PROGRESS_FIELD, 30) + .int64s(LOCKS_HELD_FIELD, 40) + .bool(EVENTS_ENABLED_FIELD, 50) // added in 10.5.0 + .build(); + + public byte[] encode(ClusterTierReconnectMessage reconnectMessage) { + StructEncoder encoder = CLUSTER_TIER_RECONNECT_MESSAGE_STRUCT.encoder(); + ArrayEncoder> arrayEncoder = encoder.int64s(HASH_INVALIDATION_IN_PROGRESS_FIELD); + reconnectMessage.getInvalidationsInProgress().forEach(arrayEncoder::value); + encoder.bool(CLEAR_IN_PROGRESS_FIELD, reconnectMessage.isClearInProgress()); + ArrayEncoder> locksHeldEncoder = encoder.int64s(LOCKS_HELD_FIELD); + reconnectMessage.getLocksHeld().forEach(locksHeldEncoder::value); + encoder.bool(EVENTS_ENABLED_FIELD, reconnectMessage.isEventsEnabled()); + return encoder.encode().array(); + } + + public ClusterTierReconnectMessage decode(byte[] payload) { + StructDecoder decoder = CLUSTER_TIER_RECONNECT_MESSAGE_STRUCT.decoder(wrap(payload)); + ArrayDecoder> arrayDecoder = decoder.int64s(HASH_INVALIDATION_IN_PROGRESS_FIELD); + + Set hashes = decodeLongs(arrayDecoder); + + Boolean clearInProgress = decoder.bool(CLEAR_IN_PROGRESS_FIELD); + + ArrayDecoder> locksHeldDecoder = decoder.int64s(LOCKS_HELD_FIELD); + Set locks = decodeLongs(locksHeldDecoder); + + Boolean eventsEnabled = decoder.bool(EVENTS_ENABLED_FIELD); + + return new ClusterTierReconnectMessage(hashes, locks, clearInProgress != null ? clearInProgress : false, eventsEnabled != null ? eventsEnabled : false); + } + + private static Set decodeLongs(ArrayDecoder> decoder) { + Set longs; + if (decoder != null) { + longs = new HashSet<>(decoder.length()); + for (int i = 0; i < decoder.length(); i++) { + longs.add(decoder.value()); + } + } else { + longs = new HashSet<>(0); + } + return longs; + } +} diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java new file mode 100644 index 0000000000..d082a095c6 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java @@ -0,0 +1,354 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.PrepareForDestroy; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ResolveRequest; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.ArrayDecoder; +import org.terracotta.runnel.decoding.Enm; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.ArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.EHCACHE_RESPONSE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.RESPONSE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.RESPONSE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_ENTRY_STRUCT; +import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.allInvalidationDone; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateAll; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateHash; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.failure; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.getResponse; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.hashInvalidationDone; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.lockFailure; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.mapValue; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.prepareForDestroy; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.resolveRequest; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverAppend; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverInvalidateHash; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.success; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.StateRepositoryOpCodec.WHITELIST_PREDICATE; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +public class ResponseCodec { + + private static final String EXCEPTION_FIELD = "exception"; + private static final String INVALIDATION_ID_FIELD = "invalidationId"; + private static final String CHAIN_FIELD = "chain"; + private static final String APPENDED_FIELD = "appended"; + private static final String MAP_VALUE_FIELD = "mapValue"; + private static final String STORES_FIELD = "stores"; + + private static final Struct SUCCESS_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .build(); + private static final Struct FAILURE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .struct(EXCEPTION_FIELD, 20, ExceptionCodec.EXCEPTION_STRUCT) + .build(); + private static final Struct GET_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .struct(CHAIN_FIELD, 20, CHAIN_STRUCT) + .build(); + private static final Struct HASH_INVALIDATION_DONE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 20) + .build(); + private static final Struct ALL_INVALIDATION_DONE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .build(); + private static final Struct CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 20) + .int32(INVALIDATION_ID_FIELD, 30) + .build(); + private static final Struct CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .int32(INVALIDATION_ID_FIELD, 20) + .build(); + private static final Struct SERVER_INVALIDATE_HASH_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 20) + .struct(CHAIN_FIELD, 30, CHAIN_STRUCT) // added in version 10.5.0 + .build(); + private static final Struct SERVER_APPEND_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .byteBuffer(APPENDED_FIELD, 20) + .struct(CHAIN_FIELD, 30, CHAIN_STRUCT) + .build(); + private static final Struct MAP_VALUE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .byteBuffer(MAP_VALUE_FIELD, 20) + .build(); + private static final Struct PREPARE_FOR_DESTROY_RESPONSE_STRUCT = newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .strings(STORES_FIELD, 20) + .build(); + private static final Struct RESOLVE_REQUEST_RESPONSE_STRUCT = newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 20) + .struct(CHAIN_FIELD, 30, CHAIN_STRUCT) + .build(); + private static final Struct LOCK_RESPONSE_STRUCT = newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .struct(CHAIN_FIELD, 20, CHAIN_STRUCT) + .build(); + private static final Struct ITERATOR_BATCH_STRUCT = newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string("id", 20) + .structs("chains", 30, CHAIN_ENTRY_STRUCT) + .bool("last", 40) + .build(); + + public byte[] encode(EhcacheEntityResponse response) { + switch (response.getResponseType()) { + case FAILURE: + final EhcacheEntityResponse.Failure failure = (EhcacheEntityResponse.Failure)response; + return FAILURE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, failure.getResponseType()) + .struct(EXCEPTION_FIELD, failure.getCause(), ExceptionCodec::encode) + .encode().array(); + case SUCCESS: + return SUCCESS_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, response.getResponseType()) + .encode().array(); + case GET_RESPONSE: + final EhcacheEntityResponse.GetResponse getResponse = (EhcacheEntityResponse.GetResponse)response; + return GET_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, getResponse.getResponseType()) + .struct(CHAIN_FIELD, getResponse.getChain(), ChainCodec::encodeChain) + .encode().array(); + case HASH_INVALIDATION_DONE: { + EhcacheEntityResponse.HashInvalidationDone hashInvalidationDone = (EhcacheEntityResponse.HashInvalidationDone) response; + return HASH_INVALIDATION_DONE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, hashInvalidationDone.getResponseType()) + .int64(KEY_FIELD, hashInvalidationDone.getKey()) + .encode().array(); + } + case ALL_INVALIDATION_DONE: { + EhcacheEntityResponse.AllInvalidationDone allInvalidationDone = (EhcacheEntityResponse.AllInvalidationDone) response; + return ALL_INVALIDATION_DONE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, allInvalidationDone.getResponseType()) + .encode().array(); + } + case CLIENT_INVALIDATE_HASH: { + EhcacheEntityResponse.ClientInvalidateHash clientInvalidateHash = (EhcacheEntityResponse.ClientInvalidateHash) response; + return CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, clientInvalidateHash.getResponseType()) + .int64(KEY_FIELD, clientInvalidateHash.getKey()) + .int32(INVALIDATION_ID_FIELD, clientInvalidateHash.getInvalidationId()) + .encode().array(); + } + case CLIENT_INVALIDATE_ALL: { + EhcacheEntityResponse.ClientInvalidateAll clientInvalidateAll = (EhcacheEntityResponse.ClientInvalidateAll) response; + return CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, clientInvalidateAll.getResponseType()) + .int32(INVALIDATION_ID_FIELD, clientInvalidateAll.getInvalidationId()) + .encode().array(); + } + case SERVER_APPEND: { + EhcacheEntityResponse.ServerAppend serverAppend = (EhcacheEntityResponse.ServerAppend) response; + return SERVER_APPEND_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, serverAppend.getResponseType()) + .byteBuffer(APPENDED_FIELD, serverAppend.getAppended()) + .struct(CHAIN_FIELD, serverAppend.getBeforeAppend(), ChainCodec::encodeChain) + .encode().array(); + } + case SERVER_INVALIDATE_HASH: { + EhcacheEntityResponse.ServerInvalidateHash serverInvalidateHash = (EhcacheEntityResponse.ServerInvalidateHash) response; + StructEncoder encoder = SERVER_INVALIDATE_HASH_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, serverInvalidateHash.getResponseType()) + .int64(KEY_FIELD, serverInvalidateHash.getKey()); + if (serverInvalidateHash.getEvictedChain() != null) { + encoder.struct(CHAIN_FIELD, serverInvalidateHash.getEvictedChain(), ChainCodec::encodeChain); + } + return encoder.encode().array(); + } + case MAP_VALUE: { + EhcacheEntityResponse.MapValue mapValue = (EhcacheEntityResponse.MapValue) response; + byte[] encodedMapValue = Util.marshall(mapValue.getValue()); + return MAP_VALUE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, mapValue.getResponseType()) + .byteBuffer(MAP_VALUE_FIELD, wrap(encodedMapValue)) + .encode().array(); + } + case PREPARE_FOR_DESTROY: { + PrepareForDestroy prepare = (PrepareForDestroy) response; + StructEncoder encoder = PREPARE_FOR_DESTROY_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, prepare.getResponseType()); + ArrayEncoder> storesEncoder = encoder.strings(STORES_FIELD); + for (String storeName : prepare.getStores()) { + storesEncoder.value(storeName); + } + return encoder + .encode().array(); + } + case RESOLVE_REQUEST: { + ResolveRequest resolve = (ResolveRequest) response; + return RESOLVE_REQUEST_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, resolve.getResponseType()) + .int64(KEY_FIELD, resolve.getKey()) + .struct(CHAIN_FIELD, resolve.getChain(), ChainCodec::encodeChain) + .encode().array(); + } + case LOCK_SUCCESS: { + EhcacheEntityResponse.LockSuccess lockSuccess = (EhcacheEntityResponse.LockSuccess) response; + return LOCK_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, lockSuccess.getResponseType()) + .struct(CHAIN_FIELD, lockSuccess.getChain(), ChainCodec::encodeChain) + .encode().array(); + } + case LOCK_FAILURE: { + EhcacheEntityResponse.LockFailure lockFailure = (EhcacheEntityResponse.LockFailure) response; + return LOCK_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, lockFailure.getResponseType()) + .encode().array(); + } + case ITERATOR_BATCH: { + EhcacheEntityResponse.IteratorBatch iteratorBatch = (EhcacheEntityResponse.IteratorBatch) response; + return ITERATOR_BATCH_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, iteratorBatch.getResponseType()) + .string("id", iteratorBatch.getIdentity().toString()) + .structs("chains", iteratorBatch.getChains(), ChainCodec::encodeChainEntry) + .bool("last", iteratorBatch.isLast()) + .encode().array(); + } + default: + throw new UnsupportedOperationException("The operation is not supported : " + response.getResponseType()); + } + } + + public EhcacheEntityResponse decode(byte[] payload) { + ByteBuffer buffer = wrap(payload); + StructDecoder decoder = SUCCESS_RESPONSE_STRUCT.decoder(buffer); + Enm opCodeEnm = decoder.enm(RESPONSE_TYPE_FIELD_NAME); + + if (!opCodeEnm.isFound()) { + throw new AssertionError("Got a response without an opCode"); + } + if (!opCodeEnm.isValid()) { + // Need to ignore the response here as we do not understand its type - coming from the future? + return null; + } + + EhcacheResponseType opCode = opCodeEnm.get(); + buffer.rewind(); + switch (opCode) { + case SUCCESS: + return success(); + case FAILURE: + decoder = FAILURE_RESPONSE_STRUCT.decoder(buffer); + ClusterException exception = ExceptionCodec.decode(decoder.struct(EXCEPTION_FIELD)); + return failure(exception.withClientStackTrace()); + case GET_RESPONSE: + decoder = GET_RESPONSE_STRUCT.decoder(buffer); + return getResponse(ChainCodec.decodeChain(decoder.struct(CHAIN_FIELD))); + case HASH_INVALIDATION_DONE: { + decoder = HASH_INVALIDATION_DONE_RESPONSE_STRUCT.decoder(buffer); + long key = decoder.int64(KEY_FIELD); + return hashInvalidationDone(key); + } + case ALL_INVALIDATION_DONE: { + return allInvalidationDone(); + } + case SERVER_APPEND: { + decoder = SERVER_APPEND_RESPONSE_STRUCT.decoder(buffer); + ByteBuffer appended = decoder.byteBuffer(APPENDED_FIELD); + StructDecoder> chainDecoder = decoder.struct(CHAIN_FIELD); + Chain chain = chainDecoder == null ? null : ChainCodec.decodeChain(chainDecoder); + return serverAppend(appended, chain); + } + case CLIENT_INVALIDATE_HASH: { + decoder = CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT.decoder(buffer); + long key = decoder.int64(KEY_FIELD); + int invalidationId = decoder.int32(INVALIDATION_ID_FIELD); + return clientInvalidateHash(key, invalidationId); + } + case CLIENT_INVALIDATE_ALL: { + decoder = CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT.decoder(buffer); + int invalidationId = decoder.int32(INVALIDATION_ID_FIELD); + return clientInvalidateAll(invalidationId); + } + case SERVER_INVALIDATE_HASH: { + decoder = SERVER_INVALIDATE_HASH_RESPONSE_STRUCT.decoder(buffer); + long key = decoder.int64(KEY_FIELD); + StructDecoder> chainDecoder = decoder.struct(CHAIN_FIELD); + Chain evictedChain = chainDecoder == null ? null : ChainCodec.decodeChain(chainDecoder); + return serverInvalidateHash(key, evictedChain); + } + case MAP_VALUE: { + decoder = MAP_VALUE_RESPONSE_STRUCT.decoder(buffer); + return mapValue( + Util.unmarshall(decoder.byteBuffer(MAP_VALUE_FIELD), WHITELIST_PREDICATE)); + } + case PREPARE_FOR_DESTROY: { + decoder = PREPARE_FOR_DESTROY_RESPONSE_STRUCT.decoder(buffer); + ArrayDecoder> storesDecoder = decoder.strings(STORES_FIELD); + Set stores = new HashSet<>(); + for (int i = 0; i < storesDecoder.length(); i++) { + stores.add(storesDecoder.value()); + } + return prepareForDestroy(stores); + } + case RESOLVE_REQUEST: { + decoder = RESOLVE_REQUEST_RESPONSE_STRUCT.decoder(buffer); + long key = decoder.int64(KEY_FIELD); + Chain chain = ChainCodec.decodeChain(decoder.struct(CHAIN_FIELD)); + return resolveRequest(key, chain); + } + case LOCK_SUCCESS: { + decoder = LOCK_RESPONSE_STRUCT.decoder(buffer); + Chain chain = ChainCodec.decodeChain(decoder.struct(CHAIN_FIELD)); + return new EhcacheEntityResponse.LockSuccess(chain); + } + case LOCK_FAILURE: { + return lockFailure(); + } + case ITERATOR_BATCH: { + decoder = ITERATOR_BATCH_STRUCT.decoder(buffer); + UUID id = UUID.fromString(decoder.string("id")); + StructArrayDecoder> chainsDecoder = decoder.structs("chains"); + List> chains = new ArrayList<>(chainsDecoder.length()); + while (chainsDecoder.hasNext()) { + chains.add(ChainCodec.decodeChainEntry(chainsDecoder.next())); + } + boolean last = decoder.bool("last"); + return EhcacheEntityResponse.iteratorBatchResponse(id, chains, last); + } + + default: + throw new UnsupportedOperationException("The operation is not supported with opCode : " + opCode); + } + } +} diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java new file mode 100644 index 0000000000..cd38dc0310 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java @@ -0,0 +1,256 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClearMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAllAck; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage; +import org.ehcache.clustered.common.internal.store.Chain; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.StructDecoder; + +import java.nio.ByteBuffer; +import java.util.UUID; + +import static org.ehcache.clustered.common.internal.messages.BaseCodec.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.encodeMandatoryFields; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +public class ServerStoreOpCodec { + + private static final Struct GET_AND_APPEND_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 30) + .byteBuffer("payload", 40) + .build(); + + private static final Struct APPEND_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 30) + .byteBuffer("payload", 40) + .build(); + + private static final Struct REPLACE_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 30) + .struct("expect", 40, CHAIN_STRUCT) + .struct("update", 50, CHAIN_STRUCT) + .build(); + + private static final Struct CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 20) + .int32("invalidationId", 30) + .build(); + + private static final Struct CLIENT_INVALIDATION_ALL_ACK_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int32("invalidationId", 40) + .build(); + + private static final Struct CLEAR_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .build(); + + private static final Struct GET_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(KEY_FIELD, 30) + .build(); + + private static final Struct LOCK_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64("hash", 30) + .build(); + + private static final Struct ITERATOR_OPEN_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int32("batchSize", 20) + .build(); + + private static final Struct ITERATOR_CLOSE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .string("id", 20) + .build(); + + private static final Struct ITERATOR_ADVANCE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .string("id", 20) + .int32("batchSize", 30) + .build(); + + private static final Struct ENABLE_EVENT_LISTENER_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .bool("enable", 20) + .build(); + + public byte[] encode(ServerStoreOpMessage message) { + switch (message.getMessageType()) { + case GET_STORE: + GetMessage getMessage = (GetMessage) message; + return encodeMandatoryFields(GET_MESSAGE_STRUCT, message) + .int64(KEY_FIELD, getMessage.getKey()) + .encode().array(); + case APPEND: + AppendMessage appendMessage = (AppendMessage) message; + return encodeMandatoryFields(APPEND_MESSAGE_STRUCT, message) + .int64(KEY_FIELD, appendMessage.getKey()) + .byteBuffer("payload", appendMessage.getPayload()) + .encode().array(); + case GET_AND_APPEND: + GetAndAppendMessage getAndAppendMessage = (GetAndAppendMessage) message; + return encodeMandatoryFields(GET_AND_APPEND_MESSAGE_STRUCT, message) + .int64(KEY_FIELD, getAndAppendMessage.getKey()) + .byteBuffer("payload", getAndAppendMessage.getPayload()) + .encode().array(); + case REPLACE: + final ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage) message; + return encodeMandatoryFields(REPLACE_MESSAGE_STRUCT, message) + .int64(KEY_FIELD, replaceAtHeadMessage.getKey()) + .struct("expect", replaceAtHeadMessage.getExpect(), ChainCodec::encodeChain) + .struct("update", replaceAtHeadMessage.getUpdate(), ChainCodec::encodeChain) + .encode().array(); + case CLIENT_INVALIDATION_ACK: + ClientInvalidationAck clientInvalidationAckMessage = (ClientInvalidationAck) message; + return encodeMandatoryFields(CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT, message) + .int64(KEY_FIELD, clientInvalidationAckMessage.getKey()) + .int32("invalidationId", clientInvalidationAckMessage.getInvalidationId()) + .encode().array(); + case CLIENT_INVALIDATION_ALL_ACK: + ClientInvalidationAllAck clientInvalidationAllAckMessage = (ClientInvalidationAllAck) message; + return encodeMandatoryFields(CLIENT_INVALIDATION_ALL_ACK_MESSAGE_STRUCT, message) + .int32("invalidationId", clientInvalidationAllAckMessage.getInvalidationId()) + .encode().array(); + case CLEAR: + return encodeMandatoryFields(CLEAR_MESSAGE_STRUCT, message) + .encode().array(); + case LOCK: + ServerStoreOpMessage.LockMessage lockMessage = (ServerStoreOpMessage.LockMessage) message; + return encodeMandatoryFields(LOCK_STRUCT, message) + .int64("hash", lockMessage.getHash()) + .encode().array(); + case UNLOCK: + ServerStoreOpMessage.UnlockMessage unlockMessage = (ServerStoreOpMessage.UnlockMessage) message; + return encodeMandatoryFields(LOCK_STRUCT, message) + .int64("hash", unlockMessage.getHash()) + .encode().array(); + case ITERATOR_OPEN: + return encodeMandatoryFields(ITERATOR_OPEN_STRUCT, message) + .int32("batchSize", ((ServerStoreOpMessage.IteratorOpenMessage) message).getBatchSize()) + .encode().array(); + case ITERATOR_CLOSE: + return encodeMandatoryFields(ITERATOR_CLOSE_STRUCT, message) + .string("id", ((ServerStoreOpMessage.IteratorCloseMessage) message).getIdentity().toString()) + .encode().array(); + case ITERATOR_ADVANCE: + return encodeMandatoryFields(ITERATOR_ADVANCE_STRUCT, message) + .string("id", ((ServerStoreOpMessage.IteratorAdvanceMessage) message).getIdentity().toString()) + .int32("batchSize", ((ServerStoreOpMessage.IteratorAdvanceMessage) message).getBatchSize()) + .encode().array(); + case ENABLE_EVENT_LISTENER: + return encodeMandatoryFields(ENABLE_EVENT_LISTENER_STRUCT, message) + .bool("enable", ((ServerStoreOpMessage.EnableEventListenerMessage) message).isEnable()) + .encode().array(); + default: + throw new RuntimeException("Unhandled message operation : " + message.getMessageType()); + } + } + + public EhcacheEntityMessage decode(EhcacheMessageType opCode, ByteBuffer messageBuffer) { + switch (opCode) { + case GET_STORE: { + StructDecoder decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); + Long key = decoder.int64(KEY_FIELD); + return new GetMessage(key); + } + case GET_AND_APPEND: { + StructDecoder decoder = GET_AND_APPEND_MESSAGE_STRUCT.decoder(messageBuffer); + Long key = decoder.int64(KEY_FIELD); + ByteBuffer payload = decoder.byteBuffer("payload"); + return new GetAndAppendMessage(key, payload); + } + case APPEND: { + StructDecoder decoder = APPEND_MESSAGE_STRUCT.decoder(messageBuffer); + Long key = decoder.int64(KEY_FIELD); + ByteBuffer payload = decoder.byteBuffer("payload"); + return new AppendMessage(key, payload); + } + case REPLACE: { + StructDecoder decoder = REPLACE_MESSAGE_STRUCT.decoder(messageBuffer); + Long key = decoder.int64(KEY_FIELD); + Chain expect = ChainCodec.decodeChain(decoder.struct("expect")); + Chain update = ChainCodec.decodeChain(decoder.struct("update")); + return new ReplaceAtHeadMessage(key, expect, update); + } + case CLIENT_INVALIDATION_ACK: { + StructDecoder decoder = CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT.decoder(messageBuffer); + Long key = decoder.int64(KEY_FIELD); + Integer invalidationId = decoder.int32("invalidationId"); + return new ClientInvalidationAck(key, invalidationId); + } + case CLIENT_INVALIDATION_ALL_ACK: { + StructDecoder decoder = CLIENT_INVALIDATION_ALL_ACK_MESSAGE_STRUCT.decoder(messageBuffer); + Integer invalidationId = decoder.int32("invalidationId"); + return new ClientInvalidationAllAck(invalidationId); + } + case CLEAR: { + return new ClearMessage(); + } + case LOCK: { + StructDecoder decoder = LOCK_STRUCT.decoder(messageBuffer); + long hash = decoder.int64("hash"); + return new ServerStoreOpMessage.LockMessage(hash); + } + case UNLOCK: { + StructDecoder decoder = LOCK_STRUCT.decoder(messageBuffer); + long hash = decoder.int64("hash"); + return new ServerStoreOpMessage.UnlockMessage(hash); + } + case ITERATOR_OPEN: { + StructDecoder decoder = ITERATOR_OPEN_STRUCT.decoder(messageBuffer); + int batchSize = decoder.int32("batchSize"); + return new ServerStoreOpMessage.IteratorOpenMessage(batchSize); + } + case ITERATOR_CLOSE: { + StructDecoder decoder = ITERATOR_CLOSE_STRUCT.decoder(messageBuffer); + UUID identity = UUID.fromString(decoder.string("id")); + return new ServerStoreOpMessage.IteratorCloseMessage(identity); + } + case ITERATOR_ADVANCE: { + StructDecoder decoder = ITERATOR_ADVANCE_STRUCT.decoder(messageBuffer); + UUID identity = UUID.fromString(decoder.string("id")); + int batchSize = decoder.int32("batchSize"); + return new ServerStoreOpMessage.IteratorAdvanceMessage(identity, batchSize); + } + case ENABLE_EVENT_LISTENER: { + StructDecoder decoder = ENABLE_EVENT_LISTENER_STRUCT.decoder(messageBuffer); + Boolean enable = decoder.bool("enable"); + return new ServerStoreOpMessage.EnableEventListenerMessage(enable); + } + default: + throw new RuntimeException("Unhandled message operation : " + opCode); + } + } + +} diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java new file mode 100644 index 0000000000..a0b459d590 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -0,0 +1,285 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.store.Chain; + +import java.nio.ByteBuffer; +import java.util.UUID; + +public abstract class ServerStoreOpMessage extends EhcacheOperationMessage { + + private ServerStoreOpMessage() { + } + + public static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage implements ConcurrentEntityMessage { + + private final long key; + + KeyBasedServerStoreOpMessage(final long key) { + this.key = key; + } + + public long getKey() { + return key; + } + + @Override + public long concurrencyKey() { + return key; + } + } + + public static class GetMessage extends KeyBasedServerStoreOpMessage { + + public GetMessage(long key) { + super(key); + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.GET_STORE; + } + } + + public static class GetAndAppendMessage extends KeyBasedServerStoreOpMessage { + + private final ByteBuffer payload; + + public GetAndAppendMessage(long key, ByteBuffer payload) { + super(key); + this.payload = payload; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.GET_AND_APPEND; + } + + public ByteBuffer getPayload() { + return payload; + } + + } + + public static class AppendMessage extends KeyBasedServerStoreOpMessage { + + private final ByteBuffer payload; + + public AppendMessage(long key, ByteBuffer payload) { + super(key); + this.payload = payload; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.APPEND; + } + + public ByteBuffer getPayload() { + return payload; + } + + } + + public static class ReplaceAtHeadMessage extends KeyBasedServerStoreOpMessage { + + private final Chain expect; + private final Chain update; + + public ReplaceAtHeadMessage(long key, Chain expect, Chain update) { + super(key); + this.expect = expect; + this.update = update; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.REPLACE; + } + + public Chain getExpect() { + return expect; + } + + public Chain getUpdate() { + return update; + } + } + + public static class ClientInvalidationAck extends KeyBasedServerStoreOpMessage { + + private final int invalidationId; + + public ClientInvalidationAck(long key, int invalidationId) { + super(key); + this.invalidationId = invalidationId; + } + + public int getInvalidationId() { + return invalidationId; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLIENT_INVALIDATION_ACK; + } + } + + public static class ClientInvalidationAllAck extends ServerStoreOpMessage { + + private final int invalidationId; + + public ClientInvalidationAllAck(int invalidationId) { + super(); + this.invalidationId = invalidationId; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLIENT_INVALIDATION_ALL_ACK; + } + + public int getInvalidationId() { + return invalidationId; + } + } + + public static class ClearMessage extends ServerStoreOpMessage { + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLEAR; + } + } + + public static class LockMessage extends ServerStoreOpMessage { + + private final long hash; + + public LockMessage(long hash) { + this.hash = hash; + } + + public long getHash() { + return hash; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.LOCK; + } + } + + public static class UnlockMessage extends ServerStoreOpMessage { + + private final long hash; + + public UnlockMessage(long hash) { + this.hash = hash; + } + + public long getHash() { + return hash; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.UNLOCK; + } + } + + public static class IteratorOpenMessage extends ServerStoreOpMessage { + + private final int batchSize; + + public IteratorOpenMessage(int batchSize) { + this.batchSize = batchSize; + } + + public int getBatchSize() { + return batchSize; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.ITERATOR_OPEN; + } + } + + public static class IteratorCloseMessage extends ServerStoreOpMessage { + + private final UUID id; + + public IteratorCloseMessage(UUID id) { + this.id = id; + } + + public UUID getIdentity() { + return id; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.ITERATOR_CLOSE; + } + } + + public static class IteratorAdvanceMessage extends ServerStoreOpMessage { + + private final UUID id; + private final int batchSize; + + public IteratorAdvanceMessage(UUID id, int batchSize) { + this.id = id; + this.batchSize = batchSize; + + } + + public UUID getIdentity() { + return id; + } + + public int getBatchSize() { + return batchSize; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.ITERATOR_ADVANCE; + } + } + + public static class EnableEventListenerMessage extends ServerStoreOpMessage { + private final boolean enable; + + public EnableEventListenerMessage(boolean enable) { + this.enable = enable; + } + + public boolean isEnable() { + return enable; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.ENABLE_EVENT_LISTENER; + } + } + +} + diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java similarity index 75% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java index df669e6213..546e0e12aa 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java @@ -19,7 +19,6 @@ import org.ehcache.clustered.common.internal.store.Util; import org.terracotta.runnel.Struct; import org.terracotta.runnel.decoding.StructDecoder; -import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; import java.util.Arrays; @@ -27,11 +26,12 @@ import java.util.function.Predicate; import static java.nio.ByteBuffer.wrap; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_NAME; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.encodeMandatoryFields; import static org.terracotta.runnel.StructBuilder.newStructBuilder; public class StateRepositoryOpCodec { @@ -60,8 +60,6 @@ public class StateRepositoryOpCodec { .string(MAP_ID_FIELD, 35) .build(); - private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); - public byte[] encode(StateRepositoryOpMessage message) { switch (message.getMessageType()) { @@ -77,38 +75,29 @@ public byte[] encode(StateRepositoryOpMessage message) { } private byte[] encodeEntrySetMessage(StateRepositoryOpMessage.EntrySetMessage message) { - StructEncoder encoder = ENTRY_SET_MESSAGE_STRUCT.encoder(); - - messageCodecUtils.encodeMandatoryFields(encoder, message); - encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); - encoder.string(MAP_ID_FIELD, message.getCacheId()); - - return encoder.encode().array(); + return encodeMandatoryFields(ENTRY_SET_MESSAGE_STRUCT, message) + .string(SERVER_STORE_NAME_FIELD, message.getCacheId()) + .string(MAP_ID_FIELD, message.getCacheId()) + .encode().array(); } private byte[] encodePutIfAbsentMessage(StateRepositoryOpMessage.PutIfAbsentMessage message) { - StructEncoder encoder = PUT_IF_ABSENT_MESSAGE_STRUCT.encoder(); - - messageCodecUtils.encodeMandatoryFields(encoder, message); - encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); - encoder.string(MAP_ID_FIELD, message.getCacheId()); - // TODO this needs to change - serialization needs to happen in the StateRepo not here, though we need the hashcode for server side comparison. - encoder.byteBuffer(KEY_FIELD, wrap(Util.marshall(message.getKey()))); - encoder.byteBuffer(VALUE_FIELD, wrap(Util.marshall(message.getValue()))); - - return encoder.encode().array(); + return encodeMandatoryFields(PUT_IF_ABSENT_MESSAGE_STRUCT, message) + .string(SERVER_STORE_NAME_FIELD, message.getCacheId()) + .string(MAP_ID_FIELD, message.getCacheId()) + // TODO this needs to change - serialization needs to happen in the StateRepo not here, though we need the hashcode for server side comparison. + .byteBuffer(KEY_FIELD, wrap(Util.marshall(message.getKey()))) + .byteBuffer(VALUE_FIELD, wrap(Util.marshall(message.getValue()))) + .encode().array(); } private byte[] encodeGetMessage(StateRepositoryOpMessage.GetMessage message) { - StructEncoder encoder = GET_MESSAGE_STRUCT.encoder(); - - messageCodecUtils.encodeMandatoryFields(encoder, message); - encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); - encoder.string(MAP_ID_FIELD, message.getCacheId()); - // TODO this needs to change - serialization needs to happen in the StateRepo not here, though we need the hashcode for server side comparison. - encoder.byteBuffer(KEY_FIELD, wrap(Util.marshall(message.getKey()))); - - return encoder.encode().array(); + return encodeMandatoryFields(GET_MESSAGE_STRUCT, message) + .string(SERVER_STORE_NAME_FIELD, message.getCacheId()) + .string(MAP_ID_FIELD, message.getCacheId()) + // TODO this needs to change - serialization needs to happen in the StateRepo not here, though we need the hashcode for server side comparison. + .byteBuffer(KEY_FIELD, wrap(Util.marshall(message.getKey()))) + .encode().array(); } public StateRepositoryOpMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/ClusterTierEntityConfiguration.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/ClusterTierEntityConfiguration.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/ClusterTierEntityConfiguration.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/ClusterTierEntityConfiguration.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/CustomLoaderBasedObjectInputStream.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/CustomLoaderBasedObjectInputStream.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/CustomLoaderBasedObjectInputStream.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/CustomLoaderBasedObjectInputStream.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/FilteredObjectInputStream.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/FilteredObjectInputStream.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/FilteredObjectInputStream.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/FilteredObjectInputStream.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/SequencedElement.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/SequencedElement.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/SequencedElement.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/SequencedElement.java diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java new file mode 100644 index 0000000000..a1ee21c176 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java @@ -0,0 +1,53 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.store; + +import org.ehcache.clustered.common.internal.util.ByteBufferInputStream; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; +import java.util.function.Predicate; + +public class Util { + + public static Object unmarshall(ByteBuffer payload, Predicate> isClassPermitted) { + try (ObjectInputStream objectInputStream = + new FilteredObjectInputStream(new ByteBufferInputStream(payload), isClassPermitted, null)) { + return objectInputStream.readObject(); + } catch (IOException | ClassNotFoundException ex) { + throw new IllegalArgumentException(ex); + } + } + + public static byte[] marshall(Object message) { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + try(ObjectOutputStream oout = new ObjectOutputStream(out)) { + oout.writeObject(message); + } catch (IOException e) { + throw new IllegalArgumentException(e); + } + return out.toByteArray(); + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/BaseKeyValueOperation.java similarity index 96% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/BaseKeyValueOperation.java index 0655372836..46db028c7f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/BaseKeyValueOperation.java @@ -14,9 +14,9 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; -import org.ehcache.clustered.client.internal.store.operations.codecs.CodecException; +import org.ehcache.clustered.common.internal.store.operations.codecs.CodecException; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -123,7 +123,7 @@ public boolean equals(final Object obj) { return false; } - BaseKeyValueOperation other = (BaseKeyValueOperation) obj; + BaseKeyValueOperation other = (BaseKeyValueOperation) obj; if(this.getOpCode() != other.getOpCode()) { return false; } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ConditionalRemoveOperation.java similarity index 89% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperation.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ConditionalRemoveOperation.java index 21186246fa..780003584f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperation.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ConditionalRemoveOperation.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; @@ -37,10 +37,10 @@ public OperationCode getOpCode() { @Override public Result apply(final Result previousOperation) { - if(previousOperation == null) { + if (previousOperation == null) { return null; } else { - if(getValue().equals(previousOperation.getValue())) { + if (getValue().equals(previousOperation.getValue())) { return null; } else { return previousOperation; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ConditionalReplaceOperation.java similarity index 96% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ConditionalReplaceOperation.java index cd2a101fe7..caac548cca 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ConditionalReplaceOperation.java @@ -14,14 +14,14 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; -import org.ehcache.clustered.client.internal.store.operations.codecs.CodecException; +import org.ehcache.clustered.common.internal.store.operations.codecs.CodecException; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; -import static org.ehcache.clustered.client.internal.store.operations.OperationCode.REPLACE_CONDITIONAL; +import static org.ehcache.clustered.common.internal.store.operations.OperationCode.REPLACE_CONDITIONAL; public class ConditionalReplaceOperation implements Operation, Result { @@ -80,7 +80,7 @@ public K getKey() { return key; } - V getOldValue() { + public V getOldValue() { return this.oldValueHolder.getValue(); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolder.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/LazyValueHolder.java similarity index 92% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolder.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/LazyValueHolder.java index dd02977824..fb50329113 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolder.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/LazyValueHolder.java @@ -14,9 +14,9 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; -import org.ehcache.clustered.client.internal.store.operations.codecs.CodecException; +import org.ehcache.clustered.common.internal.store.operations.codecs.CodecException; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/Operation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/Operation.java similarity index 95% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/Operation.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/Operation.java index d0211f9bd4..c72f9726f1 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/Operation.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/Operation.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/OperationCode.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/OperationCode.java similarity index 78% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/OperationCode.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/OperationCode.java index 691851f458..7515a387b7 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/OperationCode.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/OperationCode.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; @@ -27,6 +27,11 @@ public enum OperationCode { public Operation decode(ByteBuffer buffer, final Serializer keySerializer, final Serializer valueSerializer) { return new PutOperation<>(buffer, keySerializer, valueSerializer); } + + @Override + public boolean shouldBePinned() { + return false; + } }, REMOVE((byte)2) { @Override @@ -57,9 +62,21 @@ public Operation decode(final ByteBuffer buffer, final Serializer Operation decode(final ByteBuffer buffer, final Serializer keySerializer, final Serializer valueSerializer) { return new ConditionalReplaceOperation<>(buffer, keySerializer, valueSerializer); } + }, + PUT_WITH_WRITER((byte)7) { + @Override + public Operation decode(ByteBuffer buffer, Serializer keySerializer, Serializer valueSerializer) { + return new PutWithWriterOperation<>(buffer, keySerializer, valueSerializer); + } + }, + TIMESTAMP((byte)8) { + @Override + public Operation decode(ByteBuffer buffer, Serializer keySerializer, Serializer valueSerializer) { + return new TimestampOperation<>(buffer, keySerializer); + } }; - private byte value; + private final byte value; OperationCode(byte value) { this.value = value; @@ -69,6 +86,10 @@ public byte getValue() { return value; } + public boolean shouldBePinned() { + return true; + } + public abstract Operation decode(ByteBuffer buffer, Serializer keySerializer, Serializer valueSerializer); public static OperationCode valueOf(byte value) { @@ -85,6 +106,10 @@ public static OperationCode valueOf(byte value) { return REPLACE; case 6: return REPLACE_CONDITIONAL; + case 7: + return PUT_WITH_WRITER; + case 8: + return TIMESTAMP; default: throw new IllegalArgumentException("Operation undefined for the value " + value); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/PutIfAbsentOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutIfAbsentOperation.java similarity index 96% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/PutIfAbsentOperation.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutIfAbsentOperation.java index 22361e6de7..1684dcb0c4 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/PutIfAbsentOperation.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutIfAbsentOperation.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/PutOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutOperation.java similarity index 96% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/PutOperation.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutOperation.java index 40ce92ab89..6da446f51e 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/PutOperation.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutOperation.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutWithWriterOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutWithWriterOperation.java new file mode 100644 index 0000000000..3273bca77f --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/PutWithWriterOperation.java @@ -0,0 +1,55 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.store.operations; + +import org.ehcache.spi.serialization.Serializer; + +import java.nio.ByteBuffer; + +/** + * @param key type + * @param value type + */ +public class PutWithWriterOperation extends BaseKeyValueOperation implements Result { + + public PutWithWriterOperation(final K key, final V value, final long timeStamp) { + super(key, value, timeStamp); + } + + PutWithWriterOperation(final ByteBuffer buffer, final Serializer keySerializer, final Serializer valueSerializer) { + super(buffer, keySerializer, valueSerializer); + } + + @Override + public OperationCode getOpCode() { + return OperationCode.PUT_WITH_WRITER; + } + + /** + * Put operation applied on top of another {@link Operation} does not care + * what the other operation is. The result is gonna be {@code this} operation. + */ + @Override + public Result apply(final Result previousOperation) { + return this; + } + + @Override + public PutOperation asOperationExpiringAt(long expirationTime) { + return new PutOperation<>(this, -expirationTime); + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/RemoveOperation.java similarity index 96% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/RemoveOperation.java index db6da0619d..1eb9cda227 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/RemoveOperation.java @@ -14,9 +14,9 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; -import org.ehcache.clustered.client.internal.store.operations.codecs.CodecException; +import org.ehcache.clustered.common.internal.store.operations.codecs.CodecException; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ReplaceOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ReplaceOperation.java similarity index 95% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ReplaceOperation.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ReplaceOperation.java index efc43229dd..ee10ca5a03 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ReplaceOperation.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/ReplaceOperation.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; import org.ehcache.spi.serialization.Serializer; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/Result.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/Result.java similarity index 91% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/Result.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/Result.java index 63203da8b8..bfb2ecdd5b 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/Result.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/Result.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations; +package org.ehcache.clustered.common.internal.store.operations; public interface Result { diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/TimestampOperation.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/TimestampOperation.java new file mode 100644 index 0000000000..ad34af91b0 --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/TimestampOperation.java @@ -0,0 +1,138 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.store.operations; + +import org.ehcache.clustered.common.internal.store.operations.codecs.CodecException; +import org.ehcache.spi.serialization.Serializer; + +import java.nio.ByteBuffer; + +import static java.util.Objects.requireNonNull; + +public class TimestampOperation implements Operation { + + private final K key; + private final long timeStamp; + + public TimestampOperation(final K key, final long timeStamp) { + this.key = requireNonNull(key); + this.timeStamp = timeStamp; + } + + TimestampOperation(final ByteBuffer buffer, final Serializer keySerializer) { + OperationCode opCode = OperationCode.valueOf(buffer.get()); + if (opCode != getOpCode()) { + throw new IllegalArgumentException("Invalid operation: " + opCode); + } + this.timeStamp = buffer.getLong(); + ByteBuffer keyBlob = buffer.slice(); + try { + this.key = keySerializer.read(keyBlob); + } catch (ClassNotFoundException e) { + throw new CodecException(e); + } + } + + public K getKey() { + return key; + } + + @Override + public OperationCode getOpCode() { + return OperationCode.TIMESTAMP; + } + + /** + * Timestamp operation is a no-op - it only exists to establish a wall-time in the chain. + */ + @Override + public Result apply(final Result previousOperation) { + return previousOperation; + } + + @Override + public ByteBuffer encode(final Serializer keySerializer, final Serializer valueSerializer) { + ByteBuffer keyBuf = keySerializer.serialize(key); + + int size = BYTE_SIZE_BYTES + // Operation type + LONG_SIZE_BYTES + // Size of expiration time stamp + keyBuf.remaining(); // the key payload itself + + ByteBuffer buffer = ByteBuffer.allocate(size); + buffer.put(getOpCode().getValue()); + buffer.putLong(this.timeStamp); + buffer.put(keyBuf); + buffer.flip(); + return buffer; + } + + @Override + public String toString() { + return "{" + getOpCode() + "# key: " + key + "}"; + } + + @Override + public boolean equals(final Object obj) { + if(obj == null) { + return false; + } + if(!(obj instanceof TimestampOperation)) { + return false; + } + + @SuppressWarnings("unchecked") + TimestampOperation other = (TimestampOperation) obj; + if(this.getOpCode() != other.getOpCode()) { + return false; + } + if(!this.getKey().equals(other.getKey())) { + return false; + } + return true; + } + + @Override + public int hashCode() { + int hash = getOpCode().hashCode(); + hash = hash * 31 + key.hashCode(); + return hash; + } + + @Override + public long timeStamp() { + if (!isExpiryAvailable()) { + return this.timeStamp; + } else { + throw new RuntimeException("Timestamp not available"); + } + } + + @Override + public boolean isExpiryAvailable() { + return timeStamp < 0; + } + + @Override + public long expirationTime() { + if (isExpiryAvailable()) { + return - this.timeStamp; + } else { + throw new RuntimeException("Expiry not available"); + } + } + +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/CodecException.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/codecs/CodecException.java similarity index 82% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/CodecException.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/codecs/CodecException.java index 454235e94f..cca0af1341 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/CodecException.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/codecs/CodecException.java @@ -14,18 +14,19 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations.codecs; +package org.ehcache.clustered.common.internal.store.operations.codecs; /** * Thrown when a payload can not be encoded or decoded */ public class CodecException extends RuntimeException { + private static final long serialVersionUID = -4879598222155854243L; + /** * Creates a {@code CodecException}. */ public CodecException() { - super(); } /** @@ -33,7 +34,7 @@ public CodecException() { * * @param message information about the exception */ - public CodecException(final String message) { + public CodecException(String message) { super(message); } @@ -43,7 +44,7 @@ public CodecException(final String message) { * @param message information about the exception * @param cause the cause of this exception */ - public CodecException(final String message, final Throwable cause) { + public CodecException(String message, Throwable cause) { super(message, cause); } @@ -52,7 +53,7 @@ public CodecException(final String message, final Throwable cause) { * * @param cause the cause of this exception */ - public CodecException(final Throwable cause) { + public CodecException(Throwable cause) { super(cause); } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/codecs/OperationsCodec.java similarity index 81% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java rename to clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/codecs/OperationsCodec.java index 16bbf347f9..bc4a48868f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/store/operations/codecs/OperationsCodec.java @@ -14,10 +14,10 @@ * limitations under the License. */ -package org.ehcache.clustered.client.internal.store.operations.codecs; +package org.ehcache.clustered.common.internal.store.operations.codecs; -import org.ehcache.clustered.client.internal.store.operations.Operation; -import org.ehcache.clustered.client.internal.store.operations.OperationCode; +import org.ehcache.clustered.common.internal.store.operations.Operation; +import org.ehcache.clustered.common.internal.store.operations.OperationCode; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -37,9 +37,14 @@ public ByteBuffer encode(Operation operation) { return operation.encode(keySerializer, valueSerializer); } - public Operation decode(ByteBuffer buffer) { + public static OperationCode getOperationCode(ByteBuffer buffer) { OperationCode opCode = OperationCode.valueOf(buffer.get()); buffer.rewind(); + return opCode; + } + + public Operation decode(ByteBuffer buffer) { + OperationCode opCode = getOperationCode(buffer); return opCode.decode(buffer, keySerializer, valueSerializer); } diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java new file mode 100644 index 0000000000..895291069b --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java @@ -0,0 +1,62 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.util; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; + +import static java.lang.Math.max; +import static java.lang.Math.min; + +// TODO remove once it comes with Runnel +public class ByteBufferInputStream extends InputStream { + + private final ByteBuffer buffer; + + public ByteBufferInputStream(ByteBuffer buffer) { + this.buffer = buffer.slice(); + } + + @Override + public int read() { + if (buffer.hasRemaining()) { + return 0xff & buffer.get(); + } else { + return -1; + } + } + + @Override + public int read(byte b[], int off, int len) { + len = min(len, buffer.remaining()); + buffer.get(b, off, len); + return len; + } + + @Override + public long skip(long n) { + n = min(buffer.remaining(), max(n, 0)); + buffer.position((int) (buffer.position() + n)); + return n; + } + + @Override + public synchronized int available() { + return buffer.remaining(); + } +} diff --git a/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/util/ChainBuilder.java b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/util/ChainBuilder.java new file mode 100644 index 0000000000..0dd9ff8f0a --- /dev/null +++ b/clustered/ehcache-common/src/main/java/org/ehcache/clustered/common/internal/util/ChainBuilder.java @@ -0,0 +1,68 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.common.internal.util; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +/** + * Builds {@link Chain}s + */ +public class ChainBuilder { + + private List buffers = new ArrayList<>(); + + public ChainBuilder add(final ByteBuffer payload) { + buffers.add(payload); + return this; + } + + public Chain build() { + List elements = new ArrayList<>(); + for (final ByteBuffer buffer : buffers) { + elements.add(buffer::asReadOnlyBuffer); + } + return chainFromList(elements); + } + + public int length() { + return buffers.size(); + } + + public static Chain chainFromList(List elements) { + return new Chain() { + @Override + public boolean isEmpty() { + return elements.isEmpty(); + } + + @Override + public int length() { + return elements.size(); + } + + @Override + public Iterator iterator() { + return elements.iterator(); + } + }; + } +} diff --git a/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/Store/operations/OperationCodeTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/Store/operations/OperationCodeTest.java new file mode 100644 index 0000000000..d874005f94 --- /dev/null +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/Store/operations/OperationCodeTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.common.internal.Store.operations; + +import org.ehcache.clustered.common.internal.store.operations.OperationCode; +import org.junit.Test; + +import java.util.Arrays; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class OperationCodeTest { + + @Test + public void testPinning() { + assertThat(OperationCode.PUT.shouldBePinned(), is(false)); + + Arrays.stream(OperationCode.values()) + .filter(operationCode -> operationCode != OperationCode.PUT) + .forEach((operationCode -> assertThat(operationCode + " must be pinned", operationCode.shouldBePinned(), is(true)))); + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java similarity index 99% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java index 8c5b68f8e1..408a265d5f 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java @@ -22,12 +22,12 @@ import java.lang.reflect.Constructor; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.typeCompatibleWith; -import static org.junit.Assert.assertThat; /** * Foundation for tests on {@link ClusterException} subclasses. diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/IllegalMessageExceptionTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/IllegalMessageExceptionTest.java similarity index 100% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/IllegalMessageExceptionTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/IllegalMessageExceptionTest.java diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidOperationExceptionTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidOperationExceptionTest.java similarity index 100% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidOperationExceptionTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidOperationExceptionTest.java diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerSideConfigurationExceptionTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerSideConfigurationExceptionTest.java similarity index 100% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerSideConfigurationExceptionTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerSideConfigurationExceptionTest.java diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationExceptionTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationExceptionTest.java similarity index 100% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationExceptionTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationExceptionTest.java diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidStoreExceptionTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidStoreExceptionTest.java similarity index 100% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidStoreExceptionTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/InvalidStoreExceptionTest.java diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/LifecycleExceptionTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/LifecycleExceptionTest.java similarity index 100% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/LifecycleExceptionTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/exceptions/LifecycleExceptionTest.java diff --git a/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ChainCodecTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ChainCodecTest.java new file mode 100644 index 0000000000..7fa0a7b0ba --- /dev/null +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ChainCodecTest.java @@ -0,0 +1,187 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.junit.Test; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.nio.ByteBuffer; +import java.util.AbstractMap.SimpleImmutableEntry; +import java.util.Iterator; +import java.util.Map; + +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.ChainUtils.readPayload; +import static org.ehcache.clustered.ChainUtils.sequencedChainOf; +import static org.ehcache.clustered.Matchers.hasPayloads; +import static org.ehcache.clustered.Matchers.sameSequenceAs; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class ChainCodecTest { + + @Test + public void testChainWithSingleElement() { + Chain chain = chainOf(createPayload(1L)); + + assertThat(chain.isEmpty(), is(false)); + Iterator chainIterator = chain.iterator(); + assertThat(readPayload(chainIterator.next().getPayload()), is(1L)); + assertThat(chainIterator.hasNext(), is(false)); + + Chain decoded = ChainCodec.decodeChain(ChainCodec.encodeChain(chain)); + + assertThat(decoded.isEmpty(), is(false)); + chainIterator = decoded.iterator(); + assertThat(readPayload(chainIterator.next().getPayload()), is(1L)); + assertThat(chainIterator.hasNext(), is(false)); + } + + @Test + public void testChainWithSingleSequencedElement() { + Chain chain = sequencedChainOf(createPayload(1L)); + + assertThat(chain.isEmpty(), is(false)); + Iterator chainIterator = chain.iterator(); + assertThat(readPayload(chainIterator.next().getPayload()), is(1L)); + assertThat(chainIterator.hasNext(), is(false)); + + Chain decoded = ChainCodec.decodeChain(ChainCodec.encodeChain(chain)); + + assertThat(decoded.isEmpty(), is(false)); + chainIterator = decoded.iterator(); + assertThat(readPayload(chainIterator.next().getPayload()), is(1L)); + assertThat(chainIterator.hasNext(), is(false)); + + assertThat(decoded, sameSequenceAs(chain)); + } + + @Test + public void testChainWithMultipleElements() { + Chain chain = chainOf(createPayload(1L), createPayload(2L), createPayload(3L)); + + assertThat(chain.isEmpty(), is(false)); + assertThat(chain, hasPayloads(1L, 2L, 3L)); + + Chain decoded = ChainCodec.decodeChain(ChainCodec.encodeChain(chain)); + + assertThat(decoded.isEmpty(), is(false)); + assertThat(decoded, hasPayloads(1L, 2L, 3L)); + } + + @Test + public void testChainWithMultipleSequencedElements() { + Chain chain = sequencedChainOf(createPayload(1L), createPayload(2L), createPayload(3L)); + + assertThat(chain.isEmpty(), is(false)); + assertThat(chain, hasPayloads(1L, 2L, 3L)); + + Chain decoded = ChainCodec.decodeChain(ChainCodec.encodeChain(chain)); + + assertThat(decoded.isEmpty(), is(false)); + assertThat(decoded, hasPayloads(1L, 2L, 3L)); + + assertThat(decoded, sameSequenceAs(chain)); + } + + @Test + public void testEmptyChain() { + Chain decoded = ChainCodec.decodeChain(ChainCodec.encodeChain(chainOf())); + + assertThat(decoded.isEmpty(), is(true)); + } + + @Test + public void testChainEntryWithSingleElement() { + SimpleImmutableEntry entry = new SimpleImmutableEntry<>(42L, chainOf(createPayload(1L))); + StructEncoder encoder = ChainCodec.CHAIN_ENTRY_STRUCT.encoder(); + ChainCodec.encodeChainEntry(encoder, entry); + + Map.Entry decoded = ChainCodec.decodeChainEntry(ChainCodec.CHAIN_ENTRY_STRUCT.decoder((ByteBuffer) encoder.encode().flip())); + + + assertThat(decoded.getKey(), is(42L)); + assertThat(decoded.getValue().isEmpty(), is(false)); + Iterator chainIterator = decoded.getValue().iterator(); + assertThat(readPayload(chainIterator.next().getPayload()), is(1L)); + assertThat(chainIterator.hasNext(), is(false)); + } + + @Test + public void testChainEntryWithSingleSequencedElement() { + Chain chain = sequencedChainOf(createPayload(1L)); + SimpleImmutableEntry entry = new SimpleImmutableEntry<>(43L, chain); + StructEncoder encoder = ChainCodec.CHAIN_ENTRY_STRUCT.encoder(); + ChainCodec.encodeChainEntry(encoder, entry); + + Map.Entry decoded = ChainCodec.decodeChainEntry(ChainCodec.CHAIN_ENTRY_STRUCT.decoder((ByteBuffer) encoder.encode().flip())); + + assertThat(decoded.getKey(), is(43L)); + assertThat(decoded.getValue().isEmpty(), is(false)); + Iterator chainIterator = decoded.getValue().iterator(); + assertThat(readPayload(chainIterator.next().getPayload()), is(1L)); + assertThat(chainIterator.hasNext(), is(false)); + + assertThat(decoded.getValue(), sameSequenceAs(chain)); + } + + @Test + public void testChainEntryWithMultipleElements() { + Chain chain = chainOf(createPayload(1L), createPayload(2L), createPayload(3L)); + SimpleImmutableEntry entry = new SimpleImmutableEntry<>(44L, chain); + StructEncoder encoder = ChainCodec.CHAIN_ENTRY_STRUCT.encoder(); + ChainCodec.encodeChainEntry(encoder, entry); + + Map.Entry decoded = ChainCodec.decodeChainEntry(ChainCodec.CHAIN_ENTRY_STRUCT.decoder((ByteBuffer) encoder.encode().flip())); + + assertThat(decoded.getKey(), is(44L)); + assertThat(decoded.getValue().isEmpty(), is(false)); + assertThat(decoded.getValue(), hasPayloads(1L, 2L, 3L)); + } + + @Test + public void testChainEntryWithMultipleSequencedElements() { + Chain chain = sequencedChainOf(createPayload(1L), createPayload(2L), createPayload(3L)); + SimpleImmutableEntry entry = new SimpleImmutableEntry<>(45L, chain); + StructEncoder encoder = ChainCodec.CHAIN_ENTRY_STRUCT.encoder(); + ChainCodec.encodeChainEntry(encoder, entry); + + Map.Entry decoded = ChainCodec.decodeChainEntry(ChainCodec.CHAIN_ENTRY_STRUCT.decoder((ByteBuffer) encoder.encode().flip())); + + assertThat(decoded.getKey(), is(45L)); + assertThat(decoded.getValue().isEmpty(), is(false)); + assertThat(decoded.getValue(), hasPayloads(1L, 2L, 3L)); + + assertThat(decoded.getValue(), sameSequenceAs(chain)); + } + + @Test + public void testEmptyChainEntry() { + Chain chain = chainOf(); + SimpleImmutableEntry entry = new SimpleImmutableEntry<>(46L, chain); + StructEncoder encoder = ChainCodec.CHAIN_ENTRY_STRUCT.encoder(); + ChainCodec.encodeChainEntry(encoder, entry); + + Map.Entry decoded = ChainCodec.decodeChainEntry(ChainCodec.CHAIN_ENTRY_STRUCT.decoder((ByteBuffer) encoder.encode().flip())); + + assertThat(decoded.getKey(), is(46L)); + assertThat(decoded.getValue().isEmpty(), is(true)); + } +} diff --git a/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java new file mode 100644 index 0000000000..5732cda349 --- /dev/null +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java @@ -0,0 +1,123 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.junit.Test; +import org.mockito.Mockito; +import org.terracotta.runnel.EnumMapping; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.nio.ByteBuffer; +import java.util.Collections; + +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +public class CommonConfigCodecTest { + + private static final CommonConfigCodec CODEC = new CommonConfigCodec(); + + @Test + public void testEncodeDecodeServerSideConfiguration() throws Exception { + ServerSideConfiguration serverSideConfiguration = + new ServerSideConfiguration("foo", Collections.singletonMap("bar", new ServerSideConfiguration.Pool(1))); + Struct serverSideConfigurationStruct = CODEC.injectServerSideConfiguration(newStructBuilder(), 10).getUpdatedBuilder().build(); + StructEncoder encoder = serverSideConfigurationStruct.encoder(); + CODEC.encodeServerSideConfiguration(encoder, serverSideConfiguration); + ByteBuffer byteBuffer = encoder.encode(); + byteBuffer.rewind(); + ServerSideConfiguration decodedServerSideConfiguration = + CODEC.decodeServerSideConfiguration(serverSideConfigurationStruct.decoder(byteBuffer)); + assertThat(decodedServerSideConfiguration.getDefaultServerResource(), is("foo")); + assertThat(decodedServerSideConfiguration.getResourcePools(), hasKey("bar")); + } + + @Test + public void testInjectServerStoreConfiguration() { + PoolAllocation poolAllocation = mock(PoolAllocation.class); + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(poolAllocation, "Long.class", + "String.class", null, null, Consistency.EVENTUAL, false, false); + ConfigCodec.InjectTuple injectTuple = CODEC.injectServerStoreConfiguration(newStructBuilder(), 10); + + assertThat(injectTuple.getLastIndex(), is(40)); + + Struct struct = injectTuple.getUpdatedBuilder().build(); + StructEncoder encoder = struct.encoder(); + + CODEC.encodeServerStoreConfiguration(encoder, serverStoreConfiguration); + + encoder.int64(CommonConfigCodec.POOL_SIZE_FIELD, 20); + + } + + @Test + public void testDecodeNonLoaderWriterServerStoreConfiguration() { + EnumMapping consistencyEnumMapping = newEnumMappingBuilder(Consistency.class) + .mapping(Consistency.EVENTUAL, 1) + .mapping(Consistency.STRONG, 2) + .build(); + int index = 30; + StructBuilder builder = newStructBuilder() + .string("identifier", 10) + .string(SERVER_STORE_NAME_FIELD, 20) + .string("keyType", index) + .string("keySerializerType", index + 10) + .string("valueType", index + 11) + .string("valueSerializerType", index + 15) + .enm("consistency", index + 16, consistencyEnumMapping) + .int64("poolSize", index + 20) + .string("resourceName", index + 30); + + Struct struct = builder.build(); + + ByteBuffer encodedStoreConfig = struct.encoder() + .string("identifier", "test") + .string(SERVER_STORE_NAME_FIELD, "testStore") + .string("keyType", "Long") + .string("keySerializerType", "Long") + .string("valueType", "Long") + .string("valueSerializerType", "Long") + .enm("consistency", Consistency.STRONG) + .int64("poolSize", 20) + .string("resourceName", "primary").encode(); + + Struct newStruct = CODEC.injectServerStoreConfiguration(newStructBuilder() + .string("identifier", 10) + .string(SERVER_STORE_NAME_FIELD, 20), index) + .getUpdatedBuilder() + .build(); + encodedStoreConfig.flip(); + ServerStoreConfiguration serverStoreConfiguration = + CODEC.decodeServerStoreConfiguration(newStruct.decoder(encodedStoreConfig)); + + assertThat(serverStoreConfiguration.isLoaderWriterConfigured(), is(false)); + assertThat(serverStoreConfiguration.isWriteBehindConfigured(), is(false)); + + } + +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java similarity index 100% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java similarity index 93% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java index 27e6cd172c..7c1e2a98bb 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java @@ -24,13 +24,11 @@ import org.junit.Test; import java.util.Collections; -import java.util.UUID; import static java.nio.ByteBuffer.wrap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.*; /** * LifeCycleMessageCodecTest @@ -64,7 +62,7 @@ public void testValidateServerStoreDedicated() throws Exception { PoolAllocation.Dedicated dedicated = new PoolAllocation.Dedicated("dedicate", 420000L); ServerStoreConfiguration configuration = new ServerStoreConfiguration(dedicated, "java.lang.Long", "java.lang.String", "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", - Consistency.STRONG); + Consistency.STRONG, false); LifecycleMessage message = factory.validateServerStore("store1", configuration); byte[] encoded = codec.encode(message); @@ -75,6 +73,7 @@ public void testValidateServerStoreDedicated() throws Exception { PoolAllocation.Dedicated decodedPoolAllocation = (PoolAllocation.Dedicated) decodedMessage.getStoreConfiguration().getPoolAllocation(); assertThat(decodedPoolAllocation.getResourceName(), is(dedicated.getResourceName())); assertThat(decodedPoolAllocation.getSize(), is(dedicated.getSize())); + assertThat(decodedMessage.getStoreConfiguration().isLoaderWriterConfigured(), is(false)); } @Test @@ -82,7 +81,7 @@ public void testValidateServerStoreShared() throws Exception { PoolAllocation.Shared shared = new PoolAllocation.Shared("shared"); ServerStoreConfiguration configuration = new ServerStoreConfiguration(shared, "java.lang.Long", "java.lang.String", "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", - Consistency.STRONG); + Consistency.STRONG, false); LifecycleMessage message = factory.validateServerStore("store1", configuration); byte[] encoded = codec.encode(message); @@ -92,6 +91,7 @@ public void testValidateServerStoreShared() throws Exception { validateCommonServerStoreConfig(decodedMessage, configuration); PoolAllocation.Shared decodedPoolAllocation = (PoolAllocation.Shared) decodedMessage.getStoreConfiguration().getPoolAllocation(); assertThat(decodedPoolAllocation.getResourcePoolName(), is(shared.getResourcePoolName())); + assertThat(decodedMessage.getStoreConfiguration().isLoaderWriterConfigured(), is(false)); } @Test @@ -99,7 +99,7 @@ public void testValidateServerStoreUnknown() throws Exception { PoolAllocation.Unknown unknown = new PoolAllocation.Unknown(); ServerStoreConfiguration configuration = new ServerStoreConfiguration(unknown, "java.lang.Long", "java.lang.String", "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", - Consistency.STRONG); + Consistency.STRONG, false); LifecycleMessage message = factory.validateServerStore("store1", configuration); byte[] encoded = codec.encode(message); @@ -108,6 +108,7 @@ public void testValidateServerStoreUnknown() throws Exception { assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.VALIDATE_SERVER_STORE)); validateCommonServerStoreConfig(decodedMessage, configuration); assertThat(decodedMessage.getStoreConfiguration().getPoolAllocation(), instanceOf(PoolAllocation.Unknown.class)); + assertThat(decodedMessage.getStoreConfiguration().isLoaderWriterConfigured(), is(false)); } private void validateCommonServerStoreConfig(LifecycleMessage.ValidateServerStore decodedMessage, ServerStoreConfiguration initialConfiguration) { diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java similarity index 85% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java index f2532d7232..26955fe38a 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java @@ -21,12 +21,11 @@ import java.util.HashSet; import java.util.Set; -import java.util.UUID; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; public class ReconnectMessageCodecTest { @@ -40,20 +39,27 @@ public void setUp() { @Test public void testClusterTierReconnectCodec() { - ClusterTierReconnectMessage reconnectMessage = new ClusterTierReconnectMessage(); + ClusterTierReconnectMessage reconnectMessage = new ClusterTierReconnectMessage(false); Set setToInvalidate = new HashSet<>(); setToInvalidate.add(1L); setToInvalidate.add(11L); setToInvalidate.add(111L); + Set locks = new HashSet<>(); + locks.add(20L); + locks.add(200L); + locks.add(2000L); + reconnectMessage.addInvalidationsInProgress(setToInvalidate); reconnectMessage.clearInProgress(); + reconnectMessage.addLocksHeld(locks); ClusterTierReconnectMessage decoded = reconnectMessageCodec.decode(reconnectMessageCodec.encode(reconnectMessage)); assertThat(decoded, notNullValue()); assertThat(decoded.getInvalidationsInProgress(), containsInAnyOrder(setToInvalidate.toArray())); assertThat(decoded.isClearInProgress(), is(true)); + assertThat(decoded.getLocksHeld(), containsInAnyOrder(locks.toArray())); } } diff --git a/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java new file mode 100644 index 0000000000..967a263def --- /dev/null +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java @@ -0,0 +1,230 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; +import org.ehcache.clustered.common.internal.store.Chain; +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.util.AbstractMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +import static java.util.Arrays.asList; +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.Matchers.hasPayloads; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.allInvalidationDone; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateAll; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateHash; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.failure; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.getResponse; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.hashInvalidationDone; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.mapValue; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.prepareForDestroy; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverInvalidateHash; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.success; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class ResponseCodecTest { + + private static final ResponseCodec RESPONSE_CODEC = new ResponseCodec(); + private static final long KEY = 42L; + private static final int INVALIDATION_ID = 134; + + @Test + public void testFailureResponseCodec() { + EhcacheEntityResponse failure = failure(new IllegalMessageException("Test Exception")); + + EhcacheEntityResponse decoded = RESPONSE_CODEC.decode(RESPONSE_CODEC.encode(failure)); + + assertThat(((EhcacheEntityResponse.Failure)decoded).getCause().getMessage(), is("Test Exception")); + } + + @Test + public void testGetResponseCodec() { + EhcacheEntityResponse getResponse = getResponse(chainOf(createPayload(1L), createPayload(11L), createPayload(111L))); + + EhcacheEntityResponse decoded = RESPONSE_CODEC.decode(RESPONSE_CODEC.encode(getResponse)); + + Chain decodedChain = ((EhcacheEntityResponse.GetResponse) decoded).getChain(); + + assertThat(decodedChain, hasPayloads(1L, 11L, 111L)); + } + + @Test + public void testMapValueCodec() throws Exception { + Object subject = new Integer(10); + EhcacheEntityResponse mapValue = mapValue(subject); + EhcacheEntityResponse.MapValue decoded = + (EhcacheEntityResponse.MapValue) RESPONSE_CODEC.decode(RESPONSE_CODEC.encode(mapValue)); + assertThat(decoded.getValue(), equalTo(subject)); + } + + @Test + public void testSuccess() throws Exception { + byte[] encoded = RESPONSE_CODEC.encode(success()); + assertThat(RESPONSE_CODEC.decode(encoded), Matchers.sameInstance(success())); + } + + @Test + public void testHashInvalidationDone() throws Exception { + EhcacheEntityResponse.HashInvalidationDone response = hashInvalidationDone(KEY); + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.HashInvalidationDone decodedResponse = (EhcacheEntityResponse.HashInvalidationDone) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.HASH_INVALIDATION_DONE)); + assertThat(decodedResponse.getKey(), is(KEY)); + } + + @Test + public void testAllInvalidationDone() throws Exception { + EhcacheEntityResponse.AllInvalidationDone response = allInvalidationDone(); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.AllInvalidationDone decodedResponse = (EhcacheEntityResponse.AllInvalidationDone) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.ALL_INVALIDATION_DONE)); + } + + @Test + public void testClientInvalidateHash() throws Exception { + EhcacheEntityResponse.ClientInvalidateHash response = clientInvalidateHash(KEY, INVALIDATION_ID); + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ClientInvalidateHash decodedResponse = (EhcacheEntityResponse.ClientInvalidateHash) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.CLIENT_INVALIDATE_HASH)); + assertThat(decodedResponse.getKey(), is(KEY)); + assertThat(decodedResponse.getInvalidationId(), is(INVALIDATION_ID)); + } + + @Test + public void testClientInvalidateAll() throws Exception { + EhcacheEntityResponse.ClientInvalidateAll response = clientInvalidateAll(INVALIDATION_ID); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ClientInvalidateAll decodedResponse = (EhcacheEntityResponse.ClientInvalidateAll) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.CLIENT_INVALIDATE_ALL)); + assertThat(decodedResponse.getInvalidationId(), is(INVALIDATION_ID)); + } + + @Test + public void testServerInvalidateHash_withEvictedChain() { + EhcacheEntityResponse.ServerInvalidateHash response = serverInvalidateHash(KEY, chainOf(createPayload(1L), createPayload(11L), createPayload(111L))); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ServerInvalidateHash decodedResponse = (EhcacheEntityResponse.ServerInvalidateHash) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.SERVER_INVALIDATE_HASH)); + assertThat(decodedResponse.getKey(), is(KEY)); + assertThat(decodedResponse.getEvictedChain(), hasPayloads(1L, 11L, 111L)); + } + + @Test + public void testServerInvalidateHash_withoutEvictedChain() { + EhcacheEntityResponse.ServerInvalidateHash response = serverInvalidateHash(KEY, null); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ServerInvalidateHash decodedResponse = (EhcacheEntityResponse.ServerInvalidateHash) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.SERVER_INVALIDATE_HASH)); + assertThat(decodedResponse.getKey(), is(KEY)); + assertThat(decodedResponse.getEvictedChain(), is(nullValue())); + } + + @Test + public void testPrepareForDestroy() throws Exception { + Set storeIdentifiers = new HashSet<>(); + storeIdentifiers.add("store1"); + storeIdentifiers.add("anotherStore"); + EhcacheEntityResponse.PrepareForDestroy response = prepareForDestroy(storeIdentifiers); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.PrepareForDestroy decodedResponse = (EhcacheEntityResponse.PrepareForDestroy) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.PREPARE_FOR_DESTROY)); + assertThat(decodedResponse.getStores(), is(storeIdentifiers)); + } + + @Test + public void testResolveRequest() throws Exception { + long hash = 42L; + EhcacheEntityResponse.ResolveRequest response = new EhcacheEntityResponse.ResolveRequest(hash, chainOf(createPayload(1L), createPayload(11L), createPayload(111L))); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ResolveRequest decodedResponse = (EhcacheEntityResponse.ResolveRequest) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.RESOLVE_REQUEST)); + assertThat(decodedResponse.getKey(), is(42L)); + assertThat(decodedResponse.getChain(), hasPayloads(1L, 11L, 111L)); + } + + @Test + public void testLockResponse() { + EhcacheEntityResponse.LockSuccess lockSuccess = new EhcacheEntityResponse.LockSuccess(chainOf(createPayload(1L), createPayload(10L))); + + byte[] sucessEncoded = RESPONSE_CODEC.encode(lockSuccess); + EhcacheEntityResponse.LockSuccess successDecoded = (EhcacheEntityResponse.LockSuccess) RESPONSE_CODEC.decode(sucessEncoded); + + assertThat(successDecoded.getResponseType(), is(EhcacheResponseType.LOCK_SUCCESS)); + assertThat(successDecoded.getChain(), hasPayloads(1L, 10L)); + + EhcacheEntityResponse.LockFailure lockFailure = EhcacheEntityResponse.lockFailure(); + byte[] failureEncoded = RESPONSE_CODEC.encode(lockFailure); + EhcacheEntityResponse.LockFailure failureDecoded = (EhcacheEntityResponse.LockFailure) RESPONSE_CODEC.decode(failureEncoded); + + assertThat(failureDecoded.getResponseType(), is(EhcacheResponseType.LOCK_FAILURE)); + } + + @Test + public void testIteratorBatchResponse() { + UUID uuid = UUID.randomUUID(); + List> chains = asList( + new AbstractMap.SimpleImmutableEntry<>(1L, chainOf(createPayload(1L), createPayload(10L))), + new AbstractMap.SimpleImmutableEntry<>(2L, chainOf(createPayload(2L), createPayload(20L)))); + EhcacheEntityResponse.IteratorBatch iteratorBatch = EhcacheEntityResponse.iteratorBatchResponse(uuid, chains, true); + + byte[] encoded = RESPONSE_CODEC.encode(iteratorBatch); + EhcacheEntityResponse.IteratorBatch batchDecoded = (EhcacheEntityResponse.IteratorBatch) RESPONSE_CODEC.decode(encoded); + + assertThat(batchDecoded.getResponseType(), is(EhcacheResponseType.ITERATOR_BATCH)); + assertThat(batchDecoded.getIdentity(), is(uuid)); + assertThat(batchDecoded.getChains().get(0).getValue(), hasPayloads(1L, 10L)); + assertThat(batchDecoded.getChains().get(1).getValue(), hasPayloads(2L, 20L)); + assertThat(batchDecoded.isLast(), is(true)); + } + + @Test + public void testServerAppendResponse() { + EhcacheEntityResponse.ServerAppend serverAppend = new EhcacheEntityResponse.ServerAppend(createPayload(3L), chainOf(createPayload(1L), createPayload(2L))); + + byte[] encoded = RESPONSE_CODEC.encode(serverAppend); + EhcacheEntityResponse.ServerAppend appendDecoded = (EhcacheEntityResponse.ServerAppend) RESPONSE_CODEC.decode(encoded); + + assertThat(appendDecoded.getResponseType(), is(EhcacheResponseType.SERVER_APPEND)); + assertThat(appendDecoded.getAppended().asLongBuffer().get(), is(3L)); + assertThat(appendDecoded.getBeforeAppend(), hasPayloads(1L, 2L)); + } +} diff --git a/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java new file mode 100644 index 0000000000..2b25c1495f --- /dev/null +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java @@ -0,0 +1,186 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.junit.Test; + +import java.util.UUID; + +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.ChainUtils.readPayload; +import static org.ehcache.clustered.ChainUtils.sequencedChainOf; +import static org.ehcache.clustered.Matchers.hasPayloads; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class ServerStoreOpCodecTest { + + private static final ServerStoreOpCodec STORE_OP_CODEC = new ServerStoreOpCodec(); + + @Test + public void testAppendMessageCodec() { + + ServerStoreOpMessage.AppendMessage appendMessage = new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L)); + + byte[] encoded = STORE_OP_CODEC.encode(appendMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(appendMessage.getMessageType(), wrap(encoded)); + ServerStoreOpMessage.AppendMessage decodedAppendMessage = (ServerStoreOpMessage.AppendMessage) decodedMsg; + + assertThat(decodedAppendMessage.getKey(), is(1L)); + assertThat(readPayload(decodedAppendMessage.getPayload()), is(1L)); + assertThat(decodedAppendMessage.getMessageType(), is(EhcacheMessageType.APPEND)); + } + + @Test + public void testGetMessageCodec() { + ServerStoreOpMessage getMessage = new ServerStoreOpMessage.GetMessage(2L); + + byte[] encoded = STORE_OP_CODEC.encode(getMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(getMessage.getMessageType(), wrap(encoded)); + ServerStoreOpMessage.GetMessage decodedGetMessage = (ServerStoreOpMessage.GetMessage) decodedMsg; + + assertThat(decodedGetMessage.getKey(), is(2L)); + assertThat(decodedGetMessage.getMessageType(), is(EhcacheMessageType.GET_STORE)); + } + + @Test + public void testGetAndAppendMessageCodec() { + ServerStoreOpMessage getAndAppendMessage = new ServerStoreOpMessage.GetAndAppendMessage(10L, createPayload(10L)); + + byte[] encoded = STORE_OP_CODEC.encode(getAndAppendMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(getAndAppendMessage.getMessageType(), wrap(encoded)); + ServerStoreOpMessage.GetAndAppendMessage decodedGetAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage) decodedMsg; + + assertThat(decodedGetAndAppendMessage.getKey(), is(10L)); + assertThat(readPayload(decodedGetAndAppendMessage.getPayload()), is(10L)); + assertThat(decodedGetAndAppendMessage.getMessageType(), is(EhcacheMessageType.GET_AND_APPEND)); + } + + @Test + public void testReplaceAtHeadMessageCodec() { + ServerStoreOpMessage replaceAtHeadMessage = new ServerStoreOpMessage.ReplaceAtHeadMessage(10L, + sequencedChainOf(createPayload(10L), createPayload(100L), createPayload(1000L)), + chainOf(createPayload(2000L))); + + byte[] encoded = STORE_OP_CODEC.encode(replaceAtHeadMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(replaceAtHeadMessage.getMessageType(), wrap(encoded)); + ServerStoreOpMessage.ReplaceAtHeadMessage decodedReplaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) decodedMsg; + + assertThat(decodedReplaceAtHeadMessage.getKey(), is(10L)); + assertThat(decodedReplaceAtHeadMessage.getExpect(), hasPayloads(10L, 100L, 1000L)); + assertThat(decodedReplaceAtHeadMessage.getUpdate(), hasPayloads(2000L)); + assertThat(decodedReplaceAtHeadMessage.getMessageType(), is(EhcacheMessageType.REPLACE)); + } + + @Test + public void testClearMessageCodec() throws Exception { + ServerStoreOpMessage clearMessage = new ServerStoreOpMessage.ClearMessage(); + + byte[] encoded = STORE_OP_CODEC.encode(clearMessage); + ServerStoreOpMessage decodedMsg = (ServerStoreOpMessage) STORE_OP_CODEC.decode(clearMessage.getMessageType(), wrap(encoded)); + + assertThat(decodedMsg.getMessageType(), is(EhcacheMessageType.CLEAR)); + } + + @Test + public void testClientInvalidationAckMessageCodec() throws Exception { + ServerStoreOpMessage invalidationAckMessage = new ServerStoreOpMessage.ClientInvalidationAck(42L,123); + + byte[] encoded = STORE_OP_CODEC.encode(invalidationAckMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(invalidationAckMessage.getMessageType(), wrap(encoded)); + ServerStoreOpMessage.ClientInvalidationAck decodedInvalidationAckMessage = (ServerStoreOpMessage.ClientInvalidationAck)decodedMsg; + + assertThat(decodedInvalidationAckMessage.getKey(), is(42L)); + assertThat(decodedInvalidationAckMessage.getInvalidationId(), is(123)); + assertThat(decodedInvalidationAckMessage.getMessageType(), is(EhcacheMessageType.CLIENT_INVALIDATION_ACK)); + } + + @Test + public void testLockMessage() throws Exception { + ServerStoreOpMessage lockMessage = new ServerStoreOpMessage.LockMessage(2L); + + byte[] encoded = STORE_OP_CODEC.encode(lockMessage); + EhcacheEntityMessage decoded = STORE_OP_CODEC.decode(lockMessage.getMessageType(), wrap(encoded)); + + ServerStoreOpMessage.LockMessage decodedLockMessage = (ServerStoreOpMessage.LockMessage) decoded; + + assertThat(decodedLockMessage.getHash(), is(2L)); + assertThat(decodedLockMessage.getMessageType(), is(EhcacheMessageType.LOCK)); + } + + @Test + public void testUnlockMessage() throws Exception { + ServerStoreOpMessage unlockMessage = new ServerStoreOpMessage.UnlockMessage(2L); + + byte[] encoded = STORE_OP_CODEC.encode(unlockMessage); + EhcacheEntityMessage decoded = STORE_OP_CODEC.decode(unlockMessage.getMessageType(), wrap(encoded)); + + ServerStoreOpMessage.UnlockMessage decodedLockMessage = (ServerStoreOpMessage.UnlockMessage) decoded; + + assertThat(decodedLockMessage.getHash(), is(2L)); + assertThat(decodedLockMessage.getMessageType(), is(EhcacheMessageType.UNLOCK)); + } + + @Test + public void testIteratorOpenMessage() { + ServerStoreOpMessage iteratorOpenMessage = new ServerStoreOpMessage.IteratorOpenMessage(42); + + byte[] encoded = STORE_OP_CODEC.encode(iteratorOpenMessage); + ServerStoreOpMessage.IteratorOpenMessage decoded = (ServerStoreOpMessage.IteratorOpenMessage) STORE_OP_CODEC.decode(iteratorOpenMessage.getMessageType(), wrap(encoded)); + + assertThat(decoded.getMessageType(), is(EhcacheMessageType.ITERATOR_OPEN)); + assertThat(decoded.getBatchSize(), is(42)); + } + + @Test + public void testIteratorCloseMessage() { + UUID uuid = UUID.randomUUID(); + ServerStoreOpMessage iteratorCloseMessage = new ServerStoreOpMessage.IteratorCloseMessage(uuid); + + byte[] encoded = STORE_OP_CODEC.encode(iteratorCloseMessage); + ServerStoreOpMessage.IteratorCloseMessage decoded = (ServerStoreOpMessage.IteratorCloseMessage) STORE_OP_CODEC.decode(iteratorCloseMessage.getMessageType(), wrap(encoded)); + + assertThat(decoded.getMessageType(), is(EhcacheMessageType.ITERATOR_CLOSE)); + assertThat(decoded.getIdentity(), is(uuid)); + } + + @Test + public void testIteratorAdvanceMessage() { + UUID uuid = UUID.randomUUID(); + ServerStoreOpMessage iteratorAdvanceMessage = new ServerStoreOpMessage.IteratorAdvanceMessage(uuid, 42); + + byte[] encoded = STORE_OP_CODEC.encode(iteratorAdvanceMessage); + ServerStoreOpMessage.IteratorAdvanceMessage decoded = (ServerStoreOpMessage.IteratorAdvanceMessage) STORE_OP_CODEC.decode(iteratorAdvanceMessage.getMessageType(), wrap(encoded)); + + assertThat(decoded.getMessageType(), is(EhcacheMessageType.ITERATOR_ADVANCE)); + assertThat(decoded.getIdentity(), is(uuid)); + assertThat(decoded.getBatchSize(), is(42)); + } + + @Test + public void testEnableEventListenerMessage() { + ServerStoreOpMessage enableEventListenerMessage = new ServerStoreOpMessage.EnableEventListenerMessage(true); + + byte[] encoded = STORE_OP_CODEC.encode(enableEventListenerMessage); + ServerStoreOpMessage.EnableEventListenerMessage decoded = (ServerStoreOpMessage.EnableEventListenerMessage) STORE_OP_CODEC.decode(enableEventListenerMessage.getMessageType(), wrap(encoded)); + + assertThat(decoded.getMessageType(), is(EhcacheMessageType.ENABLE_EVENT_LISTENER)); + assertThat(decoded.isEnable(), is(true)); + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java similarity index 81% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java index 332b0c6b40..ce0c886ed3 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java @@ -16,13 +16,10 @@ package org.ehcache.clustered.common.internal.messages; -import org.ehcache.clustered.common.internal.store.Element; import org.junit.Test; -import java.util.Collections; - -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.ehcache.clustered.common.internal.store.Util.getChain; +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; @@ -30,17 +27,17 @@ public class ServerStoreOpMessageTest { @Test - public void testConcurrencyKeysEqualForSameCacheAndKey() throws Exception { + public void testConcurrencyKeysEqualForSameCacheAndKey() { ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L)); ConcurrentEntityMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage(1L, createPayload(1L)); - ConcurrentEntityMessage m3 = new ServerStoreOpMessage.ReplaceAtHeadMessage(1L, getChain(Collections.emptyList()), getChain(Collections.emptyList())); + ConcurrentEntityMessage m3 = new ServerStoreOpMessage.ReplaceAtHeadMessage(1L, chainOf(), chainOf()); assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); assertThat(m2.concurrencyKey(), is(m3.concurrencyKey())); } @Test - public void testConcurrencyKeysEqualForDifferentCachesSameKey() throws Exception { + public void testConcurrencyKeysEqualForDifferentCachesSameKey() { ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L)); ConcurrentEntityMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage(1L, createPayload(1L)); @@ -48,7 +45,7 @@ public void testConcurrencyKeysEqualForDifferentCachesSameKey() throws Exception } @Test - public void testConcurrencyKeysNotEqualForDifferentCachesAndKeys() throws Exception { + public void testConcurrencyKeysNotEqualForDifferentCachesAndKeys() { ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L)); ConcurrentEntityMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage(2L, createPayload(1L)); ConcurrentEntityMessage m3 = new ServerStoreOpMessage.AppendMessage(3L, createPayload(1L)); diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/store/WhitelistedUnmarshallingTest.java b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/store/WhitelistedUnmarshallingTest.java similarity index 95% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/store/WhitelistedUnmarshallingTest.java rename to clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/store/WhitelistedUnmarshallingTest.java index 920ac6a627..77862c7fe5 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/store/WhitelistedUnmarshallingTest.java +++ b/clustered/ehcache-common/src/test/java/org/ehcache/clustered/common/internal/store/WhitelistedUnmarshallingTest.java @@ -14,12 +14,11 @@ * limitations under the License. */ -package org.ehcache.clustered.common.internal.Store; +package org.ehcache.clustered.common.internal.store; import org.ehcache.clustered.common.internal.store.Util; import org.ehcache.clustered.common.internal.store.ValueWrapper; import org.hamcrest.Matchers; -import org.junit.Assert; import org.junit.Test; import java.io.ObjectStreamClass; @@ -29,6 +28,7 @@ import java.util.function.Predicate; import static org.ehcache.clustered.common.internal.messages.StateRepositoryOpCodec.WHITELIST_PREDICATE; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -52,7 +52,7 @@ public void unmarshallingNonWhitelistedClassTest() { private void unmarshallingCheck(T t, Predicate> isClassPermitted) { @SuppressWarnings("unchecked") T unmarshalled = (T) Util.unmarshall(ByteBuffer.wrap(Util.marshall(t)), isClassPermitted); - Assert.assertThat(unmarshalled, Matchers.is(t)); + assertThat(unmarshalled, Matchers.is(t)); } private void unmarshallingStateRepoMessagesCheck(T t) { diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index 100d653363..56aab2750d 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -14,66 +14,67 @@ * limitations under the License. */ +plugins { + id 'org.ehcache.build.conventions.java' +} + configurations { serverLibs } dependencies { - // not required by gradle but required by the IDE because 'dist' do not has any transitive dependencies - testCompileOnly project(':clustered:client') - testCompileOnly project(':clustered:common') - testCompileOnly project(':impl') - testCompileOnly project(':xml') - testCompileOnly "org.terracotta.internal:client-runtime:$terracottaCoreVersion" - testCompileOnly "org.terracotta:runnel:$terracottaPlatformVersion" - - testCompile project(':management') - testCompile "org.terracotta.management.dist:mnm-nms:$terracottaPlatformVersion" - testCompile "org.terracotta.management.dist:mnm-nms-agent:$terracottaPlatformVersion" - testCompile "com.fasterxml.jackson.core:jackson-databind:2.8.0" - testRuntime project(':clustered:clustered-dist') - testRuntime project(':dist') - - testCompile (group:'org.terracotta.internal', name:'galvan-support', version: terracottaCoreVersion) { - // galvan-support depends on junit 4.11 and version enforcement plugin won't allow that + testImplementation project(':clustered:ehcache-client') + testImplementation project(':clustered:ehcache-common') + testImplementation project(':ehcache-impl') + testImplementation project(':ehcache-xml') + testImplementation project(':ehcache-107') + testImplementation "org.terracotta.internal:client-runtime:$terracottaCoreVersion" + testImplementation "org.terracotta:runnel:$terracottaPlatformVersion" + testImplementation "org.terracotta:lease-api:$terracottaPlatformVersion" + testImplementation("javax.cache:cache-tests:$jcacheTckVersion") { exclude group:'junit', module:'junit' } - testCompile (group:'com.google.code.tempus-fugit', name:'tempus-fugit', version:'1.1') { + testImplementation("javax.cache:cache-tests:$jcacheTckVersion:tests") { exclude group:'junit', module:'junit' - exclude group:'org.hamcrest', module:'hamcrest-core' } - testCompile group: 'javax.cache', name: 'cache-api', version: jcacheVersion - serverLibs ("org.terracotta.management.dist:mnm-server:$terracottaPlatformVersion") { - exclude group:'org.terracotta.management.dist', module:'mnm-common' - } + testImplementation project(':ehcache-management') + testImplementation "org.terracotta.management:nms-entity-client:$terracottaPlatformVersion" + testImplementation "org.terracotta.management:nms-agent-entity-client:$terracottaPlatformVersion" + testImplementation "com.fasterxml.jackson.core:jackson-databind:$jacksonVersion" + testImplementation "org.terracotta:galvan-platform-support:$terracottaPlatformVersion" + testImplementation "javax.cache:cache-api:$jcacheVersion" } -task unzipKit(type: Copy) { - dependsOn project(':clustered:clustered-dist').distZip - from zipTree(project(':clustered:clustered-dist').distZip.archivePath) +task unzipKit(type: Sync) { + dependsOn project(':clustered:ehcache-clustered').distZip + from zipTree(project(':clustered:ehcache-clustered').distZip.archivePath) into 'build/ehcache-kit' } -task copyServerLibs(type: Copy) { +task copyServerLibs(type: Sync) { dependsOn unzipKit from project.configurations.serverLibs - into "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit/server/plugins/lib" + into "$unzipKit.destinationDir/${project(':clustered:ehcache-clustered').archivesBaseName}-$project.version-kit/server/plugins/lib" } test { + maxHeapSize = '512m' + maxParallelForks = 8 dependsOn copyServerLibs environment 'JAVA_HOME', testJava.javaHome - if (testJava.javaVersion.isJava9Compatible()) { - jvmArgs += ['--add-modules', 'java.xml.bind'] - environment 'JAVA_OPTS', '--add-modules java.xml.bind' - } //If this directory does not exist, tests will fail with a cryptic assert failure - systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit" + systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:ehcache-clustered').archivesBaseName}-$project.version-kit" // Uncomment to include client logging in console output // testLogging.showStandardStreams = true } -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] +configurations.all { + resolutionStrategy { + dependencySubstitution { + substitute(module('junit:junit:4.12')) + .because('CVE-2020-15250') + .with(module('junit:junit:4.13.1')) + } + } } diff --git a/clustered/integration-test/gradle.properties b/clustered/integration-test/gradle.properties deleted file mode 100644 index bdb75fe1c9..0000000000 --- a/clustered/integration-test/gradle.properties +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 Clustered Integration Test module -subPomDesc = The Clustering Integration Test module of Ehcache 3 diff --git a/clustered/integration-test/src/test/java/org/ehcache/Diagnostics.java b/clustered/integration-test/src/test/java/org/ehcache/Diagnostics.java deleted file mode 100644 index 1891c1fbb3..0000000000 --- a/clustered/integration-test/src/test/java/org/ehcache/Diagnostics.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache; - -import com.sun.management.HotSpotDiagnosticMXBean; - -import java.io.File; -import java.io.IOException; -import java.io.PrintStream; -import java.lang.management.LockInfo; -import java.lang.management.ManagementFactory; -import java.lang.management.MonitorInfo; -import java.lang.management.ThreadInfo; -import java.lang.management.ThreadMXBean; -import java.util.Calendar; -import java.util.Date; - -import javax.management.MBeanServer; - -/** - * Provides methods to produce diagnostic output. - */ -@SuppressWarnings({ "UnusedDeclaration", "WeakerAccess" }) -public final class Diagnostics { - - private static final String HOTSPOT_DIAGNOSTIC_MXBEAN_NAME = "com.sun.management:type=HotSpotDiagnostic"; - private static final String HEAP_DUMP_FILENAME_TEMPLATE = "java_%1$04d_%2$tFT%2$tH%2$tM%2$tS.%2$tL.hprof"; - private static final File WORKING_DIRECTORY = new File(System.getProperty("user.dir")); - - /** - * Private niladic constructor to prevent instantiation. - */ - private Diagnostics() { - } - - /** - * Writes a complete thread dump to {@code System.err}. - */ - public static void threadDump() { - threadDump(System.err); - } - - /** - * Writes a complete thread dump to the designated {@code PrintStream}. - * - * @param out the {@code PrintStream} to which the thread dump is written - */ - public static void threadDump(final PrintStream out) { - if (out == null) { - throw new NullPointerException("out"); - } - - final ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); - - final Calendar when = Calendar.getInstance(); - final ThreadInfo[] threadInfos = threadMXBean.dumpAllThreads( - threadMXBean.isObjectMonitorUsageSupported(), threadMXBean.isSynchronizerUsageSupported()); - - out.format("%nFull thread dump %1$tF %1$tT.%1$tL %1$tz%n", when); - for (final ThreadInfo threadInfo : threadInfos) { - out.print(format(threadInfo)); - } - } - - /** - * Format a {@code ThreadInfo} instance without a stack depth limitation. This method reproduces the - * formatting performed in {@code java.lang.management.ThreadInfo.toString()} without the stack depth limit. - * - * @param threadInfo the {@code ThreadInfo} instance to foramt - * - * @return a {@code CharSequence} instance containing the formatted {@code ThreadInfo} - */ - private static CharSequence format(final ThreadInfo threadInfo) { - StringBuilder sb = new StringBuilder(4096); - - Thread.State threadState = threadInfo.getThreadState(); - sb.append('"') - .append(threadInfo.getThreadName()) - .append('"') - .append(" Id=") - .append(threadInfo.getThreadId()) - .append(' ') - .append(threadState); - - if (threadInfo.getLockName() != null) { - sb.append(" on ").append(threadInfo.getLockName()); - } - if (threadInfo.getLockOwnerName() != null) { - sb.append(" owned by ").append('"').append(threadInfo.getLockOwnerName()).append('"') - .append(" Id=").append(threadInfo.getLockOwnerId()); - } - - if (threadInfo.isSuspended()) { - sb.append(" (suspended)"); - } - if (threadInfo.isInNative()) { - sb.append(" (in native)"); - } - sb.append('\n'); - - StackTraceElement[] stackTrace = threadInfo.getStackTrace(); - for (int i = 0; i < stackTrace.length; i++) { - StackTraceElement element = stackTrace[i]; - sb.append("\tat ").append(element); - sb.append('\n'); - if (i == 0) { - if (threadInfo.getLockInfo() != null) { - switch (threadState) { - case BLOCKED: - sb.append("\t- blocked on ").append(threadInfo.getLockInfo()); - sb.append('\n'); - break; - case WAITING: - sb.append("\t- waiting on ").append(threadInfo.getLockInfo()); - sb.append('\n'); - break; - case TIMED_WAITING: - sb.append("\t- waiting on ").append(threadInfo.getLockInfo()); - sb.append('\n'); - break; - default: - } - } - } - - for (MonitorInfo monitorInfo : threadInfo.getLockedMonitors()) { - if (monitorInfo.getLockedStackDepth() == i) { - sb.append("\t- locked ").append(monitorInfo); - sb.append('\n'); - } - } - } - - LockInfo[] lockedSynchronizers = threadInfo.getLockedSynchronizers(); - if (lockedSynchronizers.length > 0) { - sb.append("\n\tNumber of locked synchronizers = ").append(lockedSynchronizers.length); - sb.append('\n'); - for (LockInfo lockedSynchronizer : lockedSynchronizers) { - sb.append("\t- ").append(lockedSynchronizer); - sb.append('\n'); - } - } - - sb.append('\n'); - return sb; - } - - /** - * Take a Java heap dump into a file whose name is produced from the template - * {@value #HEAP_DUMP_FILENAME_TEMPLATE} where {@code 1$} is the PID of - * the current process obtained from {@link #getPid()}. - * - * @param dumpLiveObjects if {@code true}, only "live" (reachable) objects are dumped; - * if {@code false}, all objects in the heap are dumped - * - * @return the name of the dump file; the file is written to the current directory (generally {@code user.dir}) - */ - public static String dumpHeap(final boolean dumpLiveObjects) { - - String dumpName; - final int pid = getPid(); - final Date currentTime = new Date(); - if (pid > 0) { - dumpName = String.format(HEAP_DUMP_FILENAME_TEMPLATE, pid, currentTime); - } else { - dumpName = String.format(HEAP_DUMP_FILENAME_TEMPLATE, 0, currentTime); - } - - dumpName = new File(WORKING_DIRECTORY, dumpName).getAbsolutePath(); - - try { - dumpHeap(dumpLiveObjects, dumpName); - } catch (IOException e) { - System.err.printf("Unable to write heap dump to %s: %s%n", dumpName, e); - e.printStackTrace(System.err); - return null; - } - - return dumpName; - } - - /** - * Write a Java heap dump to the named file. If the dump file exists, this method will - * fail. - * - * @param dumpLiveObjects if {@code true}, only "live" (reachable) objects are dumped; - * if {@code false}, all objects in the heap are dumped - * @param dumpName the name of the file to which the heap dump is written; relative names - * are relative to the current directory ({@code user.dir}). If the value - * of {@code dumpName} does not end in {@code .hprof}, it is appended. - * - * @throws IOException if thrown while loading the HotSpot Diagnostic MXBean or writing the heap dump - * - * @see - * com.sun.management.HotSpotDiagnosticMXBean - */ - public static void dumpHeap(final boolean dumpLiveObjects, String dumpName) throws IOException { - if (dumpName == null) { - throw new NullPointerException("dumpName"); - } - - if (!dumpName.endsWith(".hprof")) { - dumpName += ".hprof"; - } - - final MBeanServer server = ManagementFactory.getPlatformMBeanServer(); - final HotSpotDiagnosticMXBean hotSpotDiagnosticMXBean = - ManagementFactory.newPlatformMXBeanProxy(server, HOTSPOT_DIAGNOSTIC_MXBEAN_NAME, HotSpotDiagnosticMXBean.class); - hotSpotDiagnosticMXBean.dumpHeap(dumpName, dumpLiveObjects); - } - - /** - * Gets the PID of the current process. This method is dependent upon "common" - * operation of the {@code java.lang.management.RuntimeMXBean#getName()} method. - * - * @return the PID of the current process or {@code -1} if the PID can not be determined - */ - public static int getPid() { - // Expected to be of the form "@" - final String jvmProcessName = ManagementFactory.getRuntimeMXBean().getName(); - try { - return Integer.valueOf(jvmProcessName.substring(0, jvmProcessName.indexOf('@'))); - } catch (NumberFormatException | IndexOutOfBoundsException e) { - return -1; - } - } -} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicCacheOpsMultiThreadedTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicCacheOpsMultiThreadedTest.java new file mode 100644 index 0000000000..8d57c13c93 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicCacheOpsMultiThreadedTest.java @@ -0,0 +1,149 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import java.net.URI; +import java.time.Duration; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicLong; + +import static java.util.Collections.nCopies; +import static java.util.stream.Collectors.toList; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.clustered.client.config.builders.TimeoutsBuilder.timeouts; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + + +/** + * Simulate multiple clients starting up the same cache manager simultaneously and ensure that puts and gets works just + * fine and nothing get lost or hung, just because multiple cache manager instances of the same cache manager are coming up + * simultaneously. + */ +public class BasicCacheOpsMultiThreadedTest { + + @ClassRule + public static Cluster CLUSTER = + newCluster().in(clusterPath()).withServiceFragment(offheapResource("primary-server-resource", 64)).build(); + + private static final String CLUSTERED_CACHE_NAME = "clustered-cache"; + private static final String SYN_CACHE_NAME = "syn-cache"; + private static final String PRIMARY_SERVER_RESOURCE_NAME = "primary-server-resource"; + private static final String CACHE_MANAGER_NAME = "/crud-cm"; + private static final int PRIMARY_SERVER_RESOURCE_SIZE = 4; //MB + private static final int NUM_THREADS = 8; + private static final int MAX_WAIT_TIME_SECONDS = 30; + + private final AtomicLong idGenerator = new AtomicLong(2L); + + @Test + public void testMultipleClients() { + ExecutorService executorService = Executors.newFixedThreadPool(NUM_THREADS); + try { + List> results = nCopies(NUM_THREADS, content()).stream().map(executorService::submit).collect(toList()); + + results.stream().map(f -> { + try { + f.get(); + return Optional.empty(); + } catch (Exception e) { + return Optional.of(e); + } + }).filter(Optional::isPresent).map(Optional::get).reduce((a, b) -> { + a.addSuppressed(b); + return a; + } + ).ifPresent(t -> { + throw new AssertionError(t); + }); + } finally { + executorService.shutdownNow(); + } + } + + private Callable content() { + return () -> { + try (PersistentCacheManager cacheManager = createCacheManager(CLUSTER.getConnectionURI())) { + Cache synCache = cacheManager.getCache(SYN_CACHE_NAME, String.class, Boolean.class); + Cache customValueCache = cacheManager.getCache(CLUSTERED_CACHE_NAME, Long.class, String.class); + parallelPuts(customValueCache); + String firstClientStartKey = "first_client_start", firstClientEndKey = "first_client_end"; + if (synCache.putIfAbsent(firstClientStartKey, true) == null) { + customValueCache.put(1L, "value"); + assertThat(customValueCache.get(1L), is("value")); + synCache.put(firstClientEndKey, true); + } else { + assertThat(() -> synCache.get(firstClientEndKey), eventually().matches(notNullValue())); + assertThat(customValueCache.get(1L), is("value")); + } + return null; + } + }; + } + + private static PersistentCacheManager createCacheManager(URI clusterURI) { + CacheManagerBuilder clusteredCacheManagerBuilder = newCacheManagerBuilder() + .with(cluster(clusterURI.resolve(CACHE_MANAGER_NAME)) + .timeouts(timeouts().read(Duration.ofSeconds(MAX_WAIT_TIME_SECONDS)).write(Duration.ofSeconds(MAX_WAIT_TIME_SECONDS))) + .autoCreate(server -> server.defaultServerResource(PRIMARY_SERVER_RESOURCE_NAME))) + .withCache(CLUSTERED_CACHE_NAME, newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() + .with(clusteredDedicated(PRIMARY_SERVER_RESOURCE_SIZE, MemoryUnit.MB))) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))) + .withCache(SYN_CACHE_NAME, newCacheConfigurationBuilder(String.class, Boolean.class, newResourcePoolsBuilder() + .with(clusteredDedicated(PRIMARY_SERVER_RESOURCE_SIZE, MemoryUnit.MB))) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))); + return clusteredCacheManagerBuilder.build(true); + } + + private void parallelPuts(Cache customValueCache) { + // make sure each thread gets its own id + long startingId = idGenerator.getAndAdd(10L); + customValueCache.put(startingId + 1, "value1"); + customValueCache.put(startingId + 1, "value11"); + customValueCache.put(startingId + 2, "value2"); + customValueCache.put(startingId + 3, "value3"); + customValueCache.put(startingId + 4, "value4"); + assertThat(customValueCache.get(startingId + 1), is("value11")); + assertThat(customValueCache.get(startingId + 2), is("value2")); + assertThat(customValueCache.get(startingId + 3), is("value3")); + assertThat(customValueCache.get(startingId + 4), is("value4")); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java index b21174d0ea..e9c9cf5e4c 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java @@ -33,7 +33,6 @@ import org.junit.Test; import org.terracotta.testing.rules.Cluster; -import java.io.File; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; @@ -43,37 +42,28 @@ import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; +import static org.hamcrest.collection.IsIterableWithSize.iterableWithSize; -public class BasicClusteredCacheOpsTest extends ClusteredTests { - - private static final String RESOURCE_CONFIG = - "" - + "" - + "64" - + "" + - "\n"; +public class BasicClusteredCacheOpsTest { @ClassRule - public static Cluster CLUSTER = - newCluster().in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); - - @BeforeClass - public static void waitForActive() throws Exception { - CLUSTER.getClusterControl().waitForActive(); - } + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 64)).build(); @Test public void basicCacheCRUD() throws Exception { final CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")) - .autoCreate() - .defaultServerResource("primary-server-resource")); + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); @@ -108,11 +98,11 @@ public void basicCacheCRUD() throws Exception { public void basicCacheCAS() throws Exception { final CacheManagerBuilder clusteredCacheManagerBuilder = newCacheManagerBuilder() - .with(cluster(CLUSTER.getConnectionURI().resolve("/cas-cm")).autoCreate()) + .with(cluster(CLUSTER.getConnectionURI().resolve("/cas-cm")).autoCreate(c -> c)) .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))); try (PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true)) { @@ -136,11 +126,11 @@ public void basicCacheCAS() throws Exception { public void basicClusteredBulk() throws Exception { final CacheManagerBuilder clusteredCacheManagerBuilder = newCacheManagerBuilder() - .with(cluster(CLUSTER.getConnectionURI().resolve("/bulk-cm")).autoCreate()) + .with(cluster(CLUSTER.getConnectionURI().resolve("/bulk-cm")).autoCreate(c -> c)) .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))); try (PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true)) { @@ -160,6 +150,19 @@ public void basicClusteredBulk() throws Exception { assertThat(all.get(2L), is("two")); assertThat(all.get(3L), is("three")); + Map entries1 = new HashMap<>(); + assertThat(cache1, iterableWithSize(3)); + cache1.forEach(e -> entries1.putIfAbsent(e.getKey(), e.getValue())); + assertThat(entries1, hasEntry(1L, "one")); + assertThat(entries1, hasEntry(2L, "two")); + assertThat(entries1, hasEntry(3L, "three")); + + Map entries2 = new HashMap<>(); + assertThat(cache2, iterableWithSize(3)); + cache2.forEach(e -> entries2.putIfAbsent(e.getKey(), e.getValue())); + assertThat(entries2, hasEntry(1L, "one")); + assertThat(entries2, hasEntry(2L, "two")); + assertThat(entries2, hasEntry(3L, "three")); cache2.removeAll(keySet); all = cache1.getAll(keySet); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java index a6068f56cb..eb5d6d9bcd 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java @@ -15,13 +15,22 @@ */ package org.ehcache.clustered; -import java.io.File; +import java.net.URI; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntity; import org.ehcache.clustered.common.EhcacheEntityVersion; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ClusterTierManagerConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.management.cluster.DefaultClusteringManagementService; +import org.ehcache.management.statistics.DefaultExtendedStatisticsService; import org.hamcrest.Matchers; -import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Ignore; import org.junit.Rule; @@ -34,33 +43,91 @@ import org.terracotta.testing.rules.Cluster; import static java.util.Collections.emptyMap; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; - -public class BasicEntityInteractionTest extends ClusteredTests { - private static final String RESOURCE_CONFIG = - "" - + "" - + "4" - + "" + - "\n"; +public class BasicEntityInteractionTest { @ClassRule - public static Cluster CLUSTER = newCluster().in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 4)).build(); private ClusterTierManagerConfiguration blankConfiguration = new ClusterTierManagerConfiguration("identifier", new ServerSideConfiguration(emptyMap())); - @BeforeClass - public static void waitForActive() throws Exception { - CLUSTER.getClusterControl().waitForActive(); - } - @Rule public TestName testName= new TestName(); + @Test + public void testClusteringServiceConfigurationBuilderThrowsNPE() throws Exception { + String cacheName = "myCACHE"; + String offheap = "primary-server-resource"; + URI tsaUri = CLUSTER.getConnectionURI(); + + try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache(cacheName, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(100, ENTRIES) + .with(clusteredDedicated(offheap, 2, MemoryUnit.MB))) + ).with(ClusteringServiceConfigurationBuilder.cluster(tsaUri) + .autoCreate(server -> server.defaultServerResource(offheap)) + ).build(true)) { + Cache cache = cacheManager.getCache(cacheName, Long.class, String.class); + cache.put(1L, "one"); + } + + try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache(cacheName, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(100, ENTRIES) + .with(clusteredDedicated(offheap, 2, MemoryUnit.MB)) + ) + ).with(ClusteringServiceConfigurationBuilder.cluster(tsaUri) + ).using(new DefaultExtendedStatisticsService() + ).using(new DefaultClusteringManagementService() + ).build(true)) { + Cache cache = cacheManager.getCache(cacheName, Long.class, String.class); + cache.get(1L); + } + + } + + @Test + public void testServicesStoppedTwice() throws Exception { + String cacheName = "myCACHE"; + String offheap = "primary-server-resource"; + URI tsaUri = CLUSTER.getConnectionURI(); + + try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache(cacheName, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(100, ENTRIES) + .with(clusteredDedicated(offheap, 2, MemoryUnit.MB))) + ).with(ClusteringServiceConfigurationBuilder.cluster(tsaUri) + .autoCreate(server -> server.defaultServerResource(offheap)) + // manually adding the following two services should work + ).using(new DefaultExtendedStatisticsService() + ).using(new DefaultClusteringManagementService() + ).build(true)) { + Cache cache = cacheManager.getCache(cacheName, Long.class, String.class); + cache.put(1L, "one"); + } + + try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache(cacheName, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(100, ENTRIES) + .with(clusteredDedicated(offheap, 2, MemoryUnit.MB)) + ) + ).with(ClusteringServiceConfigurationBuilder.cluster(tsaUri) + ).build(true)) { + Cache cache = cacheManager.getCache(cacheName, Long.class, String.class); + cache.get(1L); + } + + } + @Test public void testAbsentEntityRetrievalFails() throws Throwable { try (Connection client = CLUSTER.newConnection()) { @@ -145,6 +212,7 @@ public void testPresentEntityDestroySucceeds() throws Throwable { @Test @Ignore + @SuppressWarnings("try") public void testPresentEntityDestroyBlockedByHeldReferenceSucceeds() throws Throwable { try (Connection client = CLUSTER.newConnection()) { EntityRef ref = getEntityRef(client); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java index 689d51a7a6..f828722f90 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java @@ -53,27 +53,22 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManager; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; -public class CacheManagerLifecycleEhcacheIntegrationTest extends ClusteredTests { - - private static final String RESOURCE_CONFIG = - "" - + "" - + "64" - + "" + - "\n"; +public class CacheManagerLifecycleEhcacheIntegrationTest { @ClassRule - public static Cluster CLUSTER = newCluster().in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 64)).build(); private static Connection ASSERTION_CONNECTION; @BeforeClass public static void waitForActive() throws Exception { - CLUSTER.getClusterControl().waitForActive(); ASSERTION_CONNECTION = CLUSTER.newConnection(); } @@ -81,7 +76,7 @@ public static void waitForActive() throws Exception { public void testAutoCreatedCacheManager() throws Exception { assertEntityNotExists(ClusterTierManagerClientEntity.class, "testAutoCreatedCacheManager"); PersistentCacheManager manager = newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/testAutoCreatedCacheManager")).autoCreate().build()) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/testAutoCreatedCacheManager")).autoCreate(c -> c).build()) .build(); assertEntityNotExists(ClusterTierManagerClientEntity.class, "testAutoCreatedCacheManager"); manager.init(); @@ -112,7 +107,7 @@ public void testMultipleClientsAutoCreatingCacheManager() throws Exception { assertEntityNotExists(ClusterTierManagerClientEntity.class, "testMultipleClientsAutoCreatingCacheManager"); final CacheManagerBuilder managerBuilder = newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/testMultipleClientsAutoCreatingCacheManager")).autoCreate().build()); + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/testMultipleClientsAutoCreatingCacheManager")).autoCreate(c -> c).build()); Callable task = () -> { PersistentCacheManager manager = managerBuilder.build(); @@ -186,6 +181,10 @@ public EntityRef getEntityRef(Class cls, lo public void close() throws IOException { //no-op } + + public boolean isValid() { + return true; + } }; } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusterTierManagerClientEntityFactoryIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusterTierManagerClientEntityFactoryIntegrationTest.java index 0912e00fa6..ab470bbfd4 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusterTierManagerClientEntityFactoryIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusterTierManagerClientEntityFactoryIntegrationTest.java @@ -15,7 +15,6 @@ */ package org.ehcache.clustered; -import java.io.File; import java.io.IOException; import java.util.Collections; import java.util.Map; @@ -34,31 +33,28 @@ import org.terracotta.exception.EntityNotFoundException; import org.terracotta.testing.rules.Cluster; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNull.notNullValue; -import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; -public class ClusterTierManagerClientEntityFactoryIntegrationTest extends ClusteredTests { - private static final Map EMPTY_RESOURCE_MAP = Collections.emptyMap(); +public class ClusterTierManagerClientEntityFactoryIntegrationTest { - private static final String RESOURCE_CONFIG = - "" - + "" - + "64" - + "" + - "\n"; + private static final Map EMPTY_RESOURCE_MAP = Collections.emptyMap(); @ClassRule - public static Cluster CLUSTER = - newCluster().in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary", 64)).build(); private static Connection CONNECTION; @BeforeClass - public static void waitForActive() throws Exception { - CLUSTER.getClusterControl().waitForActive(); + public static void initConnection() throws Exception { CONNECTION = CLUSTER.newConnection(); } @@ -69,14 +65,14 @@ public static void closeConnection() throws IOException { @Test public void testCreate() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); factory.create("testCreate", new ServerSideConfiguration(EMPTY_RESOURCE_MAP)); } @Test public void testCreateWhenExisting() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); factory.create("testCreateWhenExisting", new ServerSideConfiguration(EMPTY_RESOURCE_MAP)); try { factory.create("testCreateWhenExisting", @@ -89,7 +85,7 @@ public void testCreateWhenExisting() throws Exception { @Test public void testCreateWithBadConfigCleansUp() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); try { factory.create("testCreateWithBadConfigCleansUp", new ServerSideConfiguration("flargle", EMPTY_RESOURCE_MAP)); @@ -106,7 +102,7 @@ public void testCreateWithBadConfigCleansUp() throws Exception { @Test public void testRetrieveWithGoodConfig() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); factory.create("testRetrieveWithGoodConfig", new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary")))); assertThat(factory.retrieve("testRetrieveWithGoodConfig", @@ -115,7 +111,7 @@ public void testRetrieveWithGoodConfig() throws Exception { @Test public void testRetrieveWithBadConfig() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); factory.create("testRetrieveWithBadConfig", new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "primary")))); try { @@ -129,7 +125,7 @@ public void testRetrieveWithBadConfig() throws Exception { @Test public void testRetrieveWhenNotExisting() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); try { factory.retrieve("testRetrieveWhenNotExisting", null); fail("Expected EntityNotFoundException"); @@ -140,53 +136,48 @@ public void testRetrieveWhenNotExisting() throws Exception { @Test public void testDestroy() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); factory.create("testDestroy", new ServerSideConfiguration(Collections.emptyMap())); factory.destroy("testDestroy"); } @Test public void testDestroyWhenNotExisting() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); factory.destroy("testDestroyWhenNotExisting"); } @Test public void testAbandonLeadershipWhenNotOwning() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); - try { - factory.abandonLeadership("testAbandonLeadershipWhenNotOwning"); - fail("Expected IllegalMonitorStateException"); - } catch (IllegalMonitorStateException e) { - //expected - } + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); + assertFalse(factory.abandonLeadership("testAbandonLeadershipWhenNotOwning", true)); } @Test public void testAcquireLeadershipWhenAlone() throws Exception { - ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factory = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); assertThat(factory.acquireLeadership("testAcquireLeadershipWhenAlone"), is(true)); } @Test public void testAcquireLeadershipWhenTaken() throws Exception { - ClusterTierManagerClientEntityFactory factoryA = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factoryA = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); assertThat(factoryA.acquireLeadership("testAcquireLeadershipWhenTaken"), is(true)); try (Connection clientB = CLUSTER.newConnection()) { - ClusterTierManagerClientEntityFactory factoryB = new ClusterTierManagerClientEntityFactory(clientB); + ClusterTierManagerClientEntityFactory factoryB = new ClusterTierManagerClientEntityFactory(clientB, Runnable::run); assertThat(factoryB.acquireLeadership("testAcquireLeadershipWhenTaken"), is(false)); } } @Test public void testAcquireLeadershipAfterAbandoned() throws Exception { - ClusterTierManagerClientEntityFactory factoryA = new ClusterTierManagerClientEntityFactory(CONNECTION); + ClusterTierManagerClientEntityFactory factoryA = new ClusterTierManagerClientEntityFactory(CONNECTION, Runnable::run); factoryA.acquireLeadership("testAcquireLeadershipAfterAbandoned"); - factoryA.abandonLeadership("testAcquireLeadershipAfterAbandoned"); + assertTrue(factoryA.abandonLeadership("testAcquireLeadershipAfterAbandoned", true)); try (Connection clientB = CLUSTER.newConnection()) { - ClusterTierManagerClientEntityFactory factoryB = new ClusterTierManagerClientEntityFactory(clientB); + ClusterTierManagerClientEntityFactory factoryB = new ClusterTierManagerClientEntityFactory(clientB, Runnable::run); assertThat(factoryB.acquireLeadership("testAcquireLeadershipAfterAbandoned"), is(true)); } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredIterationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredIterationTest.java new file mode 100644 index 0000000000..704d87f9cc --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredIterationTest.java @@ -0,0 +1,148 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.units.MemoryUnit; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; +import org.terracotta.testing.rules.Cluster; + +import java.util.HashSet; +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.Set; + +import static java.util.stream.LongStream.range; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsIn.isIn; +import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsInstanceOf.any; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.junit.Assert.fail; + +public class ClusteredIterationTest { + + @ClassRule + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 64)).build(); + + @Rule + public final TestName testName = new TestName(); + + @Test + public void testIterationTerminatedWithException() { + try (CacheManager cacheManager = createTestCacheManager()) { + Cache cache = cacheManager.getCache(testName.getMethodName(), Long.class, byte[].class); + + byte[] data = new byte[101 * 1024]; + cache.put(1L, data); + cache.put(2L, data); + + Iterator> iterator = cache.iterator(); + + assertThat(iterator.next(), notNullValue()); + assertThat(iterator.next(), notNullValue()); + + try { + iterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + } + + @Test @SuppressWarnings("unchecked") + public void testIterationWithSingleLastBatchIsBroken() { + try (CacheManager cacheManager = createTestCacheManager()) { + Cache cache = cacheManager.getCache(testName.getMethodName(), Long.class, byte[].class); + + byte[] data = new byte[101 * 1024]; + cache.put(1L, data); + cache.put(2L, data); + + assertThat(cache, containsInAnyOrder( + isEntry(is(1L), any(byte[].class)), + isEntry(is(2L), any(byte[].class)) + )); + } + } + + @Test + public void testIterationWithConcurrentClearedCacheException() { + try (CacheManager cacheManager = createTestCacheManager()) { + Cache cache = cacheManager.getCache(testName.getMethodName(), Long.class, byte[].class); + + byte[] data = new byte[10 * 1024]; + Set initialKeySet = new HashSet<>(); + range(0, 20).forEach(k -> { + cache.put(k, data); + initialKeySet.add(k); + }); + + Iterator> iterator = cache.iterator(); + + cache.clear(); + + HashSet foundKeys = new HashSet<>(); + try { + while (true) { + assertThat(foundKeys.add(iterator.next().getKey()), is(true)); + } + } catch (NoSuchElementException e) { + //expected + } + foundKeys.forEach(k -> assertThat(k, isIn(initialKeySet))); + } + } + + private CacheManager createTestCacheManager() { + return newCacheManagerBuilder().with(cluster(CLUSTER.getConnectionURI().resolve("/iteration-cm")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) + .withCache(testName.getMethodName(), newCacheConfigurationBuilder(Long.class, byte[].class, newResourcePoolsBuilder() + .with(clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB)))).build(true); + } + + private static Matcher> isEntry(Matcher keyMatcher, Matcher valueMatcher) { + return new TypeSafeMatcher>() { + @Override + public void describeTo(Description description) { + description.appendText(" a cache entry { key ").appendDescriptionOf(keyMatcher).appendText(": value ").appendDescriptionOf(valueMatcher).appendText(" }"); + } + + @Override + protected boolean matchesSafely(Cache.Entry item) { + return keyMatcher.matches(item.getKey()) && valueMatcher.matches(item.getValue()); + } + }; + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredLoaderWriterTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredLoaderWriterTest.java new file mode 100644 index 0000000000..74b954a42d --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredLoaderWriterTest.java @@ -0,0 +1,185 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.util.TestCacheLoaderWriter; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.resilience.ThrowingResilienceStrategy; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.testing.rules.Cluster; + +import java.time.Duration; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.is; + + +@RunWith(Parameterized.class) +public class ClusteredLoaderWriterTest { + + @Parameterized.Parameters(name = "consistency={0}") + public static Consistency[] data() { + return Consistency.values(); + } + + @Parameterized.Parameter + public Consistency cacheConsistency; + + private static CacheManager cacheManager; + private Cache client1; + private CacheConfiguration configuration; + + private ConcurrentMap sor; + + @ClassRule + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 64)).build(); + + @BeforeClass + public static void initCacheManager() throws Exception { + cacheManager = newCacheManager(); + } + + private static PersistentCacheManager newCacheManager() { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager1"); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + return CacheManagerBuilder.newCacheManagerBuilder() + .with(cluster(CLUSTER.getConnectionURI()) + .timeouts(TimeoutsBuilder.timeouts() + .read(Duration.ofSeconds(30)) + .write(Duration.ofSeconds(30))) + .autoCreate(c -> c) + .build()) + .using(managementRegistry) + .build(true); + } + + @Before + public void setUp() throws Exception { + + sor = new ConcurrentHashMap<>(); + configuration = getCacheConfig(); + } + + private CacheConfiguration getCacheConfig() { + return CacheConfigurationBuilder + .newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder + .heap(20) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withLoaderWriter(new TestCacheLoaderWriter(sor)) + .withService(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) + .withResilienceStrategy(new ThrowingResilienceStrategy<>()) + .build(); + } + + @Test + public void testBasicOps() { + client1 = cacheManager.createCache("basicops" + cacheConsistency.name(), configuration); + assertThat(sor.isEmpty(), is(true)); + + Set keys = new HashSet<>(); + ThreadLocalRandom.current().longs(10).forEach(x -> { + keys.add(x); + client1.put(x, Long.toString(x)); + }); + + assertThat(sor.size(), is(10)); + + CacheManager anotherCacheManager = newCacheManager(); + Cache client2 = anotherCacheManager.createCache("basicops" + cacheConsistency.name(), + getCacheConfig()); + Map all = client2.getAll(keys); + assertThat(all.keySet(), containsInAnyOrder(keys.toArray())); + + keys.stream().limit(3).forEach(client2::remove); + + assertThat(sor.size(), is(7)); + } + + @Test + public void testCASOps() { + client1 = cacheManager.createCache("casops" + cacheConsistency.name(), configuration); + assertThat(sor.isEmpty(), is(true)); + + Set keys = new HashSet<>(); + ThreadLocalRandom.current().longs(10).forEach(x -> { + keys.add(x); + client1.put(x, Long.toString(x)); + }); + assertThat(sor.size(), is(10)); + + CacheManager anotherCacheManager = newCacheManager(); + Cache client2 = anotherCacheManager.createCache("casops" + cacheConsistency.name(), + getCacheConfig()); + + keys.forEach(x -> assertThat(client2.putIfAbsent(x, "Again" + x), is(Long.toString(x)))); + + assertThat(sor.size(), is(10)); + + keys.stream().limit(5).forEach(x -> + assertThat(client2.replace(x , "Replaced" + x), is(Long.toString(x)))); + + assertThat(sor.size(), is(10)); + + keys.forEach(x -> client1.remove(x, Long.toString(x))); + + assertThat(sor.size(), is(5)); + + AtomicInteger success = new AtomicInteger(0); + + keys.forEach(x -> { + if (client2.replace(x, "Replaced" + x, "Again")) { + success.incrementAndGet(); + } + }); + + assertThat(success.get(), is(5)); + + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredTests.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredTests.java deleted file mode 100644 index 63e573d6c8..0000000000 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredTests.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Comparator; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; - -/** - * Base class for all clustered tests. It makes sure the environment is correctly configured to launch the servers. Especially - * in the IDE. - */ -public abstract class ClusteredTests { - - private static final boolean FORCE_KIT_REFRESH = false; - - static { - initInstallationPath(); - } - - private static void initInstallationPath() { - if(System.getProperty("kitInstallationPath") != null) { - return; // nothing to do, all set - } - - String currentDir = System.getProperty("user.dir"); - - // We might have the root of ehcache or in the integration-test directory - // as current working directory - String diskPrefix; - if(Paths.get(currentDir).getFileName().toString().equals("integration-test")) { - diskPrefix = ""; - } - else { - diskPrefix = "clustered/integration-test/"; - } - - String kitInstallationPath = getKitInstallationPath(diskPrefix); - - if (kitInstallationPath == null || FORCE_KIT_REFRESH) { - installKit(diskPrefix); - kitInstallationPath = getKitInstallationPath(diskPrefix); - } - - System.setProperty("kitInstallationPath", kitInstallationPath); - } - - private static void installKit(String diskPrefix) { - try { - Process process = new ProcessBuilder(diskPrefix + "../../gradlew", "copyServerLibs") - .redirectError(ProcessBuilder.Redirect.INHERIT) - .redirectOutput(ProcessBuilder.Redirect.INHERIT) - .start(); - int status = process.waitFor(); - assertThat(status).isZero(); - } catch (IOException e) { - fail("Failed to start gradle to install kit", e); - } catch (InterruptedException e) { - fail("Interrupted while installing kit", e); - } - } - - private static String getKitInstallationPath(String diskPrefix) { - String basedir = diskPrefix + "build/ehcache-kit"; - if(!new File(basedir).exists()) { - return null; - } - try { - return Files.list(Paths.get(basedir)) - .sorted(Comparator.naturalOrder().reversed()) // the last one should be the one with the highest version - .findFirst() - .map(path -> path.toAbsolutePath().normalize().toString()) - .orElse(null); - } catch (IOException e) { - fail("Failed to set kitInstallationPath from " + basedir, e); - return null; - } - } -} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredTestsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredTestsTest.java deleted file mode 100644 index 7acfa1dddd..0000000000 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/ClusteredTestsTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered; - -import org.junit.Test; - -import java.io.File; - -import static org.assertj.core.api.Assertions.assertThat; - -public class ClusteredTestsTest extends ClusteredTests { - - @Test - public void test() { - String value = System.getProperty("kitInstallationPath"); - assertThat(new File(value)).exists(); - assertThat(new File(value)).isAbsolute(); - assertThat(new File(value).toString()).doesNotContain(".."); - } -} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/DestroyLoopTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/DestroyLoopTest.java new file mode 100644 index 0000000000..8e13c95a3f --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/DestroyLoopTest.java @@ -0,0 +1,104 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.CachePersistenceException; +import org.ehcache.PersistentCacheManager; +import org.ehcache.StateTransitionException; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.function.Supplier; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +public class DestroyLoopTest { + + private static final String CACHE_MANAGER_NAME = "/destroy-cm"; + private static final String CACHE_NAME = "clustered-cache"; + + @ClassRule + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 64)).build(); + + @Test + public void testDestroyLoop() throws Exception { + for (int i = 0; i < 10; i++) { + try (CacheManagerContainer cmc = new CacheManagerContainer(10, this::createCacheManager)) { + // just put in one and get from another + cmc.cacheManagerList.get(0).getCache(CACHE_NAME, Long.class, String.class).put(1L, "value"); + assertThat(cmc.cacheManagerList.get(5).getCache(CACHE_NAME, Long.class, String.class).get(1L), + is("value")); + } + destroyCacheManager(); + } + } + + private void destroyCacheManager() throws CachePersistenceException { + PersistentCacheManager cacheManager = newCacheManagerBuilder().with( + ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve(CACHE_MANAGER_NAME)) + .expecting(c -> c)).build(false); + cacheManager.destroy(); + } + + private PersistentCacheManager createCacheManager() { + CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER.getConnectionURI().resolve(CACHE_MANAGER_NAME)).autoCreate(c -> c)) + .withCache(CACHE_NAME, newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG))); + return clusteredCacheManagerBuilder.build(true); + } + + private static class CacheManagerContainer implements AutoCloseable { + private final List cacheManagerList; + + private CacheManagerContainer(int numCacheManagers, Supplier cmSupplier) { + List cm = new ArrayList<>(); + for (int i = 0; i < numCacheManagers; i++) { + cm.add(cmSupplier.get()); + } + cacheManagerList = Collections.unmodifiableList(cm); + } + + @Override + public void close() throws StateTransitionException { + cacheManagerList.forEach(PersistentCacheManager::close); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/EventsFailureBehaviorTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/EventsFailureBehaviorTest.java new file mode 100644 index 0000000000..29e55f0ba9 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/EventsFailureBehaviorTest.java @@ -0,0 +1,275 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.StateTransitionException; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.reconnect.ThrowingResiliencyStrategy; +import org.ehcache.clustered.util.ParallelTestCluster; +import org.ehcache.clustered.util.runners.Parallel; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; +import org.ehcache.event.EventType; +import org.ehcache.expiry.ExpiryPolicy; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Ignore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; + +import static java.util.stream.LongStream.range; +import static org.ehcache.clustered.client.config.builders.TimeoutsBuilder.timeouts; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.ehcache.event.EventType.CREATED; +import static org.ehcache.event.EventType.EVICTED; +import static org.ehcache.event.EventType.EXPIRED; +import static org.ehcache.event.EventType.REMOVED; +import static org.ehcache.event.EventType.UPDATED; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isOneOf; +import static org.hamcrest.Matchers.nullValue; + + +/* + * Eventing behavior is broken across a failover due to actives and passives + * evicting independently. Until this behavior is fixed or at least detectable + * this test cannot reliably assert anything. + */ +@Ignore("Eventing is broken across failover") +@RunWith(Parallel.class) +public class EventsFailureBehaviorTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(EventsFailureBehaviorTest.class); + + private static final int KEYS = 500; + private static final Duration TIMEOUT = Duration.ofSeconds(5); + private static final Duration FAILOVER_TIMEOUT = Duration.ofMinutes(1); + + @ClassRule @Rule + public static final ParallelTestCluster CLUSTER = new ParallelTestCluster(newCluster(2).in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 64)).build()); + @Rule + public final TestName testName = new TestName(); + + private PersistentCacheManager cacheManager1; + private PersistentCacheManager cacheManager2; + + @Before + public void waitForActive() throws Exception { + CLUSTER.getClusterControl().startAllServers(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve(testName.getMethodName())) + .timeouts(timeouts().read(Duration.ofSeconds(20)).write(Duration.ofSeconds(20))) + .autoCreate(s -> s.defaultServerResource("primary-server-resource"))).build(true); + + cacheManager2 = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve(testName.getMethodName())) + .timeouts(timeouts().read(Duration.ofSeconds(20)).write(Duration.ofSeconds(20))) + .autoCreate(s -> s.defaultServerResource("primary-server-resource"))).build(true); + } + + @After + public void tearDown() { + try { + try { + cacheManager1.close(); + } catch (StateTransitionException e) { + LOGGER.warn("Failed to shutdown cache manager", e); + } + } finally { + try { + cacheManager2.close(); + } catch (StateTransitionException e) { + LOGGER.warn("Failed to shutdown cache manager", e); + } + } + } + + private static Cache createCache(CacheManager cacheManager, CacheEventListener cacheEventListener, ExpiryPolicy expiryPolicy) { + return cacheManager.createCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, byte[].class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .withResilienceStrategy(new ThrowingResiliencyStrategy<>()) + .withService(CacheEventListenerConfigurationBuilder + .newEventListenerConfiguration(cacheEventListener, EnumSet.allOf(EventType.class)) + .unordered().asynchronous()) + .withExpiry(expiryPolicy) + .build()); + } + + private void failover(Cache cache1, Cache cache2) throws Exception { + // failover passive -> active + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + + // wait for clients to be back in business + assertThat(() -> { + try { + cache1.replace(1L, new byte[0], new byte[0]); + cache2.replace(1L, new byte[0], new byte[0]); + return true; + } catch (Exception e) { + return false; + } + }, eventually().is(true)); + } + + @Test @SuppressWarnings("unchecked") + public void testEventsFailover() throws Exception { + AccountingCacheEventListener accountingCacheEventListener1 = new AccountingCacheEventListener<>(); + Cache cache1 = createCache(cacheManager1, accountingCacheEventListener1, ExpiryPolicyBuilder.noExpiration()); + AccountingCacheEventListener accountingCacheEventListener2 = new AccountingCacheEventListener<>(); + Cache cache2 = createCache(cacheManager2, accountingCacheEventListener2, ExpiryPolicyBuilder.noExpiration()); + + + byte[] value = new byte[10 * 1024]; + + range(0, KEYS).forEach(k -> { + cache1.put(k, value); + }); + eventually().runsCleanly(() -> range(0, KEYS).forEach(k -> { + if (cache1.containsKey(k)) { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), containsInAnyOrder(CREATED))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + } else { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), containsInAnyOrder(CREATED, EVICTED))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + } + })); + + // failover passive -> active + failover(cache1, cache2); + + range(0, KEYS).forEach(k -> { + cache1.put(k, value); + }); + eventually().runsCleanly(() -> range(0, KEYS).forEach(k -> { + if (cache1.containsKey(k)) { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), + either(containsInAnyOrder(CREATED, UPDATED)) + .or(containsInAnyOrder(CREATED, EVICTED, CREATED)))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + } else { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), + either(containsInAnyOrder(CREATED, UPDATED, EVICTED)) + .or(containsInAnyOrder(CREATED, EVICTED, CREATED, EVICTED)))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + } + })); + + range(0, KEYS).forEach(cache1::remove); + eventually().runsCleanly(() -> range(0, KEYS).forEach(k -> { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), + either(containsInAnyOrder(CREATED, UPDATED, REMOVED)) + .or(containsInAnyOrder(CREATED, EVICTED, CREATED, REMOVED)) + .or(containsInAnyOrder(CREATED, UPDATED, EVICTED)) + .or(containsInAnyOrder(CREATED, EVICTED, CREATED, EVICTED)))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + })); + + range(KEYS, KEYS * 2).forEach(k -> { + cache1.put(k, value); + }); + eventually().runsCleanly(() -> range(KEYS, KEYS * 2).forEach(k -> { + if (cache1.containsKey(k)) { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), containsInAnyOrder(CREATED))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + } else { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), containsInAnyOrder(CREATED, EVICTED))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + } + })); + } + + @Test @SuppressWarnings("unchecked") + public void testExpirationFailover() throws Exception { + AccountingCacheEventListener accountingCacheEventListener1 = new AccountingCacheEventListener<>(); + Cache cache1 = createCache(cacheManager1, accountingCacheEventListener1, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))); + AccountingCacheEventListener accountingCacheEventListener2 = new AccountingCacheEventListener<>(); + Cache cache2 = createCache(cacheManager2, accountingCacheEventListener2, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))); + + + byte[] value = new byte[10 * 1024]; + + range(0, KEYS).forEach(k -> cache1.put(k, value)); + + eventually().runsCleanly(() -> range(0, KEYS).forEach(k -> { + if (cache1.containsKey(k)) { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), containsInAnyOrder(CREATED))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + } else { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), containsInAnyOrder(is(CREATED), isOneOf(EVICTED, EXPIRED)))); + //assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(is(CREATED), isOneOf(EVICTED, EXPIRED)))); + } + })); + + // failover passive -> active + failover(cache1, cache2); + + range(0, KEYS).forEach(k -> { + assertThat(cache1.get(k), is(nullValue())); + }); + + eventually().runsCleanly(() -> range(0, KEYS).forEach(k -> { + assertThat(accountingCacheEventListener1.events, hasEntry(is(k), containsInAnyOrder(is(CREATED), isOneOf(EVICTED, EXPIRED)))); + //assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(accountingCacheEventListener1.events.get(k).toArray()))); + assertThat(accountingCacheEventListener2.events, hasEntry(is(k), containsInAnyOrder(is(CREATED), isOneOf(EVICTED, EXPIRED)))); + })); + } + + + + static class AccountingCacheEventListener implements CacheEventListener { + private final Map> events = new ConcurrentHashMap<>(); + + @Override + public void onEvent(CacheEvent event) { + events.computeIfAbsent(event.getKey(), key -> new CopyOnWriteArrayList<>()).add(event.getType()); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/IterationFailureBehaviorTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/IterationFailureBehaviorTest.java new file mode 100644 index 0000000000..5386e8432e --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/IterationFailureBehaviorTest.java @@ -0,0 +1,191 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.CacheIterationException; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.store.ServerStoreProxyException; +import org.ehcache.clustered.common.internal.exceptions.InvalidOperationException; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.spi.resilience.StoreAccessException; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.terracotta.exception.ConnectionClosedException; +import org.terracotta.testing.rules.Cluster; +import org.terracotta.utilities.test.rules.TestRetryer; + +import java.time.Duration; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import static java.time.Duration.ofSeconds; +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; +import static java.util.stream.LongStream.range; +import static org.ehcache.clustered.client.config.builders.TimeoutsBuilder.timeouts; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.leaseLength; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.junit.Assert.fail; + +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.CLASS_RULE; + +public class IterationFailureBehaviorTest { + + private static final int KEYS = 100; + + @ClassRule @Rule + public static final TestRetryer CLUSTER = TestRetryer.tryValues(ofSeconds(1), ofSeconds(10), ofSeconds(30)) + .map(leaseLength -> newCluster(2).in(clusterPath()).withServiceFragment( + offheapResource("primary-server-resource", 64) + leaseLength(leaseLength)).build()) + .outputIs(CLASS_RULE); + + @Before + public void startAllServers() throws Exception { + CLUSTER.get().getClusterControl().startAllServers(); + } + + @Test + public void testIteratorFailover() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/iterator-cm")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource")) + .timeouts(timeouts().read(ofSeconds(10)))); + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + CacheConfiguration smallConfig = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))).build(); + + Cache smallCache = cacheManager.createCache("small-cache", smallConfig); + range(0, KEYS).forEach(k -> smallCache.put(k, Long.toString(k))); + + CacheConfiguration largeConfig = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, byte[].class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))).build(); + + Cache largeCache = cacheManager.createCache("large-cache", largeConfig); + byte[] value = new byte[10 * 1024]; + range(0, KEYS).forEach(k -> { + largeCache.put(k, value); + }); + + Map smallMap = new HashMap<>(); + + Iterator> smallIterator = smallCache.iterator(); + Cache.Entry smallNext = smallIterator.next(); + smallMap.put(smallNext.getKey(), smallNext.getValue()); + + Iterator> largeIterator = largeCache.iterator(); + Cache.Entry largeNext = largeIterator.next(); + assertThat(largeCache.get(largeNext.getKey()), notNullValue()); + + CLUSTER.get().getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.get().getClusterControl().terminateActive(); + + //large iterator fails + try { + largeIterator.forEachRemaining(k -> {}); + fail("Expected CacheIterationException"); + } catch (CacheIterationException e) { + assertThat(e.getCause(), instanceOf(StoreAccessException.class)); + assertThat(e.getCause().getCause(), instanceOf(ServerStoreProxyException.class)); + assertThat(e.getCause().getCause().getCause(), + either(instanceOf(ConnectionClosedException.class)) //lost in the space between active and passive + .or(instanceOf(InvalidOperationException.class))); //picked up by the passive - it doesn't have our iterator + } + + //small iterator completes... it fetched the entire batch in one shot + smallIterator.forEachRemaining(k -> smallMap.put(k.getKey(), k.getValue())); + + assertThat(smallMap, is(range(0, KEYS).boxed().collect(toMap(identity(), k -> Long.toString(k))))); + } + } + + @Test + public void testIteratorReconnect() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/iterator-cm")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + CacheConfiguration smallConfig = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))).build(); + + Cache smallCache = cacheManager.createCache("small-cache", smallConfig); + range(0, KEYS).forEach(k -> smallCache.put(k, Long.toString(k))); + + CacheConfiguration largeConfig = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, byte[].class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))).build(); + + Cache largeCache = cacheManager.createCache("large-cache", largeConfig); + byte[] value = new byte[10 * 1024]; + range(0, KEYS).forEach(k -> { + largeCache.put(k, value); + }); + + Map smallMap = new HashMap<>(); + + Iterator> smallIterator = smallCache.iterator(); + Cache.Entry smallNext = smallIterator.next(); + smallMap.put(smallNext.getKey(), smallNext.getValue()); + + Iterator> largeIterator = largeCache.iterator(); + Cache.Entry largeNext = largeIterator.next(); + assertThat(largeCache.get(largeNext.getKey()), notNullValue()); + + CLUSTER.get().getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.get().getClusterControl().terminateAllServers(); + Thread.sleep(CLUSTER.input().multipliedBy(2L).toMillis()); + CLUSTER.get().getClusterControl().startAllServers(); + + //large iterator fails + try { + largeIterator.forEachRemaining(k -> {}); + fail("Expected CacheIterationException"); + } catch (CacheIterationException e) { + assertThat(e.getCause(), instanceOf(StoreAccessException.class)); + assertThat(e.getCause().getCause(), instanceOf(ServerStoreProxyException.class)); + assertThat(e.getCause().getCause().getCause(), + either(instanceOf(ConnectionClosedException.class)) //lost in the space between the two cluster executions + .or(instanceOf(InvalidOperationException.class))); //picked up by the new cluster - it doesn't have our iterator + } + + //small iterator completes... it fetched the entire batch in one shot + smallIterator.forEachRemaining(k -> smallMap.put(k.getKey(), k.getValue())); + + assertThat(smallMap, is(range(0, KEYS).boxed().collect(toMap(identity(), k -> Long.toString(k))))); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java index 0d8474405f..60959d9c5a 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java @@ -16,45 +16,64 @@ package org.ehcache.clustered; +import org.ehcache.testing.ExternalTests; +import org.jsr107.tck.event.CacheListenerTest; +import org.jsr107.tck.spi.CachingProviderTest; +import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; -import org.junit.Test; +import org.junit.runner.RunWith; import org.terracotta.testing.rules.Cluster; -import java.io.File; -import java.net.URL; - import javax.cache.Caching; import javax.cache.spi.CachingProvider; +import java.net.URL; +import java.util.Properties; import static org.ehcache.clustered.CacheManagerLifecycleEhcacheIntegrationTest.substitute; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; + /** - * JCacheClusteredTest + * JCacheClusteredTest - runs the TCK test suite using clustered caches */ -public class JCacheClusteredTest extends ClusteredTests { - - private static final String RESOURCE_CONFIG = - "" - + "" - + "64" - + "" + - "\n"; +@RunWith(ExternalTests.class) +@ExternalTests.From(javax.cache.CachingTest.class) +@ExternalTests.Ignore(value=CachingProviderTest.class, method="getCacheManagerUsingDefaultURI") +@ExternalTests.Ignore(value= CacheListenerTest.class) +public class JCacheClusteredTest { + private static final Properties TCK_PROPERTIES = new Properties(); + static { + TCK_PROPERTIES.setProperty("java.net.preferIPv4Stack", "true"); + TCK_PROPERTIES.setProperty("javax.management.builder.initial", "org.ehcache.jsr107.internal.tck.Eh107MBeanServerBuilder"); + TCK_PROPERTIES.setProperty("org.jsr107.tck.management.agentId", "Eh107MBeanServer"); + TCK_PROPERTIES.setProperty("javax.cache.CacheManager", "org.ehcache.CacheManager"); + TCK_PROPERTIES.setProperty("javax.cache.Cache", "org.ehcache.Cache"); + TCK_PROPERTIES.setProperty("javax.cache.Cache.Entry", "org.ehcache.Cache$Entry"); + TCK_PROPERTIES.setProperty("javax.cache.annotation.CacheInvocationContext", "javax.cache.annotation.impl.cdi.CdiCacheKeyInvocationContextImpl"); + } @ClassRule - public static Cluster CLUSTER = newCluster().in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary", 256)).build(); @BeforeClass - public static void waitForActive() throws Exception { - CLUSTER.getClusterControl().waitForActive(); - } - - @Test - public void testJCacheClustered() throws Exception { + public static void configureEnvironment() throws Exception { URL xml = CacheManagerLifecycleEhcacheIntegrationTest.class.getResource("/configs/jcache-clustered.xml"); URL substitutedXml = substitute(xml, "cluster-uri", CLUSTER.getConnectionURI().toString()); - CachingProvider cachingProvider = Caching.getCachingProvider(); - cachingProvider.getCacheManager(substitutedXml.toURI(), getClass().getClassLoader()); + System.setProperty("ehcache.jsr107.config.default", substitutedXml.toURI().toString()); + TCK_PROPERTIES.forEach((k, v) -> System.setProperty(k.toString(), v.toString())); + } + + @AfterClass + public static void cleanup() { + try { + Caching.getCachingProviders().forEach(CachingProvider::close); + } finally { + System.clearProperty("ehcache.jsr107.config.default"); + TCK_PROPERTIES.forEach((k, v) -> System.clearProperty(k.toString())); + } } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/LeaseTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/LeaseTest.java new file mode 100644 index 0000000000..1e2c1e541e --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/LeaseTest.java @@ -0,0 +1,116 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.util.TCPProxyManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.testing.rules.Cluster; +import org.terracotta.utilities.test.rules.TestRetryer; + +import java.net.URI; +import java.time.Duration; + +import static java.time.Duration.ofSeconds; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.leaseLength; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.CLASS_RULE; +import static org.terracotta.utilities.test.rules.TestRetryer.tryValues; + +@RunWith(Parameterized.class) +public class LeaseTest { + + @ClassRule + @Rule + public static final TestRetryer CLUSTER = tryValues(ofSeconds(1), ofSeconds(10), ofSeconds(30)) + .map(leaseLength -> newCluster().in(clusterPath()).withServiceFragment( + offheapResource("primary-server-resource", 64) + leaseLength(leaseLength)).build()) + .outputIs(CLASS_RULE); + + @Parameterized.Parameters + public static ResourcePoolsBuilder[] data() { + return new ResourcePoolsBuilder[]{ + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB)), + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .with(clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB)) + }; + } + + @Parameterized.Parameter + public ResourcePoolsBuilder resourcePoolsBuilder; + + @Test + public void leaseExpiry() throws Exception { + try (TCPProxyManager proxyManager = TCPProxyManager.create(CLUSTER.get().getConnectionURI())) { + URI connectionURI = proxyManager.getURI(); + + CacheManagerBuilder clusteredCacheManagerBuilder = newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(connectionURI.resolve("/crud-cm")) + .timeouts(TimeoutsBuilder.timeouts().connection(Duration.ofSeconds(20))) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + resourcePoolsBuilder).build(); + + Cache cache = cacheManager.createCache("clustered-cache", config); + cache.put(1L, "The one"); + cache.put(2L, "The two"); + cache.put(3L, "The three"); + assertThat(cache.get(1L), equalTo("The one")); + assertThat(cache.get(2L), equalTo("The two")); + assertThat(cache.get(3L), equalTo("The three")); + + long delay = CLUSTER.input().plusSeconds(1L).toMillis(); + proxyManager.setDelay(delay); + try { + Thread.sleep(delay); + } finally { + proxyManager.setDelay(0); + } + + eventually().runsCleanly(() -> { + assertThat(cache.get(1L), equalTo("The one")); + assertThat(cache.get(2L), equalTo("The two")); + assertThat(cache.get(3L), equalTo("The three")); + }); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/NoOffheapTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/NoOffheapTest.java new file mode 100644 index 0000000000..9821243cb8 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/NoOffheapTest.java @@ -0,0 +1,55 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.StateTransitionException; +import org.ehcache.config.units.MemoryUnit; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import static java.util.function.UnaryOperator.identity; +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.junit.Assert.fail; + + +public class NoOffheapTest { + + @ClassRule + public static Cluster CLUSTER = newCluster().in(clusterPath()).build(); + + @Test + public void testNoOffheap() throws InterruptedException { + try { + newCacheManagerBuilder().with(cluster(CLUSTER.getConnectionURI().resolve("/no-offheap-cm")) + .autoCreate(identity())) + .withCache("testNoOffheap", newCacheConfigurationBuilder(Long.class, byte[].class, newResourcePoolsBuilder() + .with(clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB)) + )).build(true).close(); + fail(); + } catch (StateTransitionException e) { + assertThat(e).hasMessage("Could not create the cluster tier manager 'no-offheap-cm'."); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/OversizedCacheOpsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/OversizedCacheOpsTest.java new file mode 100644 index 0000000000..9ceb14c2a3 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/OversizedCacheOpsTest.java @@ -0,0 +1,81 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import java.util.Arrays; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class OversizedCacheOpsTest { + + @ClassRule + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 2)).build(); + + @Test + public void overSizedCacheOps() throws Exception { + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))).build(); + + Cache cache = cacheManager.createCache("clustered-cache", config); + cache.put(1L, "The one"); + cache.put(2L, "The two"); + cache.put(1L, "Another one"); + cache.put(3L, "The three"); + assertThat(cache.get(1L), equalTo("Another one")); + assertThat(cache.get(2L), equalTo("The two")); + assertThat(cache.get(3L), equalTo("The three")); + cache.put(1L, buildLargeString(2)); + assertThat(cache.get(1L), is(nullValue())); + // ensure others are not evicted + assertThat(cache.get(2L), equalTo("The two")); + assertThat(cache.get(3L), equalTo("The three")); + } + } + + private String buildLargeString(int sizeInMB) { + char[] filler = new char[sizeInMB * 1024 * 1024]; + Arrays.fill(filler, '0'); + return new String(filler); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ReconnectDuringDestroyTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ReconnectDuringDestroyTest.java new file mode 100644 index 0000000000..12867958d1 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/ReconnectDuringDestroyTest.java @@ -0,0 +1,203 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntity; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLock; +import org.ehcache.clustered.client.service.EntityBusyException; +import org.ehcache.clustered.common.internal.ClusterTierManagerConfiguration; +import org.ehcache.clustered.reconnect.ThrowingResiliencyStrategy; +import org.ehcache.clustered.util.TCPProxyManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.terracotta.connection.Connection; +import org.terracotta.connection.entity.EntityRef; +import org.terracotta.exception.EntityNotFoundException; +import org.terracotta.lease.connection.LeasedConnectionFactory; +import org.terracotta.testing.rules.Cluster; + +import org.terracotta.utilities.test.rules.TestRetryer; + +import java.time.Duration; +import java.util.Properties; + +import static java.time.Duration.ofSeconds; +import static org.ehcache.clustered.common.EhcacheEntityVersion.ENTITY_VERSION; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.leaseLength; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.CLASS_RULE; +import static org.terracotta.utilities.test.rules.TestRetryer.tryValues; + +/** + * ReconnectDuringDestroyTest + */ +public class ReconnectDuringDestroyTest { + + private static TCPProxyManager proxyManager; + PersistentCacheManager cacheManager; + + @ClassRule @Rule + public static final TestRetryer CLUSTER = tryValues(ofSeconds(1), ofSeconds(10), ofSeconds(3)) + .map(leaseLength -> newCluster().in(clusterPath()).withServiceFragment( + offheapResource("primary-server-resource", 64) + leaseLength(leaseLength)).build()) + .outputIs(CLASS_RULE); + + @BeforeClass + public static void initializeProxy() throws Exception { + proxyManager = TCPProxyManager.create(CLUSTER.get().getConnectionURI()); + } + + @AfterClass + public static void closeProxy() { + proxyManager.close(); + } + + @Before + public void initializeCacheManager() { + ClusteringServiceConfiguration clusteringConfiguration = + ClusteringServiceConfigurationBuilder.cluster(proxyManager.getURI().resolve("/crud-cm")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource")).build(); + + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder().with(clusteringConfiguration); + cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + } + + /* + This is to test the scenario in which reconnect happens while cache manager + destruction is in progress. This test checks whether the cache manager + gets destructed properly in the reconnect path once the connection is closed + after the prepareForDestroy() call. + */ + @Test + public void reconnectDuringDestroyTest() throws Exception { + cacheManager.close(); + Connection client = null; + try { + client = LeasedConnectionFactory.connect(proxyManager.getURI(), new Properties()); + VoltronReadWriteLock voltronReadWriteLock = new VoltronReadWriteLock(client, "crud-cm"); + try (VoltronReadWriteLock.Hold localMaintenance = voltronReadWriteLock.tryWriteLock()) { + if (localMaintenance == null) { + throw new EntityBusyException("Unable to obtain maintenance lease for " + "crud-cm"); + } + EntityRef ref = getEntityRef(client); + try { + ClusterTierManagerClientEntity entity = ref.fetchEntity(null); + entity.prepareForDestroy(); + entity.close(); + } catch (EntityNotFoundException e) { + Assert.fail(); + } + } + // For reconnection. + long delay = CLUSTER.input().plusSeconds(1).toMillis(); + proxyManager.setDelay(delay); + try { + Thread.sleep(delay); + } finally { + proxyManager.setDelay(0); + } + client = LeasedConnectionFactory.connect(proxyManager.getURI(), new Properties()); + + // For mimicking the cacheManager.destroy() in the reconnect path. + voltronReadWriteLock = new VoltronReadWriteLock(client, "crud-cm"); + try (VoltronReadWriteLock.Hold localMaintenance = voltronReadWriteLock.tryWriteLock()) { + if (localMaintenance == null) { + throw new EntityBusyException("Unable to obtain maintenance lease for " + "crud-cm"); + } + EntityRef ref = getEntityRef(client); + try { + ClusterTierManagerClientEntity entity = ref.fetchEntity(null); + entity.prepareForDestroy(); + entity.close(); + } catch (EntityNotFoundException e) { + Assert.fail("Unexpected exception " + e.getMessage()); + } + if (!ref.destroy()) { + Assert.fail("Unexpected exception while trying to destroy cache manager"); + } + } + } finally { + if (client != null) { + client.close(); + } + } + } + + @Test + public void reconnectAfterDestroyOneOfTheCache() throws Exception { + try { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder. + clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .withResilienceStrategy(new ThrowingResiliencyStrategy<>()) + .build(); + Cache cache1 = cacheManager.createCache("clustered-cache-1", config); + Cache cache2 = cacheManager.createCache("clustered-cache-2", config); + cache1.put(1L, "The one"); + cache1.put(2L, "The two"); + cache2.put(1L, "The one"); + cache2.put(2L, "The two"); + cacheManager.destroyCache("clustered-cache-1"); + + // For reconnection. + long delay = CLUSTER.input().plusSeconds(1L).toMillis(); + proxyManager.setDelay(delay); + try { + Thread.sleep(delay); + } finally { + proxyManager.setDelay(0); + } + + Cache cache2Again = cacheManager.getCache("clustered-cache-2", Long.class, String.class); + eventually().runsCleanly(() -> { + assertThat(cache2Again.get(1L), equalTo("The one")); + assertThat(cache2Again.get(2L), equalTo("The two")); + }); + cache2Again.put(3L, "The three"); + assertThat(cache2Again.get(3L), equalTo("The three")); + } finally { + cacheManager.close(); + } + } + + private EntityRef getEntityRef(Connection client) throws org.terracotta.exception.EntityNotProvidedException { + return client.getEntityRef(ClusterTierManagerClientEntity.class, ENTITY_VERSION, "crud-cm"); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java index d067541f17..8799fc36eb 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java @@ -16,48 +16,35 @@ package org.ehcache.clustered; -import org.ehcache.CachePersistenceException; +import org.ehcache.clustered.client.internal.PerpetualCachePersistenceException; import org.ehcache.PersistentCacheManager; import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; import org.ehcache.clustered.client.config.DedicatedClusteredResourcePool; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.client.config.builders.ServerSideConfigurationBuilder; import org.ehcache.clustered.common.Consistency; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; -import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.terracotta.testing.rules.Cluster; -import java.io.File; - +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; - -public class ResourcePoolAllocationFailureTest extends ClusteredTests { - private static final String RESOURCE_CONFIG = - "" - + "" - + "64" - + "" + - "\n"; +public class ResourcePoolAllocationFailureTest { @ClassRule - public static Cluster CLUSTER = - newCluster().in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); - - @BeforeClass - public static void waitForActive() throws Exception { - CLUSTER.getClusterControl().waitForActive(); - } + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 64)).build(); @Test public void testTooLowResourceException() throws InterruptedException { @@ -69,7 +56,7 @@ public void testTooLowResourceException() throws InterruptedException { cacheManagerBuilder.build(true); fail("InvalidServerStoreConfigurationException expected"); } catch (Exception e) { - Throwable cause = getCause(e, CachePersistenceException.class); + Throwable cause = getCause(e, PerpetualCachePersistenceException.class); assertThat(cause, notNullValue()); assertThat(cause.getMessage(), startsWith("Unable to create")); } @@ -85,15 +72,14 @@ public void testTooLowResourceException() throws InterruptedException { private CacheManagerBuilder getPersistentCacheManagerCacheManagerBuilder(DedicatedClusteredResourcePool resourcePool) { ClusteringServiceConfigurationBuilder clusteringServiceConfigurationBuilder = ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")); - ServerSideConfigurationBuilder serverSideConfigurationBuilder = clusteringServiceConfigurationBuilder.autoCreate() - .defaultServerResource("primary-server-resource"); + ClusteringServiceConfiguration clusteringConfiguration = clusteringServiceConfigurationBuilder.autoCreate(server -> server.defaultServerResource("primary-server-resource")).build(); return CacheManagerBuilder.newCacheManagerBuilder() - .with(serverSideConfigurationBuilder) + .with(clusteringConfiguration) .withCache("test-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(resourcePool) - ).add(new ClusteredStoreConfiguration(Consistency.EVENTUAL))); + ).withService(new ClusteredStoreConfiguration(Consistency.EVENTUAL))); } private static Throwable getCause(Throwable e, Class causeClass) { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java index d5a23b64a2..793d03afcf 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java @@ -16,22 +16,23 @@ package org.ehcache.clustered; -import com.google.code.tempusfugit.concurrency.ConcurrentTestRunner; +import org.assertj.core.api.ThrowableAssertAlternative; import org.ehcache.Cache; import org.ehcache.CacheManager; import org.ehcache.CachePersistenceException; -import org.ehcache.Diagnostics; import org.ehcache.PersistentCacheManager; import org.ehcache.StateTransitionException; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.util.ParallelTestCluster; +import org.ehcache.clustered.util.runners.Parallel; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.spi.store.StoreAccessTimeoutException; -import org.hamcrest.Matchers; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -39,45 +40,37 @@ import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExternalResource; -import org.junit.rules.RuleChain; import org.junit.rules.TestName; -import org.junit.rules.TestRule; -import org.junit.runner.Description; import org.junit.runner.RunWith; -import org.junit.runners.model.Statement; -import org.terracotta.connection.ConnectionException; -import org.terracotta.testing.rules.Cluster; +import org.terracotta.utilities.test.Diagnostics; import com.tc.net.protocol.transport.ClientMessageTransport; import com.tc.properties.TCProperties; import com.tc.properties.TCPropertiesConsts; import com.tc.properties.TCPropertiesImpl; +import org.terracotta.utilities.test.rules.TestRetryer; -import java.io.File; import java.time.Duration; import java.time.temporal.ChronoUnit; -import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.concurrent.Future; import java.util.concurrent.FutureTask; -import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.junit.Assume.assumeNoException; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; +import static java.time.Duration.ofSeconds; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.leaseLength; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.CLASS_RULE; +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.RULE; +import static org.terracotta.utilities.test.rules.TestRetryer.tryValues; /** * Provides integration tests in which the server is terminated before the Ehcache operation completes. @@ -91,44 +84,10 @@ // and stopping a server for each test. Each test and the environment supporting it must have // no side effects which can affect another test. // ============================================================================================= -@RunWith(ConcurrentTestRunner.class) -public class TerminatedServerTest extends ClusteredTests { - - /** - * Determines the level of test concurrency. The number of allowed concurrent tests - * is set in {@link #setConcurrency()}. - */ - private static final Semaphore TEST_PERMITS = new Semaphore(0); - - @ClassRule - public static final TestCounter TEST_COUNTER = new TestCounter(); - - @BeforeClass - public static void setConcurrency() { - int availableProcessors = Runtime.getRuntime().availableProcessors(); - int testCount = TEST_COUNTER.getTestCount(); - /* - * Some build environments can't reliably handle running tests in this class concurrently. - * If the 'disable.concurrent.tests' system property is 'true', restrict the tests to - * single operation using a single test permit. - */ - boolean disableConcurrentTests = Boolean.getBoolean("disable.concurrent.tests"); - if (disableConcurrentTests) { - TEST_PERMITS.release(1); - } else { - TEST_PERMITS.release(Math.min(Math.max(1, testCount / 2), availableProcessors)); - } - System.out.format("TerminatedServerTest:" + - " disableConcurrentTests=%b, testCount=%d, availableProcessors=%d, TEST_PERMITS.availablePermits()=%d%n", - disableConcurrentTests, testCount, availableProcessors, TEST_PERMITS.availablePermits()); - } +@RunWith(Parallel.class) +public class TerminatedServerTest { - private static final String RESOURCE_CONFIG = - "" - + "" - + "64" - + "" + - "\n"; + private static final int CLIENT_MAX_PENDING_REQUESTS = 5; private static Map OLD_PROPERTIES; @@ -148,6 +107,9 @@ public static void setProperties() { overrideProperty(oldProperties, TCPropertiesConsts.L1_SHUTDOWN_THREADGROUP_GRACETIME, "1000"); overrideProperty(oldProperties, TCPropertiesConsts.TC_TRANSPORT_HANDSHAKE_TIMEOUT, "1000"); + // Used only by testTerminationFreezesTheClient to be able to fill the inflight queue + overrideProperty(oldProperties, TCPropertiesConsts.CLIENT_MAX_PENDING_REQUESTS, Integer.toString(CLIENT_MAX_PENDING_REQUESTS)); + OLD_PROPERTIES = oldProperties; } @@ -161,34 +123,24 @@ public static void restoreProperties() { } } - private static Cluster createCluster() { - try { - return newCluster().in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); - } catch (IllegalArgumentException e) { - assumeNoException(e); - return null; - } + private ThrowableAssertAlternative assertExceptionOccurred(Class exception, TimeLimitedTask task) { + return assertThatExceptionOfType(exception) + .isThrownBy(() -> task.run()); } - @Rule - public final TestName testName = new TestName(); - - // Included in 'ruleChain' below. - private final Cluster cluster = createCluster(); + @ClassRule @Rule + public static final TestRetryer CLUSTER = tryValues(ofSeconds(2), ofSeconds(10), ofSeconds(30)) + .map(leaseLength -> new ParallelTestCluster( + newCluster().in(clusterPath()).withServiceFragment( + offheapResource("primary-server-resource", 64) + leaseLength(leaseLength)).build())) + .outputIs(CLASS_RULE, RULE); - - // The TestRule.apply method is called on the inner-most Rule first with the result being passed to each - // successively outer rule until the outer-most rule is reached. For ExternalResource rules, the before - // method of each rule is called from outer-most rule to inner-most rule; the after method is called from - // inner-most to outer-most. @Rule - public final RuleChain ruleChain = RuleChain - .outerRule(new TestConcurrencyLimiter()) - .around(cluster); + public final TestName testName = new TestName(); @Before - public void waitForActive() throws Exception { - cluster.getClusterControl().waitForActive(); + public void startAllServers() throws Exception { + CLUSTER.get().getClusterControl().startAllServers(); } /** @@ -198,15 +150,14 @@ public void waitForActive() throws Exception { public void testTerminationBeforeCacheManagerClose() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) - .autoCreate() - .defaultServerResource("primary-server-resource")); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - new TimeLimitedTask(2, TimeUnit.SECONDS) { + new TimeLimitedTask(CLUSTER.input().plusSeconds(10)) { @Override Void runTask() throws Exception { cacheManager.close(); @@ -218,114 +169,98 @@ Void runTask() throws Exception { } @Test - @Ignore("Need to decide if we close cache entity in a daemon thread") public void testTerminationBeforeCacheManagerCloseWithCaches() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB)))); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); + + cacheManager.close(); - new TimeLimitedTask(5, TimeUnit.SECONDS) { - @Override - Void runTask() throws Exception { - cacheManager.close(); - return null; - } - }.run(); } @Test public void testTerminationBeforeCacheManagerRetrieve() throws Exception { // Close all servers - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); // Try to retrieve an entity (that doesn't exist but I don't care... the server is not running anyway CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) .timeouts(TimeoutsBuilder.timeouts().connection(Duration.ofSeconds(1))) // Need a connection timeout shorter than the TimeLimitedTask timeout - .expecting() - .defaultServerResource("primary-server-resource")); - final PersistentCacheManager cacheManagerExisting = clusteredCacheManagerBuilder.build(false); + .expecting(server -> server.defaultServerResource("primary-server-resource"))); + PersistentCacheManager cacheManagerExisting = clusteredCacheManagerBuilder.build(false); // Base test time limit on observed TRANSPORT_HANDSHAKE_SYNACK_TIMEOUT; might not have been set in time to be effective long synackTimeout = TimeUnit.MILLISECONDS.toSeconds(ClientMessageTransport.TRANSPORT_HANDSHAKE_SYNACK_TIMEOUT); - try { - new TimeLimitedTask(3 + synackTimeout, TimeUnit.SECONDS) { + + assertExceptionOccurred(StateTransitionException.class, + new TimeLimitedTask(ofSeconds(3 + synackTimeout)) { @Override - Void runTask() throws Exception { + Void runTask() { cacheManagerExisting.init(); return null; } - }.run(); - fail("Expecting StateTransitionException"); - } catch (StateTransitionException e) { - assertThat(getCausalChain(e), hasItem(Matchers.instanceOf(ConnectionException.class))); - } + }) + .withRootCauseInstanceOf(TimeoutException.class); } @Test - @Ignore("In multi entity, destroy cache is a blocking operation") + @Ignore("Works but by sending a really low level exception. Need to be fixed to get the expected CachePersistenceException") public void testTerminationBeforeCacheManagerDestroyCache() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB)))); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - final Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); cache.put(1L, "un"); cache.put(2L, "deux"); cache.put(3L, "trois"); cacheManager.removeCache("simple-cache"); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - try { - new TimeLimitedTask(5, TimeUnit.SECONDS) { + assertExceptionOccurred(CachePersistenceException.class, + new TimeLimitedTask(ofSeconds(10)) { @Override Void runTask() throws Exception { cacheManager.destroyCache("simple-cache"); return null; } - }.run(); - fail("Expecting CachePersistenceException"); - } catch (CachePersistenceException e) { - assertThat(getUltimateCause(e), is(instanceOf(TimeoutException.class))); - } + }); } @Test - @Ignore("Multi entity means this is now a blocking operation") + @Ignore("There are no timeout on the create cache right now. It waits until the server comes back") public void testTerminationBeforeCacheCreate() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) - .autoCreate() - .defaultServerResource("primary-server-resource")); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - try { - new TimeLimitedTask>(5, TimeUnit.SECONDS) { + assertExceptionOccurred(IllegalStateException.class, + new TimeLimitedTask>(ofSeconds(10)) { @Override Cache runTask() throws Exception { return cacheManager.createCache("simple-cache", @@ -333,48 +268,35 @@ Cache runTask() throws Exception { ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB)))); } - }.run(); - fail("Expecting IllegalStateException"); - } catch (IllegalStateException e) { - assertThat(getUltimateCause(e), is(instanceOf(TimeoutException.class))); - } + }) + .withRootCauseInstanceOf(TimeoutException.class); } @Test - @Ignore("Need to decide if we close cache entity in a daemon thread") public void testTerminationBeforeCacheRemove() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB)))); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - new TimeLimitedTask(5, TimeUnit.SECONDS) { - @Override - Void runTask() throws Exception { - // CacheManager.removeCache silently "fails" when a timeout is recognized - cacheManager.removeCache("simple-cache"); - return null; - } - }.run(); + cacheManager.removeCache("simple-cache"); } @Test public void testTerminationThenGet() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) .timeouts(TimeoutsBuilder.timeouts().read(Duration.of(1, ChronoUnit.SECONDS)).build()) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -382,33 +304,32 @@ public void testTerminationThenGet() throws Exception { PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - final Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); cache.put(1L, "un"); cache.put(2L, "deux"); cache.put(3L, "trois"); - assertThat(cache.get(2L), is(not(nullValue()))); + assertThat(cache.get(2L)).isNotNull(); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - String value = new TimeLimitedTask(5, TimeUnit.SECONDS) { + String value = new TimeLimitedTask(ofSeconds(5)) { @Override String runTask() throws Exception { return cache.get(2L); } }.run(); - assertThat(value, is(nullValue())); + assertThat(value).isNull(); } @Test public void testTerminationThenContainsKey() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) .timeouts(TimeoutsBuilder.timeouts().read(Duration.of(1, ChronoUnit.SECONDS)).build()) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -416,23 +337,23 @@ public void testTerminationThenContainsKey() throws Exception { PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - final Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); cache.put(1L, "un"); cache.put(2L, "deux"); cache.put(3L, "trois"); - assertThat(cache.containsKey(2L), is(true)); + assertThat(cache.containsKey(2L)).isTrue(); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - boolean value = new TimeLimitedTask(5, TimeUnit.SECONDS) { + boolean value = new TimeLimitedTask(ofSeconds(5)) { @Override Boolean runTask() throws Exception { return cache.containsKey(2L); } }.run(); - assertThat(value, is(false)); + assertThat(value).isFalse(); } @Ignore("ClusteredStore.iterator() is not implemented") @@ -440,10 +361,9 @@ Boolean runTask() throws Exception { public void testTerminationThenIterator() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) .timeouts(TimeoutsBuilder.timeouts().read(Duration.of(1, ChronoUnit.SECONDS)).build()) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -451,31 +371,30 @@ public void testTerminationThenIterator() throws Exception { PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - final Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); cache.put(1L, "un"); cache.put(2L, "deux"); cache.put(3L, "trois"); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - Iterator> value = new TimeLimitedTask>>(5, TimeUnit.SECONDS) { + Iterator> value = new TimeLimitedTask>>(ofSeconds(5)) { @Override - Iterator> runTask() throws Exception { + Iterator> runTask() { return cache.iterator(); } }.run(); - assertThat(value.hasNext(), is(false)); + assertThat(value.hasNext()).isFalse(); } @Test public void testTerminationThenPut() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) .timeouts(TimeoutsBuilder.timeouts().write(Duration.of(1, ChronoUnit.SECONDS)).build()) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -483,36 +402,30 @@ public void testTerminationThenPut() throws Exception { PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - final Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); cache.put(1L, "un"); cache.put(2L, "deux"); cache.put(3L, "trois"); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - try { - new TimeLimitedTask(5, TimeUnit.SECONDS) { - @Override - Void runTask() throws Exception { - cache.put(2L, "dos"); - return null; - } - }.run(); - fail("Expecting StoreAccessTimeoutException"); - } catch (StoreAccessTimeoutException e) { - //Final timeout occurs on the cleanup which is a remove (hence a GET_AND_APPEND) - assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); - } + // The resilience strategy will pick it up and not exception is thrown + new TimeLimitedTask(ofSeconds(10)) { + @Override + Void runTask() throws Exception { + cache.put(2L, "dos"); + return null; + } + }.run(); } @Test public void testTerminationThenPutIfAbsent() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) .timeouts(TimeoutsBuilder.timeouts().write(Duration.of(1, ChronoUnit.SECONDS)).build()) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -520,34 +433,29 @@ public void testTerminationThenPutIfAbsent() throws Exception { PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - final Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); cache.put(1L, "un"); cache.put(2L, "deux"); cache.put(3L, "trois"); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - try { - new TimeLimitedTask(5, TimeUnit.SECONDS) { - @Override - String runTask() throws Exception { - return cache.putIfAbsent(2L, "dos"); - } - }.run(); - fail("Expecting StoreAccessTimeoutException"); - } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); - } + // The resilience strategy will pick it up and not exception is thrown + new TimeLimitedTask(ofSeconds(10)) { + @Override + String runTask() throws Exception { + return cache.putIfAbsent(2L, "dos"); + } + }.run(); } @Test public void testTerminationThenRemove() throws Exception { CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) .timeouts(TimeoutsBuilder.timeouts().write(Duration.of(1, ChronoUnit.SECONDS)).build()) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -555,35 +463,31 @@ public void testTerminationThenRemove() throws Exception { PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - final Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); cache.put(1L, "un"); cache.put(2L, "deux"); cache.put(3L, "trois"); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - try { - new TimeLimitedTask(5, TimeUnit.SECONDS) { - @Override - Void runTask() throws Exception { - cache.remove(2L); - return null; - } - }.run(); - fail("Expecting StoreAccessTimeoutException"); - } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); - } + new TimeLimitedTask(ofSeconds(10)) { + @Override + Void runTask() throws Exception { + cache.remove(2L); + return null; + } + }.run(); } @Test public void testTerminationThenClear() throws Exception { + StatisticsService statisticsService = new DefaultStatisticsService(); CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(cluster.getConnectionURI().resolve("/MyCacheManagerName")) + .using(statisticsService) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) .timeouts(TimeoutsBuilder.timeouts().write(Duration.of(1, ChronoUnit.SECONDS)).build()) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("simple-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -591,93 +495,73 @@ public void testTerminationThenClear() throws Exception { PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); - final Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); cache.put(1L, "un"); cache.put(2L, "deux"); cache.put(3L, "trois"); - cluster.getClusterControl().terminateAllServers(); + CLUSTER.get().getClusterControl().terminateAllServers(); - try { - new TimeLimitedTask(5, TimeUnit.SECONDS) { + // The resilience strategy will pick it up and not exception is thrown + new TimeLimitedTask(ofSeconds(10)) { @Override - Void runTask() throws Exception { + Void runTask() { cache.clear(); return null; } }.run(); - fail("Expecting StoreAccessTimeoutException"); - } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for CLEAR")); - } - } - - private Throwable getUltimateCause(Throwable t) { - Throwable ultimateCause = t; - while (ultimateCause.getCause() != null) { - ultimateCause = ultimateCause.getCause(); - } - return ultimateCause; - } - - private List getCausalChain(Throwable t) { - ArrayList causalChain = new ArrayList<>(); - for (Throwable cause = t; cause != null; cause = cause.getCause()) { - causalChain.add(cause); - } - return causalChain; - } - - private static void overrideProperty(Map oldProperties, String propertyName, String propertyValue) { - TCProperties tcProperties = TCPropertiesImpl.getProperties(); - oldProperties.put(propertyName, tcProperties.getProperty(propertyName)); - tcProperties.setProperty(propertyName, propertyValue); } /** - * Used as a {@link Rule @Rule} to limit the number of concurrently executing tests. + * If the server goes down, the client should not freeze on a server call. It should timeout and answer using + * the resilience strategy. Whatever the number of calls is done afterwards. + * + * @throws Exception */ - private final class TestConcurrencyLimiter extends ExternalResource { + @Test + public void testTerminationFreezesTheClient() throws Exception { + Duration readOperationTimeout = Duration.ofMillis(100); + + try(PersistentCacheManager cacheManager = + CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.get().getConnectionURI().resolve("/").resolve(testName.getMethodName())) + .timeouts(TimeoutsBuilder.timeouts() + .read(readOperationTimeout)) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) + .withCache("simple-cache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB)))) + .build(true)) { - @Override - protected void before() throws Throwable { - try { - TEST_PERMITS.acquire(); - } catch (InterruptedException e) { - throw new AssertionError(e); - } - } + Cache cache = cacheManager.getCache("simple-cache", Long.class, String.class); + cache.put(1L, "un"); - @Override - protected void after() { - TEST_PERMITS.release(); - } - } + CLUSTER.get().getClusterControl().terminateAllServers(); - /** - * Used as a {@link org.junit.ClassRule @ClassRule} to determine the number of tests to - * be run from the class. - */ - private static final class TestCounter implements TestRule { - - private int testCount; + // Fill the inflight queue and check that we wait no longer than the read timeout + for (int i = 0; i < CLIENT_MAX_PENDING_REQUESTS; i++) { + cache.get(1L); + } - @Override - public Statement apply(Statement base, Description description) { - int testCount = 0; - for (Description child : description.getChildren()) { - if (child.isTest()) { - testCount++; + // The resilience strategy will pick it up and not exception is thrown + new TimeLimitedTask(readOperationTimeout.multipliedBy(2)) { // I multiply by 2 to let some room after the expected timeout + @Override + Void runTask() { + cache.get(1L); // the call that could block + return null; } - } - this.testCount = testCount; + }.run(); - return base; + } catch(StateTransitionException e) { + // On the cacheManager.close(), it waits for the lease to expire and then throw this exception } + } - private int getTestCount() { - return testCount; - } + private static void overrideProperty(Map oldProperties, String propertyName, String propertyValue) { + TCProperties tcProperties = TCPropertiesImpl.getProperties(); + oldProperties.put(propertyName, tcProperties.getProperty(propertyName, true)); + tcProperties.setProperty(propertyName, propertyValue); } /** @@ -693,14 +577,12 @@ private abstract class TimeLimitedTask { * and test task completion & thread interrupt clear. */ private final byte[] lock = new byte[0]; - private final long timeLimit; - private final TimeUnit unit; + private final Duration timeLimit; private volatile boolean isDone = false; private volatile boolean isExpired = false; - private TimeLimitedTask(long timeLimit, TimeUnit unit) { + private TimeLimitedTask(Duration timeLimit) { this.timeLimit = timeLimit; - this.unit = unit; } /** @@ -713,7 +595,7 @@ private TimeLimitedTask(long timeLimit, TimeUnit unit) { /** * Invokes {@link #runTask()} under a time limit. If {@code runTask} execution exceeds the amount of - * time specified in the {@link TimeLimitedTask#TimeLimitedTask(long, TimeUnit) constructor}, the task + * time specified in the {@link TimeLimitedTask#TimeLimitedTask(Duration) constructor}, the task * {@code Thread} is first interrupted and, if the thread remains alive for another duration of the time * limit, the thread is forcefully stopped using {@link Thread#stop()}. * @@ -726,7 +608,7 @@ private TimeLimitedTask(long timeLimit, TimeUnit unit) { V run() throws Exception { V result; - Future future = interruptAfter(timeLimit, unit); + Future future = interruptAfter(timeLimit); try { result = this.runTask(); } finally { @@ -735,7 +617,7 @@ V run() throws Exception { future.cancel(true); Thread.interrupted(); // Reset interrupted status } - assertThat(testName.getMethodName() + " test thread exceeded its time limit of " + timeLimit + " " + unit, isExpired, is(false)); + assertThat(isExpired).describedAs( "%s test thread exceeded its time limit of %s", testName.getMethodName(), timeLimit).isFalse(); } return result; @@ -746,15 +628,14 @@ V run() throws Exception { * If the timeout expires, a thread dump is taken and the current thread interrupted. * * @param interval the amount of time to wait - * @param unit the unit for {@code interval} * * @return a {@code Future} that may be used to cancel the timeout. */ - private Future interruptAfter(final long interval, final TimeUnit unit) { + private Future interruptAfter(Duration interval) { final Thread targetThread = Thread.currentThread(); FutureTask killer = new FutureTask<>(() -> { try { - unit.sleep(interval); + Thread.sleep(interval.toMillis()); if (!isDone && targetThread.isAlive()) { synchronized (lock) { if (isDone) { @@ -773,7 +654,7 @@ private Future interruptAfter(final long interval, final TimeUnit unit) { * looping wait where the interrupt status is recorded but ignored until the awaited event * occurs. */ - unit.timedJoin(targetThread, interval); + targetThread.join(interval.toMillis()); if (!isDone && targetThread.isAlive()) { System.out.format("%s test thread did not respond to Thread.interrupt; forcefully stopping %s%n", testName.getMethodName(), targetThread); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockIntegrationTest.java index ef9ad659c4..889dd93c01 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockIntegrationTest.java @@ -15,7 +15,6 @@ */ package org.ehcache.clustered.lock; -import java.io.File; import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; @@ -25,8 +24,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; - -import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLock; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLock.Hold; import org.junit.BeforeClass; @@ -35,20 +32,17 @@ import org.terracotta.connection.Connection; import org.terracotta.testing.rules.Cluster; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; -public class VoltronReadWriteLockIntegrationTest extends ClusteredTests { - @ClassRule - public static Cluster CLUSTER = newCluster().in(new File("build/cluster")).build(); +public class VoltronReadWriteLockIntegrationTest { - @BeforeClass - public static void waitForActive() throws Exception { - CLUSTER.getClusterControl().waitForActive(); - } + @ClassRule + public static Cluster CLUSTER = newCluster().in(clusterPath()).build(); @Test public void testSingleThreadSingleClientInteraction() throws Throwable { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java index 619bfcf010..f728c96e95 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java @@ -15,45 +15,49 @@ */ package org.ehcache.clustered.lock; -import java.io.File; -import java.util.concurrent.Callable; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - -import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLock; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLock.Hold; +import org.ehcache.clustered.util.ParallelTestCluster; +import org.ehcache.clustered.util.runners.Parallel; import org.junit.Before; import org.junit.ClassRule; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; import org.terracotta.connection.Connection; -import org.terracotta.testing.rules.Cluster; import static org.ehcache.clustered.lock.VoltronReadWriteLockIntegrationTest.async; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; import static org.junit.Assert.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; -public class VoltronReadWriteLockPassiveIntegrationTest extends ClusteredTests { - @ClassRule - public static Cluster CLUSTER = newCluster(2).in(new File("build/cluster")).build(); +@RunWith(Parallel.class) +public class VoltronReadWriteLockPassiveIntegrationTest { + + @ClassRule @Rule + public static final ParallelTestCluster CLUSTER = new ParallelTestCluster(newCluster(2).in(clusterPath()).build()); + @Rule + public final TestName testName = new TestName(); @Before - public void waitForActive() throws Exception { - CLUSTER.getClusterControl().waitForActive(); - CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + public void startAllServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); } @Test public void testSingleThreadSingleClientInteraction() throws Throwable { try (Connection client = CLUSTER.newConnection()) { - VoltronReadWriteLock lock = new VoltronReadWriteLock(client, "test"); + VoltronReadWriteLock lock = new VoltronReadWriteLock(client, testName.getMethodName()); Hold hold = lock.writeLock(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startOneServer(); hold.unlock(); } @@ -62,7 +66,7 @@ public void testSingleThreadSingleClientInteraction() throws Throwable { @Test public void testMultipleThreadsSingleConnection() throws Throwable { try (Connection client = CLUSTER.newConnection()) { - final VoltronReadWriteLock lock = new VoltronReadWriteLock(client, "test"); + final VoltronReadWriteLock lock = new VoltronReadWriteLock(client, testName.getMethodName()); Hold hold = lock.writeLock(); @@ -78,8 +82,8 @@ public void testMultipleThreadsSingleConnection() throws Throwable { //expected } + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startOneServer(); try { waiter.get(100, TimeUnit.MILLISECONDS); @@ -98,12 +102,12 @@ public void testMultipleThreadsSingleConnection() throws Throwable { public void testMultipleClients() throws Throwable { try (Connection clientA = CLUSTER.newConnection(); Connection clientB = CLUSTER.newConnection()) { - VoltronReadWriteLock lockA = new VoltronReadWriteLock(clientA, "test"); + VoltronReadWriteLock lockA = new VoltronReadWriteLock(clientA, testName.getMethodName()); Hold hold = lockA.writeLock(); Future waiter = async(() -> { - new VoltronReadWriteLock(clientB, "test").writeLock().unlock(); + new VoltronReadWriteLock(clientB, testName.getMethodName()).writeLock().unlock(); return null; }); @@ -114,8 +118,8 @@ public void testMultipleClients() throws Throwable { //expected } + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startOneServer(); try { waiter.get(100, TimeUnit.MILLISECONDS); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 634bb4e0b6..fef40076f7 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -15,107 +15,108 @@ */ package org.ehcache.clustered.management; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; import org.ehcache.CacheManager; import org.ehcache.Status; -import org.ehcache.clustered.ClusteredTests; +import org.ehcache.clustered.util.BeforeAll; +import org.ehcache.clustered.util.BeforeAllRule; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.junit.AfterClass; import org.junit.Before; -import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; +import org.junit.rules.RuleChain; import org.junit.rules.Timeout; -import org.terracotta.connection.Connection; -import org.terracotta.management.entity.nms.NmsConfig; -import org.terracotta.management.entity.nms.client.DefaultNmsService; -import org.terracotta.management.entity.nms.client.NmsEntity; -import org.terracotta.management.entity.nms.client.NmsEntityFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.management.entity.nms.client.NmsService; +import org.terracotta.management.model.cluster.AbstractManageableNode; import org.terracotta.management.model.cluster.Client; import org.terracotta.management.model.cluster.ClientIdentifier; +import org.terracotta.management.model.cluster.ServerEntity; import org.terracotta.management.model.cluster.ServerEntityIdentifier; import org.terracotta.management.model.context.Context; -import org.terracotta.management.model.message.Message; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.testing.rules.Cluster; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import java.io.File; -import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Scanner; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; +import static java.lang.Thread.sleep; +import static java.util.Collections.unmodifiableMap; import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredShared; import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResources; import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertTrue; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; +import static org.junit.rules.RuleChain.outerRule; + -public abstract class AbstractClusteringManagementTest extends ClusteredTests { +@SuppressWarnings("rawtypes") // Need to suppress because of a Javac bug giving a rawtype on AbstractManageableNode::isManageable. +public abstract class AbstractClusteringManagementTest { - private static final String RESOURCE_CONFIG = - "" - + "" - + "64" - + "64" - + "" + - "\n"; + private static final Logger LOGGER = LoggerFactory.getLogger(AbstractClusteringManagementTest.class); + + private static final Map resources; + static { + HashMap map = new HashMap<>(); + map.put("primary-server-resource", 64L); + map.put("secondary-server-resource", 64L); + resources = unmodifiableMap(map); + } protected static CacheManager cacheManager; protected static ClientIdentifier ehcacheClientIdentifier; protected static ServerEntityIdentifier clusterTierManagerEntityIdentifier; - protected static ObjectMapper mapper = new ObjectMapper(); - - protected static NmsService nmsService; - protected static ServerEntityIdentifier tmsServerEntityIdentifier; - protected static Connection managementConnection; + protected static final ObjectMapper mapper = new ObjectMapper().configure(SerializationFeature.INDENT_OUTPUT, true); @ClassRule - public static Cluster CLUSTER = newCluster().in(new File("build/cluster")) - .withServiceFragment(RESOURCE_CONFIG).build(); + public static final ClusterWithManagement CLUSTER = new ClusterWithManagement(newCluster(2) + .in(clusterPath()).withServiceFragment(offheapResources(resources)).build()); - @BeforeClass - public static void beforeClass() throws Exception { - mapper.configure(SerializationFeature.INDENT_OUTPUT, true); + @Rule + public final RuleChain rules = outerRule(Timeout.seconds(90)).around(new BeforeAllRule(this)); - CLUSTER.getClusterControl().waitForActive(); + @BeforeAll + public void beforeAllTests() throws Exception { + initCM(); + initIdentifiers(); + } - // simulate a TMS client - managementConnection = CLUSTER.newConnection(); - NmsEntityFactory entityFactory = new NmsEntityFactory(managementConnection, AbstractClusteringManagementTest.class.getName()); - NmsEntity tmsAgentEntity = entityFactory.retrieveOrCreate(new NmsConfig()); - nmsService = new DefaultNmsService(tmsAgentEntity); - nmsService.setOperationTimeout(5, TimeUnit.SECONDS); + @Before + public void init() { + CLUSTER.getNmsService().readMessages(); + } - tmsServerEntityIdentifier = readTopology() - .activeServerEntityStream() - .filter(serverEntity -> serverEntity.getType().equals(NmsConfig.ENTITY_TYPE)) - .findFirst() - .get() // throws if not found - .getServerEntityIdentifier(); + @AfterClass + public static void afterClass() throws Exception { + tearDownCacheManagerAndStatsCollector(); + } + private static void initCM() throws InterruptedException { cacheManager = newCacheManagerBuilder() // cluster config - .with(cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-1")) - .autoCreate() - .defaultServerResource("primary-server-resource") - .resourcePool("resource-pool-a", 28, MemoryUnit.MB, "secondary-server-resource") // <2> - .resourcePool("resource-pool-b", 16, MemoryUnit.MB)) // will take from primary-server-resource + .with(cluster(CLUSTER.getCluster().getConnectionURI().resolve("/my-server-entity-1")) + .autoCreate(server -> server + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 10, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 8, MemoryUnit.MB))) // will take from primary-server-resource // management config .using(new DefaultManagementRegistryConfiguration() .addTags("webapp-1", "server-node-1") @@ -146,23 +147,13 @@ public static void beforeClass() throws Exception { // ensure the CM is running and get its client id assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); - ehcacheClientIdentifier = readTopology().getClients().values() - .stream() - .filter(client -> client.getName().equals("Ehcache:my-server-entity-1")) - .findFirst() - .map(Client::getClientIdentifier) - .get(); - - clusterTierManagerEntityIdentifier = readTopology() - .activeServerEntityStream() - .filter(serverEntity -> serverEntity.getName().equals("my-server-entity-1")) - .findFirst() - .get() // throws if not found - .getServerEntityIdentifier(); // test_notifs_sent_at_CM_init waitForAllNotifications( "CLIENT_CONNECTED", + "CLIENT_PROPERTY_ADDED", + "CLIENT_PROPERTY_ADDED", + "CLIENT_PROPERTY_ADDED", "CLIENT_REGISTRY_AVAILABLE", "CLIENT_TAGS_UPDATED", "EHCACHE_RESOURCE_POOLS_CONFIGURED", @@ -171,72 +162,79 @@ public static void beforeClass() throws Exception { "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_DESTROYED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", - "SERVER_ENTITY_UNFETCHED" - ); - - sendManagementCallOnEntityToCollectStats(); - } + "SERVER_ENTITY_UNFETCHED", + "EHCACHE_RESOURCE_POOLS_CONFIGURED", - @AfterClass - public static void afterClass() throws Exception { - if (cacheManager != null && cacheManager.getStatus() == Status.AVAILABLE) { + "SERVER_ENTITY_DESTROYED", + "SERVER_ENTITY_CREATED", + "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", + "ENTITY_REGISTRY_AVAILABLE", "ENTITY_REGISTRY_AVAILABLE", "ENTITY_REGISTRY_AVAILABLE", "ENTITY_REGISTRY_AVAILABLE", + "EHCACHE_SERVER_STORE_CREATED", "EHCACHE_SERVER_STORE_CREATED", "EHCACHE_SERVER_STORE_CREATED" - if (nmsService != null) { - Context ehcacheClient = readTopology().getClient(ehcacheClientIdentifier).get().getContext().with("cacheManagerName", "my-super-cache-manager"); - nmsService.stopStatisticCollector(ehcacheClient).waitForReturn(); - } + ); + } - cacheManager.close(); + private static void initIdentifiers() throws Exception { + while ((ehcacheClientIdentifier = readTopology().getClients().values() + .stream() + .filter(client -> client.getName().equals("Ehcache:my-server-entity-1")) + .filter(AbstractManageableNode::isManageable) + .findFirst() + .map(Client::getClientIdentifier) + .orElse(null)) == null) { + sleep(200); } - if (nmsService != null) { - Context context = readTopology().getSingleStripe().getActiveServerEntity(tmsServerEntityIdentifier).get().getContext(); - nmsService.stopStatisticCollector(context); - - managementConnection.close(); + while ((clusterTierManagerEntityIdentifier = readTopology() + .activeServerEntityStream() + .filter(serverEntity -> serverEntity.getName().equals("my-server-entity-1")) + .filter(AbstractManageableNode::isManageable) + .map(ServerEntity::getServerEntityIdentifier) + .findFirst() + .orElse(null)) == null) { + sleep(200); } } - @Rule - public final Timeout globalTimeout = Timeout.seconds(60); + private static void tearDownCacheManagerAndStatsCollector() throws Exception { + if (cacheManager != null && cacheManager.getStatus() == Status.AVAILABLE) { - @Before - public void init() throws Exception { - if (nmsService != null) { - // this call clear the CURRRENT arrived messages, but be aware that some other messages can arrive just after the drain - nmsService.readMessages(); + readTopology().getClient(ehcacheClientIdentifier) + .ifPresent(client -> { + try { + CLUSTER.getNmsService().stopStatisticCollector(client.getContext().with("cacheManagerName", "my-super-cache-manager")).waitForReturn(); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + cacheManager.close(); } } - protected static org.terracotta.management.model.cluster.Cluster readTopology() throws Exception { - return nmsService.readTopology(); + public static org.terracotta.management.model.cluster.Cluster readTopology() throws Exception { + org.terracotta.management.model.cluster.Cluster cluster = CLUSTER.getNmsService().readTopology(); + //System.out.println(mapper.writeValueAsString(cluster.toMap())); + return cluster; } - protected static void sendManagementCallOnClientToCollectStats() throws Exception { - Context ehcacheClient = readTopology().getClient(ehcacheClientIdentifier).get().getContext() - .with("cacheManagerName", "my-super-cache-manager"); - nmsService.startStatisticCollector(ehcacheClient, 1, TimeUnit.SECONDS).waitForReturn(); + public static void sendManagementCallOnClientToCollectStats() throws Exception { + org.terracotta.management.model.cluster.Cluster topology = readTopology(); + Client manageableClient = topology.getClient(ehcacheClientIdentifier).filter(AbstractManageableNode::isManageable).get(); + Context cmContext = manageableClient.getContext(); + CLUSTER.getNmsService().startStatisticCollector(cmContext, 1, TimeUnit.SECONDS).waitForReturn(); } - protected static List waitForNextStats() throws Exception { - // uses the monitoring consumre entity to get the content of the stat buffer when some stats are collected - return nmsService.waitForMessage(message -> message.getType().equals("STATISTICS")) + public static List waitForNextStats() throws Exception { + // uses the monitoring to get the content of the stat buffer when some stats are collected + return CLUSTER.getNmsService().waitForMessage(message -> message.getType().equals("STATISTICS")) .stream() .filter(message -> message.getType().equals("STATISTICS")) .flatMap(message -> message.unwrap(ContextualStatistics.class).stream()) .collect(Collectors.toList()); } - protected static List notificationTypes(List messages) { - return messages - .stream() - .filter(message -> "NOTIFICATION".equals(message.getType())) - .flatMap(message -> message.unwrap(ContextualNotification.class).stream()) - .map(ContextualNotification::getType) - .collect(Collectors.toList()); - } - - protected static String read(String path) throws FileNotFoundException { + protected static String read(String path) { try (Scanner scanner = new Scanner(AbstractClusteringManagementTest.class.getResourceAsStream(path), "UTF-8")) { return scanner.useDelimiter("\\A").next(); } @@ -246,13 +244,15 @@ protected static String normalizeForLineEndings(String stringToNormalize) { return stringToNormalize.replace("\r\n", "\n").replace("\r", "\n"); } - private static void sendManagementCallOnEntityToCollectStats() throws Exception { - Context context = readTopology().getSingleStripe().getActiveServerEntity(tmsServerEntityIdentifier).get().getContext(); - nmsService.startStatisticCollector(context, 1, TimeUnit.SECONDS).waitForReturn(); + public static void waitForAllNotifications(String... notificationTypes) throws InterruptedException { + waitForAllNotifications(CLUSTER.getNmsService(), notificationTypes); } - protected static void waitForAllNotifications(String... notificationTypes) throws InterruptedException, TimeoutException { + public static void waitForAllNotifications(NmsService nmsService, String... notificationTypes) throws InterruptedException { List waitingFor = new ArrayList<>(Arrays.asList(notificationTypes)); + List missingOnes = new ArrayList<>(); + List existingOnes = new ArrayList<>(); + // please keep these sout because it is really hard to troubleshoot blocking tests in the beforeClass method in the case we do not receive all notifs. // System.out.println("waitForAllNotifications: " + waitingFor); @@ -261,9 +261,15 @@ protected static void waitForAllNotifications(String... notificationTypes) throw nmsService.waitForMessage(message -> { if (message.getType().equals("NOTIFICATION")) { for (ContextualNotification notification : message.unwrap(ContextualNotification.class)) { - if (waitingFor.remove(notification.getType())) { -// System.out.println("Remove " + notification.getType()); -// System.out.println("Still waiting for: " + waitingFor); + if ("org.terracotta.management.entity.nms.client.NmsEntity".equals(notification.getContext().get("entityType"))) { + LOGGER.info("IGNORE:" + notification); // this is the passive NmsEntity, sometimes we catch it, sometimes not + } else if (waitingFor.remove(notification.getType())) { + existingOnes.add(notification); + LOGGER.debug("Remove " + notification); + LOGGER.debug("Still waiting for: " + waitingFor); + } else { + LOGGER.debug("Extra: " + notification); + missingOnes.add(notification); } } } @@ -277,6 +283,7 @@ protected static void waitForAllNotifications(String... notificationTypes) throw t.join(30_000); // should be way enough to receive all messages t.interrupt(); // we interrupt the thread that is waiting on the message queue - assertTrue("Still waiting for: " + waitingFor, waitingFor.isEmpty()); + assertTrue("Still waiting for: " + waitingFor + ", only got: " + existingOnes, waitingFor.isEmpty()); + assertTrue("Unexpected notification: " + missingOnes + ", only got: " + existingOnes, missingOnes.isEmpty()); } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AfterFailoverManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AfterFailoverManagementServiceTest.java new file mode 100644 index 0000000000..499bc0697a --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AfterFailoverManagementServiceTest.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.management; + +import org.ehcache.clustered.util.BeforeAll; +import org.junit.FixMethodOrder; +import org.junit.runners.MethodSorters; + +@FixMethodOrder(MethodSorters.NAME_ASCENDING) +public class AfterFailoverManagementServiceTest extends ClusteringManagementServiceTest { + + @BeforeAll + @Override + public void beforeAllTests() throws Exception { + super.beforeAllTests(); + CLUSTER.getCluster().getClusterControl().terminateActive(); + CLUSTER.getCluster().getClusterControl().waitForActive(); + CLUSTER.startCollectingServerEntityStats(); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/CMClosedEventSentTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/CMClosedEventSentTest.java new file mode 100644 index 0000000000..0687961e13 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/CMClosedEventSentTest.java @@ -0,0 +1,100 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.management; + +import org.ehcache.CacheManager; +import org.ehcache.Status; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.management.model.message.Message; +import org.terracotta.management.model.notification.ContextualNotification; + +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.unmodifiableMap; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResources; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertFalse; + + +public class CMClosedEventSentTest { + + private static final Map resources; + static { + HashMap map = new HashMap<>(); + map.put("primary-server-resource", 64L); + map.put("secondary-server-resource", 64L); + resources = unmodifiableMap(map); + } + + @ClassRule + public static ClusterWithManagement CLUSTER = new ClusterWithManagement( + newCluster().in(clusterPath()).withServiceFragment(offheapResources(resources)).build()); + + @Test(timeout = 60_000) + public void test_CACHE_MANAGER_CLOSED() throws Exception { + try (CacheManager cacheManager = newCacheManagerBuilder().with(cluster(CLUSTER.getCluster().getConnectionURI().resolve("/my-server-entity-1")) + .autoCreate(server -> server + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 10, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 10, MemoryUnit.MB))) // will take from primary-server-resource + // management config + .using(new DefaultManagementRegistryConfiguration() + .addTags("webapp-1", "server-node-1") + .setCacheManagerAlias("my-super-cache-manager")) + // cache config + .withCache("dedicated-cache-1", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .build()) + .build(true)) { + + assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); + waitFor("CACHE_MANAGER_AVAILABLE"); + + } + waitFor("CACHE_MANAGER_CLOSED"); + } + + private void waitFor(String notifType) throws InterruptedException { + while (!Thread.currentThread().isInterrupted()) { + Message message = CLUSTER.getNmsService().waitForMessage(); + if (message.getType().equals("NOTIFICATION")) { + ContextualNotification notification = message.unwrap(ContextualNotification.class).get(0); + if (notification.getType().equals(notifType)) { + break; + } + } + } + assertFalse(Thread.currentThread().isInterrupted()); + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusterWithManagement.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusterWithManagement.java new file mode 100644 index 0000000000..5603c629e5 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusterWithManagement.java @@ -0,0 +1,130 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.management; + +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.MultipleFailureException; +import org.junit.runners.model.Statement; +import org.terracotta.connection.Connection; +import org.terracotta.connection.ConnectionException; +import org.terracotta.exception.EntityConfigurationException; +import org.terracotta.management.entity.nms.NmsConfig; +import org.terracotta.management.entity.nms.client.DefaultNmsService; +import org.terracotta.management.entity.nms.client.IllegalManagementCallException; +import org.terracotta.management.entity.nms.client.NmsEntity; +import org.terracotta.management.entity.nms.client.NmsEntityFactory; +import org.terracotta.management.entity.nms.client.NmsService; +import org.terracotta.management.model.cluster.ServerEntity; +import org.terracotta.management.model.cluster.ServerEntityIdentifier; +import org.terracotta.testing.rules.Cluster; + +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static java.lang.Thread.sleep; +import static java.util.Arrays.asList; + +public final class ClusterWithManagement implements TestRule { + + private final Cluster cluster; + + private Connection managementConnection; + private NmsService nmsService; + private ServerEntityIdentifier tmsServerEntityIdentifier; + + public ClusterWithManagement(Cluster cluster) { + this.cluster = cluster; + } + + protected void before() throws Throwable { + this.managementConnection = cluster.newConnection(); + this.nmsService = createNmsService(managementConnection); + while ((tmsServerEntityIdentifier = nmsService.readTopology() + .activeServerEntityStream() + .filter(serverEntity -> serverEntity.getType().equals(NmsConfig.ENTITY_TYPE)) + .filter(ServerEntity::isManageable) + .map(ServerEntity::getServerEntityIdentifier) + .findFirst() + .orElse(null)) == null) { + sleep(100); + } + startCollectingServerEntityStats(); + } + + protected void after() throws Exception { + try { + nmsService.readTopology().getSingleStripe().getActiveServerEntity(tmsServerEntityIdentifier) + .ifPresent(client -> { + try { + nmsService.stopStatisticCollector(client.getContext()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } finally { + managementConnection.close(); + } + } + + @Override + public Statement apply(Statement base, Description description) { + return cluster.apply(new Statement() { + @Override + public void evaluate() throws Throwable { + before(); + try { + base.evaluate(); + after(); + } catch (Throwable t) { + try { + after(); + throw t; + } catch (Exception e) { + throw new MultipleFailureException(asList(t, e)); + } + } + } + }, description); + } + + public Cluster getCluster() { + return cluster; + } + + public NmsService getNmsService() { + return nmsService; + } + + public ServerEntityIdentifier getTmsServerEntityIdentifier() { + return tmsServerEntityIdentifier; + } + + private static NmsService createNmsService(Connection connection) throws ConnectionException, EntityConfigurationException { + NmsEntityFactory entityFactory = new NmsEntityFactory(connection, AbstractClusteringManagementTest.class.getName()); + NmsEntity tmsAgentEntity = entityFactory.retrieveOrCreate(new NmsConfig()); + + NmsService nmsService = new DefaultNmsService(tmsAgentEntity); + nmsService.setOperationTimeout(10, TimeUnit.SECONDS); + return nmsService; + } + + public void startCollectingServerEntityStats() throws InterruptedException, ExecutionException, TimeoutException, IllegalManagementCallException { + ServerEntity manageableEntity = nmsService.readTopology().getSingleStripe().getActiveServerEntity(tmsServerEntityIdentifier).filter(ServerEntity::isManageable).get(); + nmsService.startStatisticCollector(manageableEntity.getContext(), 1, TimeUnit.SECONDS).waitForReturn(); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java index 560fed6df2..9e59520677 100755 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java @@ -16,13 +16,13 @@ package org.ehcache.clustered.management; import org.ehcache.Cache; -import org.junit.Assert; import org.junit.Test; import org.terracotta.management.model.stats.ContextualStatistics; import java.util.List; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; public class ClusteredStatisticsCountTest extends AbstractClusteringManagementTest { @@ -61,25 +61,29 @@ public void countTest() throws Exception { if (stat.getContext().contains("cacheName") && stat.getContext().get("cacheName").equals("dedicated-cache-1")) { // please leave it there - it's really useful to see what's coming - /*System.out.println("stats:"); - for (Map.Entry> entry : stat.getStatistics().entrySet()) { + /* + System.out.println("stats:"); + + Set>> entries = stat.getStatistics().entrySet(); + for (Map.Entry> entry : entries) { System.out.println(" - " + entry.getKey() + " : " + entry.getValue()); }*/ - cacheHitCount = stat.getStatistic("Cache:HitCount").longValue(); - clusteredHitCount = stat.getStatistic("Clustered:HitCount").longValue(); - clusteredMissCount = stat.getStatistic("Clustered:MissCount").longValue(); - cacheMissCount = stat.getStatistic("Cache:MissCount").longValue(); + cacheHitCount = stat.getLatestSampleValue("Cache:HitCount").get(); + clusteredHitCount = stat.getLatestSampleValue("Clustered:HitCount").get(); + clusteredMissCount = stat.getLatestSampleValue("Clustered:MissCount").get(); + cacheMissCount = stat.getLatestSampleValue("Cache:MissCount").get(); } } } while(!Thread.currentThread().isInterrupted() && - (cacheHitCount != CACHE_HIT_COUNT) && (clusteredHitCount != CLUSTERED_HIT_COUNT) && - (cacheMissCount != CACHE_MISS_COUNT) && (clusteredMissCount != CLUSTERED_MISS_COUNT)); + ((cacheHitCount != CACHE_HIT_COUNT) || (clusteredHitCount != CLUSTERED_HIT_COUNT) || + (cacheMissCount != CACHE_MISS_COUNT) || (clusteredMissCount != CLUSTERED_MISS_COUNT))); + - Assert.assertThat(cacheHitCount,is(CACHE_HIT_COUNT)); - Assert.assertThat(clusteredHitCount,is(CLUSTERED_HIT_COUNT)); - Assert.assertThat(cacheMissCount,is(CACHE_MISS_COUNT)); - Assert.assertThat(clusteredMissCount,is(CLUSTERED_MISS_COUNT)); + assertThat(cacheHitCount,is(CACHE_HIT_COUNT)); + assertThat(clusteredHitCount,is(CLUSTERED_HIT_COUNT)); + assertThat(cacheMissCount,is(CACHE_MISS_COUNT)); + assertThat(clusteredMissCount,is(CLUSTERED_MISS_COUNT)); } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 521eaefcc1..91955e8130 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -18,7 +18,7 @@ import org.ehcache.Cache; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.junit.BeforeClass; +import org.hamcrest.MatcherAssert; import org.junit.FixMethodOrder; import org.junit.Ignore; import org.junit.Test; @@ -44,31 +44,112 @@ import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.Matchers.arrayContainingInAnyOrder; @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { - private static final Collection ONHEAP_DESCRIPTORS = new ArrayList<>(); - private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList<>(); - private static final Collection DISK_DESCRIPTORS = new ArrayList<>(); - private static final Collection CLUSTERED_DESCRIPTORS = new ArrayList<>(); - private static final Collection CACHE_DESCRIPTORS = new ArrayList<>(); - private static final Collection POOL_DESCRIPTORS = new ArrayList<>(); - private static final Collection SERVER_STORE_DESCRIPTORS = new ArrayList<>(); - private static final Collection OFFHEAP_RES_DESCRIPTORS = new ArrayList<>(); + private static final Collection ONHEAP_DESCRIPTORS = Arrays.asList( + new StatisticDescriptor("OnHeap:EvictionCount" , "COUNTER"), + new StatisticDescriptor("OnHeap:ExpirationCount" , "COUNTER"), + new StatisticDescriptor("OnHeap:MissCount" , "COUNTER"), + new StatisticDescriptor("OnHeap:MappingCount" , "GAUGE"), + new StatisticDescriptor("OnHeap:HitCount" , "COUNTER"), + new StatisticDescriptor("OnHeap:PutCount" , "COUNTER"), + new StatisticDescriptor("OnHeap:RemovalCount" , "COUNTER") + ); + private static final Collection OFFHEAP_DESCRIPTORS = Arrays.asList( + new StatisticDescriptor("OffHeap:MissCount", "COUNTER"), + new StatisticDescriptor("OffHeap:OccupiedByteSize", "GAUGE"), + new StatisticDescriptor("OffHeap:AllocatedByteSize", "GAUGE"), + new StatisticDescriptor("OffHeap:MappingCount", "GAUGE"), + new StatisticDescriptor("OffHeap:EvictionCount", "COUNTER"), + new StatisticDescriptor("OffHeap:ExpirationCount", "COUNTER"), + new StatisticDescriptor("OffHeap:HitCount", "COUNTER"), + new StatisticDescriptor("OffHeap:PutCount", "COUNTER"), + new StatisticDescriptor("OffHeap:RemovalCount", "COUNTER") + ); + private static final Collection DISK_DESCRIPTORS = Arrays.asList( + new StatisticDescriptor("Disk:OccupiedByteSize", "GAUGE"), + new StatisticDescriptor("Disk:AllocatedByteSize", "GAUGE"), + new StatisticDescriptor("Disk:HitCount", "COUNTER"), + new StatisticDescriptor("Disk:EvictionCount", "COUNTER"), + new StatisticDescriptor("Disk:ExpirationCount", "COUNTER"), + new StatisticDescriptor("Disk:MissCount", "COUNTER"), + new StatisticDescriptor("Disk:MappingCount", "GAUGE"), + new StatisticDescriptor("Disk:PutCount", "COUNTER"), + new StatisticDescriptor("Disk:RemovalCount", "COUNTER") + ); + private static final Collection CLUSTERED_DESCRIPTORS = Arrays.asList( + new StatisticDescriptor("Clustered:MissCount", "COUNTER"), + new StatisticDescriptor("Clustered:HitCount", "COUNTER"), + new StatisticDescriptor("Clustered:PutCount", "COUNTER"), + new StatisticDescriptor("Clustered:RemovalCount", "COUNTER"), + new StatisticDescriptor("Clustered:EvictionCount", "COUNTER"), + new StatisticDescriptor("Clustered:ExpirationCount", "COUNTER") + ); + private static final Collection CACHE_DESCRIPTORS = Arrays.asList( + new StatisticDescriptor("Cache:HitCount", "COUNTER"), + new StatisticDescriptor("Cache:MissCount", "COUNTER"), + new StatisticDescriptor("Cache:PutCount", "COUNTER"), + new StatisticDescriptor("Cache:RemovalCount", "COUNTER"), + new StatisticDescriptor("Cache:EvictionCount", "COUNTER"), + new StatisticDescriptor("Cache:ExpirationCount", "COUNTER"), + new StatisticDescriptor("Cache:GetHitLatency#100", "GAUGE"), + new StatisticDescriptor("Cache:GetHitLatency#50", "GAUGE"), + new StatisticDescriptor("Cache:GetHitLatency#95", "GAUGE"), + new StatisticDescriptor("Cache:GetHitLatency#99", "GAUGE"), + new StatisticDescriptor("Cache:GetMissLatency#100", "GAUGE"), + new StatisticDescriptor("Cache:GetMissLatency#50", "GAUGE"), + new StatisticDescriptor("Cache:GetMissLatency#95", "GAUGE"), + new StatisticDescriptor("Cache:GetMissLatency#99", "GAUGE"), + new StatisticDescriptor("Cache:PutLatency#100", "GAUGE"), + new StatisticDescriptor("Cache:PutLatency#50", "GAUGE"), + new StatisticDescriptor("Cache:PutLatency#95", "GAUGE"), + new StatisticDescriptor("Cache:PutLatency#99", "GAUGE"), + new StatisticDescriptor("Cache:RemoveLatency#100", "GAUGE"), + new StatisticDescriptor("Cache:RemoveLatency#50", "GAUGE"), + new StatisticDescriptor("Cache:RemoveLatency#95", "GAUGE"), + new StatisticDescriptor("Cache:RemoveLatency#99", "GAUGE") + ); + private static final Collection POOL_DESCRIPTORS = Arrays.asList( + new StatisticDescriptor("Pool:AllocatedSize", "GAUGE") + ); + private static final Collection SERVER_STORE_DESCRIPTORS = Arrays.asList( + new StatisticDescriptor("Store:AllocatedMemory", "GAUGE"), + new StatisticDescriptor("Store:DataAllocatedMemory", "GAUGE"), + new StatisticDescriptor("Store:OccupiedMemory", "GAUGE"), + new StatisticDescriptor("Store:DataOccupiedMemory", "GAUGE"), + new StatisticDescriptor("Store:Entries", "COUNTER"), + new StatisticDescriptor("Store:UsedSlotCount", "COUNTER"), + new StatisticDescriptor("Store:DataVitalMemory", "GAUGE"), + new StatisticDescriptor("Store:VitalMemory", "GAUGE"), + new StatisticDescriptor("Store:RemovedSlotCount", "COUNTER"), + new StatisticDescriptor("Store:DataSize", "GAUGE"), + new StatisticDescriptor("Store:TableCapacity", "GAUGE") + ); + private static final Collection OFFHEAP_RES_DESCRIPTORS = Arrays.asList( + new StatisticDescriptor("OffHeapResource:AllocatedMemory", "GAUGE") + ); @Test @Ignore("This is not a test, but something useful to show a json print of a cluster topology with all management metadata inside") public void test_A_topology() throws Exception { - Cluster cluster = nmsService.readTopology(); + Cluster cluster = CLUSTER.getNmsService().readTopology(); String json = mapper.writeValueAsString(cluster.toMap()); //System.out.println(json); } @Test public void test_A_client_tags_exposed() throws Exception { - String[] tags = readTopology().getClient(ehcacheClientIdentifier).get().getTags().toArray(new String[0]); - assertThat(tags).containsOnly("server-node-1", "webapp-1"); + MatcherAssert.assertThat(() -> { + try { + return readTopology().getClient(ehcacheClientIdentifier).get().getTags().toArray(new String[0]); + } catch (Exception e) { + throw new AssertionError(e); + } + }, eventually().matches(arrayContainingInAnyOrder("server-node-1", "webapp-1"))); } @Test @@ -150,14 +231,14 @@ public void test_D_server_capabilities_exposed() throws Exception { assertThat(settings.get("alias")).isEqualTo("resource-pool-b"); assertThat(settings.get("type")).isEqualTo("Pool"); assertThat(settings.get("serverResource")).isEqualTo("primary-server-resource"); - assertThat(settings.get("size")).isEqualTo(16 * 1024 * 1024L); + assertThat(settings.get("size")).isEqualTo(8 * 1024 * 1024L); assertThat(settings.get("allocationType")).isEqualTo("shared"); settings = (Settings) descriptors.get(1); assertThat(settings.get("alias")).isEqualTo("resource-pool-a"); assertThat(settings.get("type")).isEqualTo("Pool"); assertThat(settings.get("serverResource")).isEqualTo("secondary-server-resource"); - assertThat(settings.get("size")).isEqualTo(28 * 1024 * 1024L); + assertThat(settings.get("size")).isEqualTo(10 * 1024 * 1024L); assertThat(settings.get("allocationType")).isEqualTo("shared"); // Dedicated PoolSettings @@ -189,7 +270,7 @@ public void test_D_server_capabilities_exposed() throws Exception { // tms entity - managerCapabilities = readTopology().activeServerEntityStream().filter(serverEntity -> serverEntity.is(tmsServerEntityIdentifier)).findFirst().get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); + managerCapabilities = readTopology().activeServerEntityStream().filter(serverEntity -> serverEntity.is(CLUSTER.getTmsServerEntityIdentifier())).findFirst().get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); assertThat(managerCapabilities.length).isEqualTo(3); assertThat(managerCapabilities[0].getName()).isEqualTo("OffHeapResourceSettings"); @@ -211,20 +292,28 @@ public void test_E_notifs_on_add_cache() throws Exception { .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()); - ContextContainer contextContainer = readTopology().getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getContextContainer(); + Cluster cluster = readTopology(); + ContextContainer contextContainer = cluster.getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getContextContainer(); assertThat(contextContainer.getSubContexts()).hasSize(4); TreeSet cNames = contextContainer.getSubContexts().stream().map(ContextContainer::getValue).collect(Collectors.toCollection(TreeSet::new)); assertThat(cNames).isEqualTo(new TreeSet<>(Arrays.asList("cache-2", "dedicated-cache-1", "shared-cache-2", "shared-cache-3"))); - waitForAllNotifications("SERVER_ENTITY_CREATED", "ENTITY_REGISTRY_AVAILABLE", "EHCACHE_SERVER_STORE_CREATED", "SERVER_ENTITY_FETCHED", "CACHE_ADDED"); + if (cluster.serverStream().count() == 2) { + waitForAllNotifications( + "SERVER_ENTITY_CREATED", "ENTITY_REGISTRY_AVAILABLE", "EHCACHE_SERVER_STORE_CREATED", "SERVER_ENTITY_FETCHED", "CACHE_ADDED", + "SERVER_ENTITY_CREATED", "ENTITY_REGISTRY_AVAILABLE", "EHCACHE_SERVER_STORE_CREATED", "CLIENT_REGISTRY_AVAILABLE"); // passive server + } else { + waitForAllNotifications( + "SERVER_ENTITY_CREATED", "ENTITY_REGISTRY_AVAILABLE", "EHCACHE_SERVER_STORE_CREATED", "SERVER_ENTITY_FETCHED", "CACHE_ADDED", "CLIENT_REGISTRY_AVAILABLE"); + } } @Test public void test_F_notifs_on_remove_cache() throws Exception { cacheManager.removeCache("cache-2"); - waitForAllNotifications("CACHE_REMOVED", "SERVER_ENTITY_UNFETCHED"); + waitForAllNotifications("CACHE_REMOVED", "SERVER_ENTITY_UNFETCHED", "CLIENT_REGISTRY_AVAILABLE"); } @Test @@ -256,7 +345,7 @@ public void test_G_stats_collection() throws Exception { .collect(Collectors.toList()); for (ContextualStatistics stat : stats) { - val = stat.getStatistic("Cache:HitCount").longValue(); + val = stat.getLatestSampleValue("Cache:HitCount").get(); } } while(!Thread.currentThread().isInterrupted() && val != 2); @@ -274,23 +363,25 @@ public void test_G_stats_collection() throws Exception { .collect(Collectors.toList()); for (ContextualStatistics stat : stats) { - val = stat.getStatistic("Cache:HitCount").longValue(); + val = stat.getLatestSampleValue("Cache:HitCount").get(); } } while(!Thread.currentThread().isInterrupted() && val != 4); // wait until we have some stats coming from the server entity - while (!Thread.currentThread().isInterrupted() && !allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).findFirst().isPresent()) { + while (!Thread.currentThread().isInterrupted() && !allStats.stream().anyMatch(statistics -> statistics.getContext().contains("consumerId"))) { allStats.addAll(waitForNextStats()); } - List serverStats = allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).collect(Collectors.toList()); + List serverStats = allStats.stream() + .filter(statistics -> statistics.getContext().contains("consumerId")) + .collect(Collectors.toList()); // server-side stats TreeSet capabilities = serverStats.stream() .map(ContextualStatistics::getCapability) .collect(Collectors.toCollection(TreeSet::new)); - assertThat(capabilities).containsOnly("PoolStatistics", "ServerStoreStatistics", "OffHeapResourceStatistics"); + assertThat(capabilities).contains("PoolStatistics", "ServerStoreStatistics", "OffHeapResourceStatistics"); // ensure we collect stats from all registered objects (pools and stores) @@ -333,72 +424,4 @@ public void test_G_stats_collection() throws Exception { assertThat(offHeapResourceDescriptors).isEqualTo(OFFHEAP_RES_DESCRIPTORS.stream().map(StatisticDescriptor::getName).collect(Collectors.toSet())); } - @BeforeClass - public static void initDescriptors() throws ClassNotFoundException { - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:ExpirationCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedByteSize", "SIZE")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:PutCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:RemovalCount" , "COUNTER")); - - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", "SIZE")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedByteSize", "SIZE")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:ExpirationCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MaxMappingCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:PutCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:RemovalCount", "COUNTER")); - - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", "SIZE")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedByteSize", "SIZE")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:ExpirationCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:PutCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:RemovalCount", "COUNTER")); - - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissCount", "COUNTER")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitCount", "COUNTER")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:PutCount", "COUNTER")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:RemovalCount", "COUNTER")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MaxMappingCount", "COUNTER")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionCount", "COUNTER")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:ExpirationCount", "COUNTER")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:OccupiedByteSize", "SIZE")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:AllocatedByteSize", "SIZE")); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MappingCount", "COUNTER")); - - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:PutCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:RemovalCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:EvictionCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ExpirationCount", "COUNTER")); - - POOL_DESCRIPTORS.add(new StatisticDescriptor("Pool:AllocatedSize", "SIZE")); - - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:AllocatedMemory", "SIZE")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataAllocatedMemory", "SIZE")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:OccupiedMemory", "SIZE")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataOccupiedMemory", "SIZE")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:Entries", "COUNTER")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:UsedSlotCount", "COUNTER")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataVitalMemory", "SIZE")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:VitalMemory", "SIZE")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:RemovedSlotCount", "COUNTER")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataSize", "SIZE")); - SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:TableCapacity", "SIZE")); - - OFFHEAP_RES_DESCRIPTORS.add(new StatisticDescriptor("OffHeapResource:AllocatedMemory", "SIZE")); - } - } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/DiagnosticTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/DiagnosticTest.java index b456b113cd..7b94d69697 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/DiagnosticTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/DiagnosticTest.java @@ -23,6 +23,7 @@ import org.terracotta.connection.ConnectionFactory; import org.terracotta.connection.ConnectionPropertyNames; import org.terracotta.connection.entity.EntityRef; +import org.terracotta.management.model.cluster.Server; import java.net.URI; import java.util.Properties; @@ -38,7 +39,7 @@ public class DiagnosticTest extends AbstractClusteringManagementTest { private static final String PROP_REQUEST_TIMEOUTMESSAGE = "request.timeoutMessage"; @Test - public void test_state_dump() throws Exception { + public void test_CACHE_MANAGER_CLOSED() throws Exception { cacheManager.createCache("cache-2", newCacheConfigurationBuilder( String.class, String.class, newResourcePoolsBuilder() @@ -47,12 +48,14 @@ public void test_state_dump() throws Exception { .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()); + int activePort = readTopology().serverStream().filter(Server::isActive).findFirst().get().getBindPort(); + Properties properties = new Properties(); - properties.setProperty(ConnectionPropertyNames.CONNECTION_TIMEOUT, String.valueOf("5000")); + properties.setProperty(ConnectionPropertyNames.CONNECTION_TIMEOUT, String.valueOf("10000")); properties.setProperty(ConnectionPropertyNames.CONNECTION_NAME, "diagnostic"); - properties.setProperty(PROP_REQUEST_TIMEOUT, "5000"); + properties.setProperty(PROP_REQUEST_TIMEOUT, "10000"); properties.setProperty(PROP_REQUEST_TIMEOUTMESSAGE, "timed out"); - URI uri = URI.create("diagnostic://" + CLUSTER.getConnectionURI().getAuthority()); + URI uri = URI.create("diagnostic://localhost:" + activePort); Connection connection = ConnectionFactory.connect(uri, properties); EntityRef ref = connection.getEntityRef(Diagnostics.class, 1, "root"); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java index fd941deb85..5fdc428695 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java @@ -16,7 +16,6 @@ package org.ehcache.clustered.management; import org.ehcache.CacheManager; -import org.ehcache.clustered.ClusteredTests; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; @@ -25,44 +24,44 @@ import org.junit.Test; import org.terracotta.testing.rules.Cluster; -import java.io.File; +import java.util.HashMap; +import java.util.Map; +import static java.util.Collections.unmodifiableMap; import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredShared; import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResources; -public class EhcacheConfigWithManagementTest extends ClusteredTests { - private static final String RESOURCE_CONFIG = - "" - + "" - + "64" - + "64" - + "" + - "\n"; +public class EhcacheConfigWithManagementTest { - @ClassRule - public static Cluster CLUSTER = newCluster().in(new File("build/cluster")) - .withServiceFragment(RESOURCE_CONFIG).build(); - - @BeforeClass - public static void beforeClass() throws Exception { - CLUSTER.getClusterControl().waitForActive(); + private static final Map resources; + static { + HashMap map = new HashMap<>(); + map.put("primary-server-resource", 64L); + map.put("secondary-server-resource", 64L); + resources = unmodifiableMap(map); } + @ClassRule + public static Cluster CLUSTER = newCluster().in(clusterPath()) + .withServiceFragment(offheapResources(resources)).build(); + @Test public void create_cache_manager() throws Exception { CacheManager cacheManager = newCacheManagerBuilder() // cluster config .with(cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-3")) - .autoCreate() - .defaultServerResource("primary-server-resource") - .resourcePool("resource-pool-a", 28, MemoryUnit.MB, "secondary-server-resource") // <2> - .resourcePool("resource-pool-b", 16, MemoryUnit.MB)) // will take from primary-server-resource + .autoCreate(server -> server + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 10, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 8, MemoryUnit.MB))) // will take from primary-server-resource // management config .using(new DefaultManagementRegistryConfiguration() .addTags("webapp-1", "server-node-1") diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java index 270ca16f23..5da65a69e6 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java @@ -36,9 +36,9 @@ import java.util.concurrent.TimeUnit; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.junit.Assert.assertThat; public class EhcacheManagerToStringTest extends AbstractClusteringManagementTest { @@ -56,7 +56,7 @@ public void simpleOnHeapToString() throws Exception { .offheap(1, MemoryUnit.MB) .disk(2, MemoryUnit.MB, true)) .withLoaderWriter(new SampleLoaderWriter<>()) - .add(WriteBehindConfigurationBuilder + .withService(WriteBehindConfigurationBuilder .newBatchedWriteBehindConfiguration(1, TimeUnit.SECONDS, 3) .queueSize(3) .concurrencyLevel(1) @@ -82,14 +82,13 @@ public void simpleOnHeapToString() throws Exception { @Test public void clusteredToString() throws Exception { - URI uri = CLUSTER.getConnectionURI().resolve("/my-server-entity-2"); + URI uri = CLUSTER.getCluster().getConnectionURI().resolve("/my-server-entity-2"); try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() // cluster config .with(ClusteringServiceConfigurationBuilder.cluster(uri) - .autoCreate() - .defaultServerResource("primary-server-resource") - .resourcePool("resource-pool-a", 32, MemoryUnit.MB)) + .autoCreate(server -> server.defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 10, MemoryUnit.MB))) // management config .using(new DefaultManagementRegistryConfiguration() .addTags("webapp-1", "server-node-1") @@ -133,32 +132,32 @@ public void clusteredToString() throws Exception { public static class SampleLoaderWriter implements CacheLoaderWriter { @Override - public V load(K key) throws Exception { + public V load(K key) { throw new UnsupportedOperationException("Implement Me"); } @Override - public Map loadAll(Iterable keys) throws Exception { + public Map loadAll(Iterable keys) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void write(K key, V value) throws Exception { + public void write(K key, V value) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void writeAll(Iterable> entries) throws Exception { + public void writeAll(Iterable> entries) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void delete(K key) throws Exception { + public void delete(K key) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void deleteAll(Iterable keys) throws Exception { + public void deleteAll(Iterable keys) { throw new UnsupportedOperationException("Implement me!"); } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ManagementClusterConnectionTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ManagementClusterConnectionTest.java new file mode 100644 index 0000000000..86c4cb55ee --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ManagementClusterConnectionTest.java @@ -0,0 +1,182 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.management; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.Status; +import org.ehcache.clustered.util.TCPProxyManager; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.hamcrest.Matchers; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.terracotta.utilities.test.rules.TestRetryer; + +import java.net.URI; +import java.time.Duration; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +import static java.time.Duration.ofSeconds; +import static java.util.Collections.unmodifiableMap; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.clustered.management.AbstractClusteringManagementTest.waitForAllNotifications; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.leaseLength; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResources; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.CLASS_RULE; +import static org.terracotta.utilities.test.rules.TestRetryer.tryValues; + +public class ManagementClusterConnectionTest { + + protected static CacheManager cacheManager; + protected static ObjectMapper mapper = new ObjectMapper(); + + private static TCPProxyManager proxyManager; + private static final Map resources; + static { + HashMap map = new HashMap<>(); + map.put("primary-server-resource", 64L); + map.put("secondary-server-resource", 64L); + resources = unmodifiableMap(map); + } + + @ClassRule @Rule + public static TestRetryer CLUSTER = tryValues(ofSeconds(1), ofSeconds(10), ofSeconds(30)) + .map(leaseLength -> new ClusterWithManagement( + newCluster().in(clusterPath()).withServiceFragment( + offheapResources(resources) + leaseLength(leaseLength)).build())) + .outputIs(CLASS_RULE); + + @BeforeClass + public static void beforeClass() throws Exception { + + mapper.configure(SerializationFeature.INDENT_OUTPUT, true); + + CLUSTER.get().getCluster().getClusterControl().waitForActive(); + + proxyManager = TCPProxyManager.create(CLUSTER.get().getCluster().getConnectionURI()); + URI connectionURI = proxyManager.getURI(); + + cacheManager = newCacheManagerBuilder() + // cluster config + .with(cluster(connectionURI.resolve("/my-server-entity-1")) + .autoCreate(server -> server + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 10, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 10, MemoryUnit.MB))) // will take from primary-server-resource + // management config + .using(new DefaultManagementRegistryConfiguration() + .addTags("webapp-1", "server-node-1") + .setCacheManagerAlias("my-super-cache-manager")) + // cache config + .withCache("dedicated-cache-1", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .build()) + .build(true); + + // ensure the CM is running and get its client id + assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); + + // test_notifs_sent_at_CM_init + waitForAllNotifications(CLUSTER.get().getNmsService(), + "CLIENT_CONNECTED", + "CLIENT_PROPERTY_ADDED", + "CLIENT_PROPERTY_ADDED", + "CLIENT_REGISTRY_AVAILABLE", + "CLIENT_TAGS_UPDATED", + "EHCACHE_RESOURCE_POOLS_CONFIGURED", + "EHCACHE_SERVER_STORE_CREATED", + "ENTITY_REGISTRY_AVAILABLE", "ENTITY_REGISTRY_AVAILABLE", + "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_CREATED", + "SERVER_ENTITY_DESTROYED", + "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_FETCHED", + "SERVER_ENTITY_UNFETCHED" + ); + } + + @AfterClass + public static void afterClass() { + if (proxyManager != null) { + proxyManager.close(); + } + } + + @Test + public void test_reconnection() throws Exception { + long count = CLUSTER.get().getNmsService().readTopology().clientStream() + .filter(client -> client.getName() + .startsWith("Ehcache:") && client.isManageable() && client.getTags() + .containsAll(Arrays.asList("webapp-1", "server-node-1"))) + .count(); + + assertThat(count, Matchers.equalTo(1L)); + + String instanceId = getInstanceId(); + + long delay = CLUSTER.input().plusSeconds(1L).toMillis(); + proxyManager.setDelay(delay); + try { + Thread.sleep(delay); + } finally { + proxyManager.setDelay(0); + } + + Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + String initiate_reconnect = cache.get("initiate reconnect"); + + assertThat(initiate_reconnect, Matchers.nullValue()); + + assertThat(() -> { + try { + return CLUSTER.get().getNmsService().readTopology().clientStream() + .filter(client -> client.getName() + .startsWith("Ehcache:") && client.isManageable() && client.getTags() + .containsAll(Arrays.asList("webapp-1", "server-node-1"))) + .count(); + } catch (Exception e) { + throw new AssertionError(e); + } + }, eventually().is(1L)); + assertThat(getInstanceId(), equalTo(instanceId)); + } + + private String getInstanceId() throws Exception { + return CLUSTER.get().getNmsService().readTopology().clientStream() + .filter(client -> client.getName().startsWith("Ehcache:") && client.isManageable()) + .findFirst().get().getContext().get("instanceId"); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/AutoCreateOnReconnectTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/AutoCreateOnReconnectTest.java new file mode 100644 index 0000000000..9da62dec81 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/AutoCreateOnReconnectTest.java @@ -0,0 +1,70 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.reconnect; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.units.MemoryUnit; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import java.net.URI; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.MatcherAssert.assertThat; + + +public class AutoCreateOnReconnectTest { + + @ClassRule + public static Cluster CLUSTER = newCluster(1).in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 64)).build(); + + @Test + public void cacheManagerCanReconnect() throws Exception { + URI connectionURI = CLUSTER.getConnectionURI(); + + try (PersistentCacheManager cacheManager = newCacheManagerBuilder() + .with(cluster(connectionURI.resolve("/crud-cm")) + .autoCreateOnReconnect(server -> server.defaultServerResource("primary-server-resource"))) + .build(true)) { + + Cache cache = cacheManager.createCache("clustered-cache", + newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() + .with(clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .build()); + + cache.put(1L, "one"); + + CLUSTER.getClusterControl().terminateAllServers(); + CLUSTER.getClusterControl().startAllServers(); + + assertThat(() -> { + cache.put(1L, "two"); + return cache.get(1L); + }, eventually().is("two")); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/BasicCacheReconnectTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/BasicCacheReconnectTest.java new file mode 100644 index 0000000000..04e67af0cb --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/BasicCacheReconnectTest.java @@ -0,0 +1,170 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.reconnect; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.StateTransitionException; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.store.ReconnectInProgressException; +import org.ehcache.clustered.util.TCPProxyManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; +import org.terracotta.utilities.test.rules.TestRetryer; + +import java.net.URI; +import java.time.Duration; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; + +import static java.time.Duration.ofSeconds; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.leaseLength; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.fail; + +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.CLASS_RULE; +import static org.terracotta.utilities.test.rules.TestRetryer.tryValues; + +public class BasicCacheReconnectTest { + + private static TCPProxyManager proxyManager; + private static PersistentCacheManager cacheManager; + + private static CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .withResilienceStrategy(new ThrowingResiliencyStrategy<>()) + .build(); + + @ClassRule @Rule + public static final TestRetryer CLUSTER = tryValues(ofSeconds(1), ofSeconds(10), ofSeconds(30)) + .map(leaseLength -> newCluster().in(clusterPath()).withServiceFragment( + offheapResource("primary-server-resource", 64) + leaseLength(leaseLength)).build()) + .outputIs(CLASS_RULE); + + @BeforeClass + public static void initializeCacheManager() throws Exception { + proxyManager = TCPProxyManager.create(CLUSTER.get().getConnectionURI()); + URI connectionURI = proxyManager.getURI(); + + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(connectionURI.resolve("/crud-cm")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + } + + @AfterClass + public static void stopProxies() { + proxyManager.close(); + } + + @Test + public void cacheOpsDuringReconnection() throws Exception { + + try { + + Cache cache = cacheManager.createCache("clustered-cache", config); + + CompletableFuture future = CompletableFuture.runAsync(() -> + ThreadLocalRandom.current() + .longs() + .forEach(value -> + cache.put(value, Long.toString(value)))); + + expireLease(); + + try { + future.get(5000, TimeUnit.MILLISECONDS); + fail(); + } catch (ExecutionException e) { + assertThat(e.getCause().getCause().getCause(), instanceOf(ReconnectInProgressException.class)); + } + + CompletableFuture getSucceededFuture = CompletableFuture.runAsync(() -> { + while (true) { + try { + cache.get(1L); + break; + } catch (RuntimeException e) { + + } + } + }); + + getSucceededFuture.get(20000, TimeUnit.MILLISECONDS); + } finally { + cacheManager.destroyCache("clustered-cache"); + } + + } + + @Test + public void reconnectDuringCacheCreation() throws Exception { + + expireLease(); + + Cache cache = cacheManager.createCache("clustered-cache", config); + + assertThat(cache, notNullValue()); + + cacheManager.destroyCache("clustered-cache"); + + } + + @Test + public void reconnectDuringCacheDestroy() throws Exception { + + Cache cache = cacheManager.createCache("clustered-cache", config); + + assertThat(cache, notNullValue()); + + expireLease(); + + cacheManager.destroyCache("clustered-cache"); + assertThat(cacheManager.getCache("clustered-cache", Long.class, String.class), nullValue()); + + } + + private void expireLease() throws InterruptedException { + long delay = CLUSTER.input().plusSeconds(1L).toMillis(); + proxyManager.setDelay(delay); + try { + Thread.sleep(delay); + } finally { + proxyManager.setDelay(0); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/CacheManagerDestroyReconnectTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/CacheManagerDestroyReconnectTest.java new file mode 100644 index 0000000000..ba210f368d --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/CacheManagerDestroyReconnectTest.java @@ -0,0 +1,89 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.reconnect; + +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.util.TCPProxyManager; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; +import org.terracotta.utilities.test.rules.TestRetryer; + +import java.net.URI; +import java.time.Duration; + +import static java.time.Duration.ofSeconds; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.leaseLength; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.CLASS_RULE; +import static org.terracotta.utilities.test.rules.TestRetryer.tryValues; + +public class CacheManagerDestroyReconnectTest { + + private static TCPProxyManager proxyManager; + private static PersistentCacheManager cacheManager; + + @ClassRule @Rule + public static final TestRetryer CLUSTER = tryValues(ofSeconds(1), ofSeconds(10), ofSeconds(30)) + .map(leaseLength -> newCluster().in(clusterPath()).withServiceFragment( + offheapResource("primary-server-resource", 64) + leaseLength(leaseLength)).build()) + .outputIs(CLASS_RULE); + + @BeforeClass + public static void initializeCacheManager() throws Exception { + proxyManager = TCPProxyManager.create(CLUSTER.get().getConnectionURI()); + URI connectionURI = proxyManager.getURI(); + + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(connectionURI.resolve("/crud-cm")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + } + + @AfterClass + public static void stopProxies() { + proxyManager.close(); + } + + @Test + public void testDestroyCacheManagerReconnects() throws Exception { + + long delay = CLUSTER.input().plusSeconds(1L).toMillis(); + proxyManager.setDelay(delay); + try { + Thread.sleep(delay); + } finally { + proxyManager.setDelay(0); + } + + cacheManager.close(); + + cacheManager.destroy(); + + System.out.println(cacheManager.getStatus()); + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/EventsReconnectTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/EventsReconnectTest.java new file mode 100644 index 0000000000..91ddbfb521 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/EventsReconnectTest.java @@ -0,0 +1,178 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.reconnect; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.store.ReconnectInProgressException; +import org.ehcache.clustered.util.TCPProxyManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; +import org.ehcache.event.EventType; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; +import org.terracotta.utilities.test.rules.TestRetryer; + +import java.net.URI; +import java.time.Duration; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; + +import static java.time.Duration.ofSeconds; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.leaseLength; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.junit.Assert.fail; + +import static org.terracotta.utilities.test.rules.TestRetryer.OutputIs.CLASS_RULE; +import static org.terracotta.utilities.test.rules.TestRetryer.tryValues; + +public class EventsReconnectTest { + + private static TCPProxyManager proxyManager; + private static PersistentCacheManager cacheManager; + + private static class AccountingCacheEventListener implements CacheEventListener { + private final Map>> events; + + AccountingCacheEventListener() { + events = new HashMap<>(); + clear(); + } + + @Override + public void onEvent(CacheEvent event) { + events.get(event.getType()).add(event); + } + + final void clear() { + for (EventType value : EventType.values()) { + events.put(value, new CopyOnWriteArrayList<>()); + } + } + + } + + private static AccountingCacheEventListener cacheEventListener = new AccountingCacheEventListener<>(); + + private static CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .withService(CacheEventListenerConfigurationBuilder + .newEventListenerConfiguration(cacheEventListener, EnumSet.allOf(EventType.class)) + .unordered().asynchronous()) + .withResilienceStrategy(new ThrowingResiliencyStrategy<>()) + .build(); + + @ClassRule @Rule + public static final TestRetryer CLUSTER = tryValues(ofSeconds(1), ofSeconds(10), ofSeconds(30)) + .map(leaseLength -> newCluster().in(clusterPath()).withServiceFragment( + offheapResource("primary-server-resource", 64) + leaseLength(leaseLength)).build()) + .outputIs(CLASS_RULE); + + @BeforeClass + public static void initializeCacheManager() throws Exception { + proxyManager = TCPProxyManager.create(CLUSTER.get().getConnectionURI()); + URI connectionURI = proxyManager.getURI(); + + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(connectionURI.resolve("/crud-cm")) + .autoCreate(s -> s.defaultServerResource("primary-server-resource"))); + cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + } + + @AfterClass + public static void stopProxies() { + proxyManager.close(); + } + + @Test + public void eventsFlowAgainAfterReconnection() throws Exception { + try { + Cache cache = cacheManager.createCache("clustered-cache", config); + + CompletableFuture future = CompletableFuture.runAsync(() -> + ThreadLocalRandom.current() + .longs() + .filter(val -> val != Long.MAX_VALUE) + .forEach(value -> + cache.put(value, Long.toString(value)))); + + expireLease(); + + try { + future.get(5000, TimeUnit.MILLISECONDS); + fail(); + } catch (ExecutionException e) { + assertThat(e.getCause().getCause().getCause(), instanceOf(ReconnectInProgressException.class)); + } + int beforeDisconnectionEventCounter = cacheEventListener.events.get(EventType.CREATED).size(); + + CompletableFuture getSucceededFuture = CompletableFuture.runAsync(() -> { + while (true) { + try { + cache.put(Long.MAX_VALUE, ""); + break; + } catch (RuntimeException e) { + + } + } + }); + + assertThat(getSucceededFuture::isDone, eventually().is(true)); + getSucceededFuture.getNow(null); + assertThat(() -> cacheEventListener.events.get(EventType.CREATED), eventually().matches(hasSize(beforeDisconnectionEventCounter + 1))); + } finally { + cacheManager.destroyCache("clustered-cache"); + } + } + + private static void expireLease() throws InterruptedException { + long delay = CLUSTER.input().plusSeconds(1L).toMillis(); + proxyManager.setDelay(delay); + try { + Thread.sleep(delay); + } finally { + proxyManager.setDelay(0); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/ThrowingResiliencyStrategy.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/ThrowingResiliencyStrategy.java new file mode 100644 index 0000000000..eddd4192a6 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/reconnect/ThrowingResiliencyStrategy.java @@ -0,0 +1,95 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.reconnect; + +import org.ehcache.Cache; +import org.ehcache.clustered.client.internal.store.ReconnectInProgressException; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.StoreAccessException; + +import java.util.Map; + +//For tests +public class ThrowingResiliencyStrategy implements ResilienceStrategy { + @Override + public V getFailure(K key, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public boolean containsKeyFailure(K key, StoreAccessException e) { + throw new RuntimeException(e); + } + + // This is specifically for the test so that it only throws due to a ReconnectInProgress, + // sometimes puts might even timeout in build systems + @Override + public void putFailure(K key, V value, StoreAccessException e) { + if (e.getCause() instanceof ReconnectInProgressException) { + throw new RuntimeException(e); + } + } + + @Override + public void removeFailure(K key, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public void clearFailure(StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public Cache.Entry iteratorFailure(StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public V putIfAbsentFailure(K key, V value, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public boolean removeFailure(K key, V value, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public V replaceFailure(K key, V value, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public boolean replaceFailure(K key, V value, V newValue, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public Map getAllFailure(Iterable keys, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public void putAllFailure(Map entries, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public void removeAllFailure(Iterable keys, StoreAccessException e) { + throw new RuntimeException(e); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java index 04faa8a683..6a2693451e 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java @@ -19,12 +19,13 @@ import org.ehcache.Cache; import org.ehcache.PersistentCacheManager; import org.ehcache.Status; -import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.util.runners.ParallelParameterized; +import org.ehcache.clustered.util.ParallelTestCluster; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; @@ -35,16 +36,16 @@ import org.junit.Before; import org.junit.ClassRule; import org.junit.Ignore; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TestName; import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.testing.rules.Cluster; +import org.terracotta.utilities.test.WaitForAssert; -import java.io.File; import java.io.Serializable; import java.time.Duration; import java.util.ArrayList; @@ -62,12 +63,15 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; + /** * This test asserts Active-Passive fail-over with @@ -75,22 +79,16 @@ * Note that fail-over is happening while client threads are still writing * Finally the same key set correctness is asserted. */ -@RunWith(Parameterized.class) -public class BasicClusteredCacheOpsReplicationMultiThreadedTest extends ClusteredTests { +@RunWith(ParallelParameterized.class) +public class BasicClusteredCacheOpsReplicationMultiThreadedTest { private static final int NUM_OF_THREADS = 10; private static final int JOB_SIZE = 100; - private static final String RESOURCE_CONFIG = - "" - + "" - + "16" - + "" + - "\n"; - - private static PersistentCacheManager CACHE_MANAGER1; - private static PersistentCacheManager CACHE_MANAGER2; - private static Cache CACHE1; - private static Cache CACHE2; + + private PersistentCacheManager cacheManager1; + private PersistentCacheManager cacheManager2; + private Cache cache1; + private Cache cache2; @Parameters(name = "consistency={0}") public static Consistency[] data() { @@ -100,9 +98,13 @@ public static Consistency[] data() { @Parameter public Consistency cacheConsistency; - @ClassRule - public static Cluster CLUSTER = - newCluster(2).in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + @ClassRule @Rule + public static final ParallelTestCluster CLUSTER = new ParallelTestCluster(newCluster(2).in(clusterPath()) + .withServerHeap(512) + .withServiceFragment(offheapResource("primary-server-resource", 16)).build()); + + @Rule + public final TestName testName = new TestName(); private final Logger log = LoggerFactory.getLogger(getClass()); @@ -115,29 +117,26 @@ public static Consistency[] data() { @Before public void startServers() throws Exception { CLUSTER.getClusterControl().startAllServers(); - CLUSTER.getClusterControl().waitForActive(); - CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); final CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication")) .timeouts(TimeoutsBuilder.timeouts() // we need to give some time for the failover to occur .read(Duration.ofMinutes(1)) .write(Duration.ofMinutes(1))) - .autoCreate() - .defaultServerResource("primary-server-resource")); - CACHE_MANAGER1 = clusteredCacheManagerBuilder.build(true); - CACHE_MANAGER2 = clusteredCacheManagerBuilder.build(true); + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + cacheManager1 = clusteredCacheManagerBuilder.build(true); + cacheManager2 = clusteredCacheManagerBuilder.build(true); CacheConfiguration config = CacheConfigurationBuilder .newCacheConfigurationBuilder(Long.class, BlobValue.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES) .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) + .withService(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) .build(); - CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", config); - CACHE2 = CACHE_MANAGER2.createCache("clustered-cache", config); + cache1 = cacheManager1.createCache(testName.getMethodName(), config); + cache2 = cacheManager2.createCache(testName.getMethodName(), config); - caches = Arrays.asList(CACHE1, CACHE2); + caches = Arrays.asList(cache1, cache2); } @After @@ -146,19 +145,18 @@ public void tearDown() throws Exception { if(!unprocessed.isEmpty()) { log.warn("Tearing down with {} unprocess task", unprocessed); } - if(CACHE_MANAGER1 != null && CACHE_MANAGER1.getStatus() != Status.UNINITIALIZED) { - CACHE_MANAGER1.close(); + if(cacheManager1 != null && cacheManager1.getStatus() != Status.UNINITIALIZED) { + cacheManager1.close(); } - if(CACHE_MANAGER2 != null && CACHE_MANAGER2.getStatus() != Status.UNINITIALIZED) { - CACHE_MANAGER2.close(); - CACHE_MANAGER2.destroy(); + if(cacheManager2 != null && cacheManager2.getStatus() != Status.UNINITIALIZED) { + cacheManager2.close(); } } @Test(timeout=180000) public void testCRUD() throws Exception { Set universalSet = ConcurrentHashMap.newKeySet(); - List futures = new ArrayList<>(); + List> futures = new ArrayList<>(); caches.forEach(cache -> { for (int i = 0; i < NUM_OF_THREADS; i++) { @@ -171,10 +169,11 @@ public void testCRUD() throws Exception { //This step is to add values in local tier randomly to test invalidations happen correctly futures.add(executorService.submit(() -> universalSet.forEach(x -> { - CACHE1.get(x); - CACHE2.get(x); + cache1.get(x); + cache2.get(x); }))); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); drainTasks(futures); @@ -182,10 +181,10 @@ public void testCRUD() throws Exception { Set readKeysByCache1AfterFailOver = new HashSet<>(); Set readKeysByCache2AfterFailOver = new HashSet<>(); universalSet.forEach(x -> { - if (CACHE1.get(x) != null) { + if (cache1.get(x) != null) { readKeysByCache1AfterFailOver.add(x); } - if (CACHE2.get(x) != null) { + if (cache2.get(x) != null) { readKeysByCache2AfterFailOver.add(x); } }); @@ -199,7 +198,7 @@ public void testCRUD() throws Exception { @Test(timeout=180000) public void testBulkOps() throws Exception { Set universalSet = ConcurrentHashMap.newKeySet(); - List futures = new ArrayList<>(); + List> futures = new ArrayList<>(); caches.forEach(cache -> { for (int i = 0; i < NUM_OF_THREADS; i++) { @@ -214,11 +213,12 @@ public void testBulkOps() throws Exception { //This step is to add values in local tier randomly to test invalidations happen correctly futures.add(executorService.submit(() -> { universalSet.forEach(x -> { - CACHE1.get(x); - CACHE2.get(x); + cache1.get(x); + cache2.get(x); }); })); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); drainTasks(futures); @@ -226,10 +226,10 @@ public void testBulkOps() throws Exception { Set readKeysByCache1AfterFailOver = new HashSet<>(); Set readKeysByCache2AfterFailOver = new HashSet<>(); universalSet.forEach(x -> { - if (CACHE1.get(x) != null) { + if (cache1.get(x) != null) { readKeysByCache1AfterFailOver.add(x); } - if (CACHE2.get(x) != null) { + if (cache2.get(x) != null) { readKeysByCache2AfterFailOver.add(x); } }); @@ -245,7 +245,7 @@ public void testBulkOps() throws Exception { "dealing with in-flight invalidation reconstructed from reconnect data") @Test(timeout=180000) public void testClear() throws Exception { - List futures = new ArrayList<>(); + List> futures = new ArrayList<>(); Set universalSet = ConcurrentHashMap.newKeySet(); caches.forEach(cache -> { @@ -261,24 +261,25 @@ public void testClear() throws Exception { drainTasks(futures); universalSet.forEach(x -> { - CACHE1.get(x); - CACHE2.get(x); + cache1.get(x); + cache2.get(x); }); - Future clearFuture = executorService.submit(() -> CACHE1.clear()); + Future clearFuture = executorService.submit(() -> cache1.clear()); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); clearFuture.get(); - universalSet.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + universalSet.forEach(x -> assertThat(cache2.get(x), nullValue())); } - private void drainTasks(List futures) throws InterruptedException, java.util.concurrent.ExecutionException { + private void drainTasks(List> futures) throws InterruptedException, java.util.concurrent.ExecutionException { for (int i = 0; i < futures.size(); i++) { try { - futures.get(i).get(10, TimeUnit.SECONDS); + futures.get(i).get(60, TimeUnit.SECONDS); } catch (TimeoutException e) { fail("Stuck on number " + i); } @@ -286,6 +287,9 @@ private void drainTasks(List futures) throws InterruptedException, java. } private static class BlobValue implements Serializable { + + private static final long serialVersionUID = 1L; + private final byte[] data = new byte[10 * 1024]; } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java index 536e614e93..85528e82b8 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java @@ -18,12 +18,13 @@ import org.ehcache.Cache; import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.util.runners.ParallelParameterized; +import org.ehcache.clustered.util.ParallelTestCluster; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; @@ -33,14 +34,13 @@ import org.junit.After; import org.junit.Before; import org.junit.ClassRule; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TestName; import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; -import org.terracotta.testing.rules.Cluster; -import java.io.File; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; @@ -48,25 +48,21 @@ import java.util.Map; import java.util.Set; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; -@RunWith(Parameterized.class) -public class BasicClusteredCacheOpsReplicationTest extends ClusteredTests { - private static final String RESOURCE_CONFIG = - "" - + "" - + "16" - + "" + - "\n"; +@RunWith(ParallelParameterized.class) +public class BasicClusteredCacheOpsReplicationTest { - private static PersistentCacheManager CACHE_MANAGER; - private static Cache CACHE1; - private static Cache CACHE2; + private PersistentCacheManager cacheManager; + private Cache cacheOne; + private Cache cacheTwo; @Parameters(name = "consistency={0}") public static Consistency[] data() { @@ -76,45 +72,45 @@ public static Consistency[] data() { @Parameter public Consistency cacheConsistency; - @ClassRule - public static Cluster CLUSTER = - newCluster(2).in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + @ClassRule @Rule + public static final ParallelTestCluster CLUSTER = new ParallelTestCluster(newCluster(2).in(clusterPath()) + .withServerHeap(512) + .withServiceFragment(offheapResource("primary-server-resource", 32)).build()); + + @Rule + public final TestName testName = new TestName(); @Before public void startServers() throws Exception { CLUSTER.getClusterControl().startAllServers(); - CLUSTER.getClusterControl().waitForActive(); - CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); final CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/cm-replication")) .timeouts(TimeoutsBuilder.timeouts() // we need to give some time for the failover to occur .read(Duration.ofMinutes(1)) .write(Duration.ofMinutes(1))) - .autoCreate() - .defaultServerResource("primary-server-resource")); - CACHE_MANAGER = clusteredCacheManagerBuilder.build(true); + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + cacheManager = clusteredCacheManagerBuilder.build(true); CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .withService(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) .build(); - CACHE1 = CACHE_MANAGER.createCache("clustered-cache", config); - CACHE2 = CACHE_MANAGER.createCache("another-cache", config); + cacheOne = cacheManager.createCache(testName.getMethodName() + "-1", config); + cacheTwo = cacheManager.createCache(testName.getMethodName() + "-2", config); } @After - public void tearDown() throws Exception { - CACHE_MANAGER.close(); - CACHE_MANAGER.destroy(); + public void tearDown() { + cacheManager.close(); } @Test public void testCRUD() throws Exception { List> caches = new ArrayList<>(); - caches.add(CACHE1); - caches.add(CACHE2); + caches.add(cacheOne); + caches.add(cacheTwo); caches.forEach(x -> { x.put(1L, "The one"); x.put(2L, "The two"); @@ -127,6 +123,7 @@ public void testCRUD() throws Exception { x.remove(4L); }); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); caches.forEach(x -> { @@ -140,8 +137,8 @@ public void testCRUD() throws Exception { @Test public void testBulkOps() throws Exception { List> caches = new ArrayList<>(); - caches.add(CACHE1); - caches.add(CACHE2); + caches.add(cacheOne); + caches.add(cacheTwo); Map entriesMap = new HashMap<>(); entriesMap.put(1L, "one"); @@ -152,6 +149,7 @@ public void testBulkOps() throws Exception { entriesMap.put(6L, "six"); caches.forEach(cache -> cache.putAll(entriesMap)); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); Set keySet = entriesMap.keySet(); @@ -170,8 +168,8 @@ public void testBulkOps() throws Exception { @Test public void testCAS() throws Exception { List> caches = new ArrayList<>(); - caches.add(CACHE1); - caches.add(CACHE2); + caches.add(cacheOne); + caches.add(cacheTwo); caches.forEach(cache -> { assertThat(cache.putIfAbsent(1L, "one"), nullValue()); assertThat(cache.putIfAbsent(2L, "two"), nullValue()); @@ -179,6 +177,7 @@ public void testCAS() throws Exception { assertThat(cache.replace(3L, "another one", "yet another one"), is(false)); }); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); caches.forEach(cache -> { @@ -193,8 +192,8 @@ public void testCAS() throws Exception { public void testClear() throws Exception { List> caches = new ArrayList<>(); - caches.add(CACHE1); - caches.add(CACHE2); + caches.add(cacheOne); + caches.add(cacheTwo); Map entriesMap = new HashMap<>(); entriesMap.put(1L, "one"); @@ -216,13 +215,14 @@ public void testClear() throws Exception { assertThat(all.get(6L), is("six")); }); - CACHE1.clear(); - CACHE2.clear(); + cacheOne.clear(); + cacheTwo.clear(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); - keySet.forEach(x -> assertThat(CACHE1.get(x), nullValue())); - keySet.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + keySet.forEach(x -> assertThat(cacheOne.get(x), nullValue())); + keySet.forEach(x -> assertThat(cacheTwo.get(x), nullValue())); } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java index b755d1d046..3272330d39 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java @@ -18,11 +18,13 @@ import org.ehcache.Cache; import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.util.runners.ParallelParameterized; +import org.ehcache.clustered.util.ParallelTestCluster; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; @@ -33,15 +35,15 @@ import org.junit.Before; import org.junit.ClassRule; import org.junit.Ignore; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TestName; import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; -import org.terracotta.testing.rules.Cluster; -import java.io.File; import java.io.Serializable; +import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -51,29 +53,25 @@ import java.util.Set; import java.util.stream.LongStream; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; + /** * The point of this test is to assert proper data read after fail-over handling. */ -@RunWith(Parameterized.class) -public class BasicClusteredCacheOpsReplicationWithMultipleClientsTest extends ClusteredTests { - - private static final String RESOURCE_CONFIG = - "" - + "" - + "16" - + "" + - "\n"; +@RunWith(ParallelParameterized.class) +public class BasicClusteredCacheOpsReplicationWithMultipleClientsTest { - private static PersistentCacheManager CACHE_MANAGER1; - private static PersistentCacheManager CACHE_MANAGER2; - private static Cache CACHE1; - private static Cache CACHE2; + private PersistentCacheManager cacheManager1; + private PersistentCacheManager cacheManager2; + private Cache cache1; + private Cache cache2; @Parameters(name = "consistency={0}") public static Consistency[] data() { @@ -83,37 +81,38 @@ public static Consistency[] data() { @Parameter public Consistency cacheConsistency; - @ClassRule - public static Cluster CLUSTER = - newCluster(2).in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + @ClassRule @Rule + public static final ParallelTestCluster CLUSTER = new ParallelTestCluster(newCluster(2).in(clusterPath()) + .withServerHeap(512) + .withServiceFragment(offheapResource("primary-server-resource", 16)).build()); + + @Rule + public final TestName testName = new TestName(); @Before public void startServers() throws Exception { CLUSTER.getClusterControl().startAllServers(); - CLUSTER.getClusterControl().waitForActive(); - CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); final CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication")) - .autoCreate() - .defaultServerResource("primary-server-resource")); - CACHE_MANAGER1 = clusteredCacheManagerBuilder.build(true); - CACHE_MANAGER2 = clusteredCacheManagerBuilder.build(true); + .timeouts(TimeoutsBuilder.timeouts().read(Duration.ofSeconds(20)).write(Duration.ofSeconds(20))) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + cacheManager1 = clusteredCacheManagerBuilder.build(true); + cacheManager2 = clusteredCacheManagerBuilder.build(true); CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, BlobValue.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES) .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) + .withService(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) .build(); - CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", config); - CACHE2 = CACHE_MANAGER2.createCache("clustered-cache", config); + cache1 = cacheManager1.createCache(testName.getMethodName(), config); + cache2 = cacheManager2.createCache(testName.getMethodName(), config); } @After public void tearDown() throws Exception { - CACHE_MANAGER1.close(); - CACHE_MANAGER2.close(); - CACHE_MANAGER2.destroy(); + cacheManager1.close(); + cacheManager2.close(); } @Test(timeout=180000) @@ -122,29 +121,30 @@ public void testCRUD() throws Exception { LongStream longStream = random.longs(1000); Set added = new HashSet<>(); longStream.forEach(x -> { - CACHE1.put(x, new BlobValue()); + cache1.put(x, new BlobValue()); added.add(x); }); Set readKeysByCache2BeforeFailOver = new HashSet<>(); added.forEach(x -> { - if (CACHE2.get(x) != null) { + if (cache2.get(x) != null) { readKeysByCache2BeforeFailOver.add(x); } }); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); Set readKeysByCache1AfterFailOver = new HashSet<>(); added.forEach(x -> { - if (CACHE1.get(x) != null) { + if (cache1.get(x) != null) { readKeysByCache1AfterFailOver.add(x); } }); assertThat(readKeysByCache2BeforeFailOver.size(), greaterThanOrEqualTo(readKeysByCache1AfterFailOver.size())); - readKeysByCache1AfterFailOver.stream().filter(readKeysByCache2BeforeFailOver::contains).forEach(y -> assertThat(CACHE2.get(y), notNullValue())); + readKeysByCache1AfterFailOver.stream().filter(readKeysByCache2BeforeFailOver::contains).forEach(y -> assertThat(cache2.get(y), notNullValue())); } @@ -152,8 +152,8 @@ public void testCRUD() throws Exception { @Ignore //TODO: FIXME: FIX THIS RANDOMLY FAILING TEST public void testBulkOps() throws Exception { List> caches = new ArrayList<>(); - caches.add(CACHE1); - caches.add(CACHE2); + caches.add(cache1); + caches.add(cache2); Map entriesMap = new HashMap<>(); @@ -167,31 +167,32 @@ public void testBulkOps() throws Exception { Set readKeysByCache2BeforeFailOver = new HashSet<>(); keySet.forEach(x -> { - if (CACHE2.get(x) != null) { + if (cache2.get(x) != null) { readKeysByCache2BeforeFailOver.add(x); } }); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); Set readKeysByCache1AfterFailOver = new HashSet<>(); keySet.forEach(x -> { - if (CACHE1.get(x) != null) { + if (cache1.get(x) != null) { readKeysByCache1AfterFailOver.add(x); } }); assertThat(readKeysByCache2BeforeFailOver.size(), greaterThanOrEqualTo(readKeysByCache1AfterFailOver.size())); - readKeysByCache1AfterFailOver.stream().filter(readKeysByCache2BeforeFailOver::contains).forEach(y -> assertThat(CACHE2.get(y), notNullValue())); + readKeysByCache1AfterFailOver.stream().filter(readKeysByCache2BeforeFailOver::contains).forEach(y -> assertThat(cache2.get(y), notNullValue())); } @Test(timeout=180000) public void testClear() throws Exception { List> caches = new ArrayList<>(); - caches.add(CACHE1); - caches.add(CACHE2); + caches.add(cache1); + caches.add(cache2); Map entriesMap = new HashMap<>(); @@ -205,20 +206,28 @@ public void testClear() throws Exception { Set readKeysByCache2BeforeFailOver = new HashSet<>(); keySet.forEach(x -> { - if (CACHE2.get(x) != null) { + if (cache2.get(x) != null) { readKeysByCache2BeforeFailOver.add(x); } }); - CACHE1.clear(); + cache1.clear(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); - readKeysByCache2BeforeFailOver.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + if (cacheConsistency == Consistency.STRONG) { + readKeysByCache2BeforeFailOver.forEach(x -> assertThat(cache2.get(x), nullValue())); + } else { + readKeysByCache2BeforeFailOver.forEach(x -> assertThat(cache1.get(x), nullValue())); + } } private static class BlobValue implements Serializable { + + private static final long serialVersionUID = 1L; + private final byte[] data = new byte[10 * 1024]; } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithServersApiTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithServersApiTest.java new file mode 100644 index 0000000000..bc4167a89e --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithServersApiTest.java @@ -0,0 +1,124 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import java.net.InetSocketAddress; +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + + +public class BasicClusteredCacheOpsReplicationWithServersApiTest { + + private static PersistentCacheManager CACHE_MANAGER; + private static Cache CACHE1; + private static Cache CACHE2; + + @ClassRule + public static Cluster CLUSTER = newCluster(2).in(clusterPath()) + .withServerHeap(512) + .withServiceFragment(offheapResource("primary-server-resource", 16)).build(); + + @Before + public void setUp() throws Exception { + CLUSTER.getClusterControl().startAllServers(); + + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(getConfigBuilder() + .timeouts(TimeoutsBuilder.timeouts() // we need to give some time for the failover to occur + .read(Duration.ofMinutes(1)) + .write(Duration.ofMinutes(1))) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + CACHE_MANAGER = clusteredCacheManagerBuilder.build(true); + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .build(); + + CACHE1 = CACHE_MANAGER.createCache("clustered-cache", config); + CACHE2 = CACHE_MANAGER.createCache("another-cache", config); + } + + private ClusteringServiceConfigurationBuilder getConfigBuilder() { + String cacheManagerName = "cm-replication"; + List addresses = new ArrayList<>(); + for (String server : CLUSTER.getClusterHostPorts()) { + String[] hostPort = server.split(":"); + addresses.add(InetSocketAddress.createUnresolved(hostPort[0], Integer.parseInt(hostPort[1]))); + } + return ClusteringServiceConfigurationBuilder.cluster(addresses, cacheManagerName); + } + + @After + public void tearDown() throws Exception { + CACHE_MANAGER.close(); + CACHE_MANAGER.destroy(); + } + + @Test + public void testCRUD() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + caches.forEach(x -> { + x.put(1L, "The one"); + x.put(2L, "The two"); + x.put(1L, "Another one"); + x.put(3L, "The three"); + x.put(4L, "The four"); + assertThat(x.get(1L), equalTo("Another one")); + assertThat(x.get(2L), equalTo("The two")); + assertThat(x.get(3L), equalTo("The three")); + x.remove(4L); + }); + + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + + caches.forEach(x -> { + assertThat(x.get(1L), equalTo("Another one")); + assertThat(x.get(2L), equalTo("The two")); + assertThat(x.get(3L), equalTo("The three")); + assertThat(x.get(4L), nullValue()); + }); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index a2ed0c85ea..71f0150aaf 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -17,16 +17,16 @@ package org.ehcache.clustered.replication; import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLock; +import org.ehcache.clustered.util.ParallelTestCluster; +import org.ehcache.clustered.util.runners.Parallel; import org.ehcache.config.builders.CacheManagerBuilder; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; +import org.junit.Rule; import org.junit.Test; -import org.terracotta.testing.rules.Cluster; - -import java.io.File; +import org.junit.runner.RunWith; import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; @@ -34,40 +34,31 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; -public class BasicLifeCyclePassiveReplicationTest extends ClusteredTests { - private static final String RESOURCE_CONFIG = - "" - + "" - + "16" - + "" + - "\n"; +@RunWith(Parallel.class) +public class BasicLifeCyclePassiveReplicationTest { - @ClassRule - public static Cluster CLUSTER = - newCluster(2).in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + @ClassRule @Rule + public static final ParallelTestCluster CLUSTER = new ParallelTestCluster(newCluster(2).in(clusterPath()) + .withServerHeap(512) + .withServiceFragment(offheapResource("primary-server-resource", 16)).build()); @Before public void startServers() throws Exception { CLUSTER.getClusterControl().startAllServers(); - CLUSTER.getClusterControl().waitForActive(); - CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); - } - - @After - public void tearDown() throws Exception { - CLUSTER.getClusterControl().terminateActive(); } @Test public void testDestroyCacheManager() throws Exception { CacheManagerBuilder configBuilder = newCacheManagerBuilder().with(cluster(CLUSTER.getConnectionURI().resolve("/destroy-CM")) - .autoCreate().defaultServerResource("primary-server-resource")); + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); PersistentCacheManager cacheManager1 = configBuilder.build(true); PersistentCacheManager cacheManager2 = configBuilder.build(true); @@ -80,8 +71,8 @@ public void testDestroyCacheManager() throws Exception { e.printStackTrace(); } + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().waitForActive(); cacheManager1.createCache("test", newCacheConfigurationBuilder(Long.class, String.class, heap(10).with(clusteredDedicated(10, MB)))); } @@ -94,8 +85,8 @@ public void testDestroyLockEntity() throws Exception { VoltronReadWriteLock lock2 = new VoltronReadWriteLock(CLUSTER.newConnection(), "my-lock"); assertThat(lock2.tryWriteLock(), nullValue()); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().waitForActive(); hold1.unlock(); } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/DuplicateTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/DuplicateTest.java index 47b6c43122..913a8434b5 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/DuplicateTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/DuplicateTest.java @@ -17,7 +17,6 @@ import org.ehcache.Cache; import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; @@ -27,50 +26,45 @@ import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.Ehcache; -import org.ehcache.core.internal.resilience.ResilienceStrategy; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.StoreAccessException; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.terracotta.testing.rules.Cluster; -import java.io.File; -import java.lang.reflect.Field; import java.lang.reflect.Proxy; import java.time.Duration; import java.util.Arrays; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.Semaphore; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; +import static java.util.concurrent.TimeUnit.MINUTES; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assume.assumeThat; -public class DuplicateTest extends ClusteredTests { - - private static final String RESOURCE_CONFIG = - "" - + "" - + "512" - + "" + - "\n"; +public class DuplicateTest { private PersistentCacheManager cacheManager; @ClassRule public static Cluster CLUSTER = - newCluster(2).in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + newCluster(2).in(clusterPath()) + .withServerHeap(512) + .withServiceFragment(offheapResource("primary-server-resource", 512)).build(); @Before public void startServers() throws Exception { CLUSTER.getClusterControl().startAllServers(); - CLUSTER.getClusterControl().waitForActive(); - CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); } @After @@ -85,45 +79,56 @@ public void tearDown() throws Exception { public void duplicateAfterFailoverAreReturningTheCorrectResponse() throws Exception { CacheManagerBuilder builder = CacheManagerBuilder.newCacheManagerBuilder() .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI()) - .timeouts(TimeoutsBuilder.timeouts().write(Duration.ofSeconds(20))) - .autoCreate() - .defaultServerResource("primary-server-resource")) + .timeouts(TimeoutsBuilder.timeouts().write(Duration.ofSeconds(60))) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))) .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 128, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 10, MemoryUnit.MB))) + .withResilienceStrategy(failingResilienceStrategy()) + .withService(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); cacheManager = builder.build(true); Cache cache = cacheManager.getCache("cache", Integer.class, String.class); - switchResilienceStrategy(cache); int numEntries = 3000; - AtomicInteger currentEntry = new AtomicInteger(); //Perform put operations in another thread ExecutorService executorService = Executors.newSingleThreadExecutor(); try { - Future puts = executorService.submit((Runnable) () -> { - while (true) { - int i = currentEntry.getAndIncrement(); - if (i >= numEntries) { - break; + Semaphore failoverAllowed = new Semaphore(0); + Semaphore failoverComplete = new Semaphore(0); + Future puts = executorService.submit(() -> { + try { + for (int i = 0; i < numEntries; i++) { + if (i == 100) { + failoverAllowed.release(); + } + if (i == (numEntries - 100)) { + failoverComplete.acquire(); + } + cache.put(i, "value:" + i); } - cache.put(i, "value:" + i); + } catch (InterruptedException e) { + throw new AssertionError(e); } }); - while (currentEntry.get() < 100); // wait to make sure some entries are added before shutdown - // Failover to mirror when put & replication are in progress + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + assertThat(failoverAllowed.tryAcquire(1, MINUTES), is(true)); CLUSTER.getClusterControl().terminateActive(); + failoverComplete.release(); - puts.get(30, TimeUnit.SECONDS); + assertThat(puts::isDone, eventually().is(true)); + puts.get(); + + //if failover didn't interrupt puts then the test is 'moot' + assumeThat(cache.get(0), is(notNullValue())); //Verify cache entries on mirror for (int i = 0; i < numEntries; i++) { - assertThat(cache.get(i)).isEqualTo("value:" + i); + assertThat(cache.get(i), is("value:" + i)); } } finally { executorService.shutdownNow(); @@ -131,19 +136,25 @@ public void duplicateAfterFailoverAreReturningTheCorrectResponse() throws Except } - private void switchResilienceStrategy(Cache cache) throws Exception { - Field field = Ehcache.class.getDeclaredField("resilienceStrategy"); - field.setAccessible(true); - ResilienceStrategy newResilienceStrategy = (ResilienceStrategy) - Proxy.newProxyInstance(cache.getClass().getClassLoader(), + @SuppressWarnings("unchecked") + private ResilienceStrategy failingResilienceStrategy() { + return (ResilienceStrategy) + Proxy.newProxyInstance(getClass().getClassLoader(), new Class[] { ResilienceStrategy.class}, (proxy, method, args) -> { - System.out.println("In there!!!!!!!!!!!!!!!!!!!!!!!!!"); - fail("Failure on " + method.getName(), findStoreAccessException(args)); // 1 is always the exception - return null; - }); + if(method.getName().endsWith("Failure")) { + throw new AssertionError("Failure on " + method.getName(), findStoreAccessException(args)); // one param is always a SAE + } - field.set(cache, newResilienceStrategy); + switch(method.getName()) { + case "hashCode": + return 0; + case "equals": + return proxy == args[0]; + default: + throw new AssertionError("Unexpected method call: " + method.getName()); + } + }); } private StoreAccessException findStoreAccessException(Object[] objects) { @@ -152,7 +163,6 @@ private StoreAccessException findStoreAccessException(Object[] objects) { return (StoreAccessException) o; } } - fail("There should be an exception somewhere in " + Arrays.toString(objects)); - return null; + throw new AssertionError("There should be an exception somewhere in " + Arrays.toString(objects)); } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/OversizedCacheOpsPassiveTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/OversizedCacheOpsPassiveTest.java new file mode 100644 index 0000000000..f051d34388 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/OversizedCacheOpsPassiveTest.java @@ -0,0 +1,114 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import java.util.Arrays; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CountDownLatch; +import org.junit.Ignore; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; + + +/** + * Test the effect of cache eviction during passive sync. + */ +@Ignore("OOME on build slaves due to high memory requirements") +public class OversizedCacheOpsPassiveTest { + private static final int MAX_PUTS = 3000; + private static final int MAX_SWITCH_OVER = 3; + private static final int PER_ELEMENT_SIZE = 256 * 1024; + private static final int CACHE_SIZE_IN_MB = 2; + private static final String LARGE_VALUE = buildLargeString(); + + @ClassRule + public static Cluster CLUSTER = + newCluster(2).in(clusterPath()) + .withSystemProperty("ehcache.sync.data.gets.threshold", "2") + .withServiceFragment(offheapResource("primary-server-resource", 2)) + .withServerHeap(2048) + .build(); + + @Test + public void oversizedPuts() throws Exception { + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")) + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); + CountDownLatch syncLatch = new CountDownLatch(2); + + CompletableFuture f1 = CompletableFuture.runAsync(() -> doPuts(clusteredCacheManagerBuilder, syncLatch)); + CompletableFuture f2 = CompletableFuture.runAsync(() -> doPuts(clusteredCacheManagerBuilder, syncLatch)); + + syncLatch.await(); + for (int i = 0; i < MAX_SWITCH_OVER; i++) { + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().startOneServer(); + Thread.sleep(2000); + } + + f1.get(); + f2.get(); + } + + private void doPuts(CacheManagerBuilder clusteredCacheManagerBuilder, + CountDownLatch syncLatch) { + try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", CACHE_SIZE_IN_MB, MemoryUnit.MB))) + .build(); + + syncLatch.countDown(); + Cache cache = cacheManager.createCache("clustered-cache", config); + for (long i = 0; i < MAX_PUTS; i++) { + if (i % 1000 == 0) { + // a small pause + try { + Thread.sleep(10); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + } + cache.put(i, LARGE_VALUE); + } + } + } + + private static String buildLargeString() { + char[] filler = new char[PER_ELEMENT_SIZE]; + Arrays.fill(filler, '0'); + return new String(filler); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java index 6f27031f6d..54b3f1635a 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -18,7 +18,6 @@ import org.ehcache.Cache; import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.config.CacheConfiguration; @@ -32,45 +31,37 @@ import org.junit.Test; import org.terracotta.testing.rules.Cluster; -import java.io.File; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; -import static com.google.code.tempusfugit.temporal.Duration.seconds; -import static com.google.code.tempusfugit.temporal.Timeout.timeout; -import static com.google.code.tempusfugit.temporal.WaitFor.waitOrTimeout; +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.ehcache.testing.StandardTimeouts.eventually; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; -import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster; +import static org.hamcrest.Matchers.notNullValue; -public class PassiveSyncTest extends ClusteredTests { - private static final String RESOURCE_CONFIG = - "" - + "" - + "16" - + "" + - "\n"; + +public class PassiveSyncTest { @ClassRule - public static Cluster CLUSTER = - newCluster(2).in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build(); + public static Cluster CLUSTER = newCluster(2).in(clusterPath()) + .withServiceFragment(offheapResource("primary-server-resource", 16)) + .withServerHeap(512) + .build(); @Before public void startServers() throws Exception { - CLUSTER.getClusterControl().startAllServers(); - CLUSTER.getClusterControl().waitForActive(); - CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateOnePassive(); } @Test(timeout = 150000) public void testSync() throws Exception { - CLUSTER.getClusterControl().terminateOnePassive(); - final CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/op-sync")) - .autoCreate() - .defaultServerResource("primary-server-resource")); + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); cacheManager.init(); @@ -88,12 +79,10 @@ public void testSync() throws Exception { CLUSTER.getClusterControl().startOneServer(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().waitForActive(); - // Sometimes the new passive believes there is a second connection and we have to wait for the full reconnect window before getting a result - waitOrTimeout(() -> "value-5".equals(cache.get(-5L)), timeout(seconds(130))); - for (long i = -4; i < 5; i++) { + assertThat(() -> cache.get(0L), eventually().matches(notNullValue())); + for (long i = -5; i < 5; i++) { assertThat(cache.get(i), equalTo("value" + i)); } } finally { @@ -104,13 +93,10 @@ public void testSync() throws Exception { @Ignore @Test public void testLifeCycleOperationsOnSync() throws Exception { - CLUSTER.getClusterControl().terminateOnePassive(); - final CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/lifecycle-sync")) - .autoCreate() - .defaultServerResource("primary-server-resource")); + .autoCreate(server -> server.defaultServerResource("primary-server-resource"))); try (PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true)) { CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/util/BeforeAll.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/BeforeAll.java new file mode 100644 index 0000000000..5b261dbc3c --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/BeforeAll.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.util; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Port of Junit 5 @BeforeAll + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@Inherited +public @interface BeforeAll { +} + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/util/BeforeAllRule.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/BeforeAllRule.java new file mode 100644 index 0000000000..9267fe7c5d --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/BeforeAllRule.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.util; + +import org.junit.rules.ExternalResource; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +import java.lang.reflect.Method; +import java.util.Comparator; +import java.util.List; +import java.util.WeakHashMap; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * @author Mathieu Carbou + */ +public class BeforeAllRule extends ExternalResource { + + private static WeakHashMap, Boolean> ran = new WeakHashMap<>(); + + private final Object test; + + public BeforeAllRule(Object test) { + this.test = test; + } + + @Override + public Statement apply(Statement base, Description description) { + ran.putIfAbsent(description.getTestClass(), Boolean.FALSE); + return super.apply(base, description); + } + + @Override + protected void before() throws Throwable { + if (ran.replace(test.getClass(), Boolean.FALSE, Boolean.TRUE)) { + List list = Stream.of(test.getClass().getMethods()) + .filter(m -> m.isAnnotationPresent(BeforeAll.class)) + .sorted(Comparator.comparing(Method::getName)) + .collect(Collectors.toList()); + for (Method method : list) { + method.invoke(test); + } + } + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/util/ParallelTestCluster.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/ParallelTestCluster.java new file mode 100644 index 0000000000..6fb37df56a --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/ParallelTestCluster.java @@ -0,0 +1,182 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.util; + +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; +import org.terracotta.connection.Connection; +import org.terracotta.connection.ConnectionException; +import org.terracotta.passthrough.IClusterControl; +import org.terracotta.testing.rules.Cluster; + +import java.net.URI; +import java.util.concurrent.Phaser; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +public class ParallelTestCluster implements TestRule { + + private final Cluster cluster; + private final IClusterControl control; + private final AtomicReference nextTask = new AtomicReference<>(); + + private final Phaser membership = new Phaser() { + @Override + protected boolean onAdvance(int phase, int registeredParties) { + activeCycle.bulkRegister(registeredParties); + return false; + } + }; + private final Phaser activeCycle = new Phaser() { + @Override + protected boolean onAdvance(int phase, int registeredParties) { + return false; + } + }; + + public ParallelTestCluster(Cluster cluster) { + this.cluster = cluster; + + IClusterControl underlyingControl = cluster.getClusterControl(); + this.control = new IClusterControl() { + @Override + public void waitForActive() throws Exception { + underlyingControl.waitForActive(); + } + + @Override + public void waitForRunningPassivesInStandby() throws Exception { + underlyingControl.waitForRunningPassivesInStandby(); + } + + @Override + public void startOneServer() { + request(ClusterTask.START_ONE_SERVER); + } + + @Override + public void startAllServers() { + request(ClusterTask.START_ALL_SERVERS); + } + + @Override + public void terminateActive() { + request(ClusterTask.TERMINATE_ACTIVE); + } + + @Override + public void terminateOnePassive() { + request(ClusterTask.TERMINATE_ONE_PASSIVE); + } + + @Override + public void terminateAllServers() { + request(ClusterTask.TERMINATE_ALL_SERVERS); + } + + private void request(ClusterTask task) { + try { + if (nextTask.compareAndSet(null, task)) { + activeCycle.awaitAdvanceInterruptibly(activeCycle.arrive()); + nextTask.getAndSet(null).accept(underlyingControl); + activeCycle.awaitAdvanceInterruptibly(activeCycle.arrive()); + } else { + ClusterTask requestedTask = nextTask.get(); + if (requestedTask.equals(task)) { + activeCycle.awaitAdvanceInterruptibly(activeCycle.arrive()); + activeCycle.awaitAdvanceInterruptibly(activeCycle.arrive()); + } else { + throw new AssertionError("Existing requested task is " + requestedTask); + } + } + } catch (InterruptedException e) { + throw new AssertionError(e); + } + } + }; + } + + public URI getConnectionURI() { + return cluster.getConnectionURI(); + } + + public String[] getClusterHostPorts() { + return cluster.getClusterHostPorts(); + } + + public Connection newConnection() throws ConnectionException { + return cluster.newConnection(); + } + + public IClusterControl getClusterControl() { + return control; + } + + @Override + public Statement apply(Statement base, Description description) { + if (description.isSuite()) { + return cluster.apply(base, description); + } else if (description.isTest()) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + membership.register(); + Thread.sleep(100); + membership.awaitAdvanceInterruptibly(membership.arrive()); + try { + activeCycle.awaitAdvanceInterruptibly(activeCycle.arrive()); + try { + base.evaluate(); + } finally { + activeCycle.arriveAndDeregister(); + } + } finally { + membership.arriveAndDeregister(); + } + } + }; + } else { + return base; + } + } + + enum ClusterTask implements Consumer { + START_ONE_SERVER(IClusterControl::startOneServer), + START_ALL_SERVERS(IClusterControl::startAllServers), + TERMINATE_ACTIVE(IClusterControl::terminateActive), + TERMINATE_ONE_PASSIVE(IClusterControl::terminateOnePassive), + TERMINATE_ALL_SERVERS(IClusterControl::terminateAllServers); + + private final Task task; + + ClusterTask(Task task) { + this.task = task; + } + + public void accept(IClusterControl control) { + try { + task.run(control); + } catch (Exception e) { + throw new AssertionError(e); + } + } + + interface Task { + void run(IClusterControl control) throws Exception; + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/util/TCPProxyManager.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/TCPProxyManager.java new file mode 100644 index 0000000000..66bdea9571 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/TCPProxyManager.java @@ -0,0 +1,117 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.util; + +import com.tc.net.proxy.TCPProxy; + +import org.terracotta.utilities.test.net.PortManager; + +import java.net.InetAddress; +import java.net.URI; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import static java.util.Collections.unmodifiableList; + +/** + * Manages creation and use of {@link TCPProxy} instances for a collection of Terracotta servers. + */ +public class TCPProxyManager implements AutoCloseable { + private static final String STRIPE_SEPARATOR = ","; + + private final List proxyPorts; + private final List proxies; + + private TCPProxyManager(List proxyPorts, List proxies) { + this.proxyPorts = unmodifiableList(proxyPorts); + this.proxies = unmodifiableList(proxies); + } + + /** + * Creates a new {@code TCPProxyManager} instance holding {@link TCPProxy} instances for + * the endpoints represented in the specified connection URI. + *

+ * This method creates {@code TCPProxy} instances for servers on {@code localhost}. + *

+ * A reference to the returned {@code TCPProxyManager} must be maintained for the duration + * of the use of the {@code TCPProxy} instances contained therein -- allowing the + * {@code TCPProxyManager} to become weakly-referenced while the proxies are in + * active use could result in random connection-related failures. + * + * @param connectionURI the {@code terracotta} connection URI + * @return a new {@code TCPProxyManager} instance with a {@code TCPProxy} for each endpoint + * @throws Exception if + */ + public static TCPProxyManager create(URI connectionURI) throws Exception { + List primaryPorts = parsePorts(connectionURI); + List proxyPorts = PortManager.getInstance().reservePorts(primaryPorts.size()); + + List proxies = new ArrayList<>(primaryPorts.size()); + InetAddress host = InetAddress.getByName("localhost"); + try { + for (int i = 0; i < primaryPorts.size(); i++) { + TCPProxy proxy = new TCPProxy(proxyPorts.get(i).port(), host, primaryPorts.get(i), 0L, false, null); + proxies.add(proxy); + proxy.start(); + } + } catch (Exception e) { + proxyPorts.forEach(PortManager.PortRef::close); + throw e; + } + + return new TCPProxyManager(proxyPorts, proxies); + } + + /** + * Returns the URI to use for the proxy connection to the Terracotta cluster. + * @return the URI for connection to the Terracotta cluster via the allocated {@link TCPProxy} instances + */ + public URI getURI() { + String uri = proxyPorts.stream() + .map(portRef -> "localhost:" + portRef.port()) + .collect(Collectors.joining(STRIPE_SEPARATOR, "terracotta://", "")); + + return URI.create(uri); + } + + /** + * Sets the delay for each allocated {@link TCPProxy} instance. + * @param delay the non-negative delay + */ + public void setDelay(long delay) { + proxies.forEach(p -> p.setDelay(delay)); + } + + /** + * Stops each allocated {@link TCPProxy} instance and releases the allocated proxy ports. + */ + @Override + public void close() { + proxies.forEach(TCPProxy::stop); + proxyPorts.forEach(PortManager.PortRef::close); + } + + private static List parsePorts(URI connectionURI) { + String withoutProtocol = connectionURI.toString().substring(13); + return Arrays.stream(withoutProtocol.split(STRIPE_SEPARATOR)) + .map(stripe -> stripe.substring(stripe.indexOf(":") + 1)) + .mapToInt(Integer::parseInt) + .boxed() + .collect(Collectors.toList()); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/util/TestCacheLoaderWriter.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/TestCacheLoaderWriter.java new file mode 100644 index 0000000000..26d7f691ff --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/TestCacheLoaderWriter.java @@ -0,0 +1,44 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.util; + +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +import java.util.concurrent.ConcurrentMap; + +public class TestCacheLoaderWriter implements CacheLoaderWriter { + + private final ConcurrentMap sor; + + public TestCacheLoaderWriter(ConcurrentMap sor) { + this.sor = sor; + } + + @Override + public String load(Long key) throws Exception { + return sor.get(key); + } + + @Override + public void write(Long key, String value) throws Exception { + sor.put(key, value); + } + + @Override + public void delete(Long key) throws Exception { + sor.remove(key); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/ExecutorScheduler.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/ExecutorScheduler.java new file mode 100644 index 0000000000..6f16641557 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/ExecutorScheduler.java @@ -0,0 +1,62 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.util.runners; + +import org.junit.runners.model.RunnerScheduler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; + +public class ExecutorScheduler implements RunnerScheduler { + + private static final Logger LOGGER = LoggerFactory.getLogger(ExecutorScheduler.class); + + public final Supplier executorSupplier; + public final AtomicReference executor = new AtomicReference<>(); + + public ExecutorScheduler(Supplier executorSupplier) { + this.executorSupplier = executorSupplier; + } + + @Override + public void schedule(Runnable childStatement) { + ExecutorService executorService; + while ((executorService = executor.get()) == null && !executor.compareAndSet(null, (executorService = executorSupplier.get()))) { + executorService.shutdown(); + } + executorService.execute(childStatement); + } + + @Override + public void finished() { + ExecutorService departing = executor.getAndSet(null); + departing.shutdown(); + try { + if (!departing.awaitTermination(1, TimeUnit.DAYS)) { + throw new AssertionError(new TimeoutException()); + } + } catch (InterruptedException e) { + List runnables = departing.shutdownNow(); + LOGGER.warn("Forcibly terminating execution of scheduled test tasks due to interrupt (" + runnables.size() + " tasks remain unscheduled)"); + } + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/Parallel.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/Parallel.java new file mode 100644 index 0000000000..2876eeaffd --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/Parallel.java @@ -0,0 +1,29 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.util.runners; + +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.model.InitializationError; + +import static java.util.concurrent.Executors.newCachedThreadPool; + +public class Parallel extends BlockJUnit4ClassRunner { + + public Parallel(Class klass) throws InitializationError { + super(klass); + setScheduler(new ExecutorScheduler(() -> newCachedThreadPool(r -> new Thread(r, "TestRunner-Thread-" + klass)))); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/ParallelParameterized.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/ParallelParameterized.java new file mode 100644 index 0000000000..3c64e2ba32 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/util/runners/ParallelParameterized.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.util.runners; + +import org.junit.runners.Parameterized; +import org.junit.runners.ParentRunner; + +import static java.util.concurrent.Executors.newCachedThreadPool; + +public class ParallelParameterized extends Parameterized { + + public ParallelParameterized(Class klass) throws Throwable { + super(klass); + setScheduler(new ExecutorScheduler(() -> newCachedThreadPool(r -> new Thread(r, "TestRunner-Thread-" + klass)))); + getChildren().forEach(child -> { + if (child instanceof ParentRunner) { + ((ParentRunner) child).setScheduler(new ExecutorScheduler(() -> newCachedThreadPool(r -> new Thread(r, "TestRunner-Thread-" + r.toString())))); + } + }); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindMultiClientTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindMultiClientTest.java new file mode 100644 index 0000000000..7f30482369 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindMultiClientTest.java @@ -0,0 +1,87 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.writebehind; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; + + +public class BasicClusteredWriteBehindMultiClientTest extends WriteBehindTestBase { + + @ClassRule + public static Cluster CLUSTER = + newCluster().in(clusterPath()).withServiceFragment(RESOURCE_CONFIG).build(); + + private PersistentCacheManager cacheManager1; + private PersistentCacheManager cacheManager2; + + private Cache client1; + private Cache client2; + + @Before + public void setUp() throws Exception { + super.setUp(); + + CLUSTER.getClusterControl().startAllServers(); + + cacheManager1 = createCacheManager(CLUSTER.getConnectionURI()); + cacheManager2 = createCacheManager(CLUSTER.getConnectionURI()); + + client1 = cacheManager1.getCache(testName.getMethodName(), Long.class, String.class); + client2 = cacheManager2.getCache(testName.getMethodName(), Long.class, String.class); + } + + @After + public void tearDown() throws Exception { + if (cacheManager1 != null) { + cacheManager1.close(); + } + + if (cacheManager2 != null) { + cacheManager2.close(); + cacheManager2.destroy(); + } + } + + @Test + public void testWriteBehindMultipleClients() throws Exception { + client1.put(KEY, "The one from client1"); + client2.put(KEY, "The one from client2"); + assertValue(client1, "The one from client2"); + client1.remove(KEY); + client2.put(KEY, "The one from client2"); + client1.put(KEY, "The one from client1"); + assertValue(client2, "The one from client1"); + client2.remove(KEY); + assertValue(client1, null); + client1.put(KEY, "The one from client1"); + client1.put(KEY, "The one one from client1"); + client2.remove(KEY); + client2.put(KEY, "The one from client2"); + client2.put(KEY, "The one one from client2"); + + checkValueFromLoaderWriter(client1, "The one one from client2"); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindTest.java new file mode 100644 index 0000000000..69d8d7234b --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindTest.java @@ -0,0 +1,130 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.writebehind; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.Timeout; +import org.terracotta.testing.rules.Cluster; + +import java.lang.management.ManagementFactory; +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; +import java.util.Arrays; +import java.util.concurrent.TimeUnit; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.notNullValue; + + +public class BasicClusteredWriteBehindTest extends WriteBehindTestBase { + + @Rule + public Timeout timeout = new Timeout(1, TimeUnit.MINUTES); + + private boolean doThreadDump = true; + + @ClassRule + public static Cluster CLUSTER = + newCluster().in(clusterPath()).withServiceFragment(RESOURCE_CONFIG).build(); + + private PersistentCacheManager cacheManager; + private Cache cache; + + @Before + public void setUp() throws Exception { + super.setUp(); + + CLUSTER.getClusterControl().startAllServers(); + + cacheManager = createCacheManager(CLUSTER.getConnectionURI()); + cache = cacheManager.getCache(testName.getMethodName(), Long.class, String.class); + } + + @After + public void tearDown() throws Exception { + if (doThreadDump) { + System.out.println("Performing thread dump"); + ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); + ThreadInfo[] threadInfos = threadMXBean.dumpAllThreads(true, true); + Arrays.stream(threadInfos).forEach(System.out::println); + } + + if (cacheManager != null) { + cacheManager.close(); + cacheManager.destroy(); + } + } + + @Test + public void testBasicClusteredWriteBehind() throws Exception { + for (int i = 0; i < 10; i++) { + cache.put(KEY, String.valueOf(i)); + } + + assertValue(cache, "9"); + + checkValueFromLoaderWriter(cache, "9"); + + doThreadDump = false; + } + + @Test + public void testClusteredWriteBehindCAS() throws Exception { + cache.putIfAbsent(KEY, "First value"); + assertValue(cache,"First value"); + cache.putIfAbsent(KEY, "Second value"); + assertValue(cache, "First value"); + cache.put(KEY, "First value again"); + assertValue(cache, "First value again"); + cache.replace(KEY, "Replaced First value"); + assertValue(cache, "Replaced First value"); + cache.replace(KEY, "Replaced First value", "Replaced First value again"); + assertValue(cache, "Replaced First value again"); + cache.replace(KEY, "Replaced First", "Tried Replacing First value again"); + assertValue(cache, "Replaced First value again"); + cache.remove(KEY, "Replaced First value again"); + assertValue(cache, null); + cache.replace(KEY, "Trying to replace value"); + assertValue(cache, null); + cache.put(KEY, "new value"); + assertValue(cache, "new value"); + cache.remove(KEY); + + checkValueFromLoaderWriter(cache, null); + + doThreadDump = false; + } + + @Test + public void testClusteredWriteBehindLoading() throws Exception { + cache.put(KEY, "Some value"); + checkValueFromLoaderWriter(cache, "Some value"); + cache.clear(); + + assertThat(cache.get(KEY), notNullValue()); + + doThreadDump = false; + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindWithPassiveMultiClientTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindWithPassiveMultiClientTest.java new file mode 100644 index 0000000000..0f17ec636b --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindWithPassiveMultiClientTest.java @@ -0,0 +1,96 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.writebehind; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.Cluster; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; + + +public class BasicClusteredWriteBehindWithPassiveMultiClientTest extends WriteBehindTestBase { + + @ClassRule + public static Cluster CLUSTER = + newCluster(2).in(clusterPath()).withServiceFragment(RESOURCE_CONFIG).build(); + + private PersistentCacheManager cacheManager1; + private PersistentCacheManager cacheManager2; + + private Cache client1; + private Cache client2; + + @Before + public void setUp() throws Exception { + super.setUp(); + + CLUSTER.getClusterControl().startAllServers(); + + cacheManager1 = createCacheManager(CLUSTER.getConnectionURI()); + cacheManager2 = createCacheManager(CLUSTER.getConnectionURI()); + + client1 = cacheManager1.getCache(testName.getMethodName(), Long.class, String.class); + client2 = cacheManager2.getCache(testName.getMethodName(), Long.class, String.class); + } + + @After + public void tearDown() throws Exception { + if (cacheManager1 != null) { + cacheManager1.close(); + } + + if (cacheManager2 != null) { + cacheManager2.close(); + cacheManager2.destroy(); + } + } + + @Test + public void testWriteBehindMultipleClients() throws Exception { + client1.put(KEY, "The one from client1"); + client2.put(KEY, "The one from client2"); + assertValue(client1, "The one from client2"); + client1.remove(KEY); + client2.put(KEY, "The one from client2"); + client1.put(KEY, "The one from client1"); + assertValue(client2, "The one from client1"); + client2.remove(KEY); + assertValue(client1, null); + client1.put(KEY, "The one from client1"); + client1.put(KEY, "The one one from client1"); + assertValue(client2, "The one one from client1"); + client2.remove(KEY); + assertValue(client1, null); + client2.put(KEY, "The one from client2"); + client2.put(KEY, "The one one from client2"); + assertValue(client1, "The one one from client2"); + + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + + assertValue(client1, "The one one from client2"); + assertValue(client2, "The one one from client2"); + + checkValueFromLoaderWriter(client1, "The one one from client2"); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindWithPassiveTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindWithPassiveTest.java new file mode 100644 index 0000000000..d7a12d4bd0 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/BasicClusteredWriteBehindWithPassiveTest.java @@ -0,0 +1,104 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.writebehind; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.util.ParallelTestCluster; +import org.ehcache.clustered.util.runners.Parallel; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; + +import static org.ehcache.testing.StandardCluster.clusterPath; +import static org.ehcache.testing.StandardCluster.newCluster; + + +@RunWith(Parallel.class) +public class BasicClusteredWriteBehindWithPassiveTest extends WriteBehindTestBase { + + @ClassRule @Rule + public static final ParallelTestCluster CLUSTER = new ParallelTestCluster( + newCluster(2).in(clusterPath()).withServiceFragment(RESOURCE_CONFIG).build() + ); + + private PersistentCacheManager cacheManager; + private Cache cache; + + @Before + public void setUp() throws Exception { + super.setUp(); + + CLUSTER.getClusterControl().startAllServers(); + + cacheManager = createCacheManager(CLUSTER.getConnectionURI()); + cache = cacheManager.getCache(testName.getMethodName(), Long.class, String.class); + } + + @After + public void tearDown() throws Exception { + if (cacheManager != null) { + cacheManager.close(); + } + } + + @Test + public void testBasicClusteredWriteBehind() throws Exception { + for (int i = 0; i < 10; i++) { + cache.put(KEY, String.valueOf(i)); + } + + assertValue(cache, "9"); + + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + + assertValue(cache, "9"); + checkValueFromLoaderWriter(cache, String.valueOf(9)); + } + + @Test + public void testClusteredWriteBehindCAS() throws Exception { + cache.putIfAbsent(KEY, "First value"); + assertValue(cache,"First value"); + cache.putIfAbsent(KEY, "Second value"); + assertValue(cache, "First value"); + cache.put(KEY, "First value again"); + assertValue(cache, "First value again"); + cache.replace(KEY, "Replaced First value"); + assertValue(cache, "Replaced First value"); + cache.replace(KEY, "Replaced First value", "Replaced First value again"); + assertValue(cache, "Replaced First value again"); + cache.replace(KEY, "Replaced First", "Tried Replacing First value again"); + assertValue(cache, "Replaced First value again"); + cache.remove(KEY, "Replaced First value again"); + assertValue(cache, null); + cache.replace(KEY, "Trying to replace value"); + assertValue(cache, null); + cache.put(KEY, "new value"); + assertValue(cache, "new value"); + + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + + assertValue(cache, "new value"); + checkValueFromLoaderWriter(cache,"new value"); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/RecordingLoaderWriter.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/RecordingLoaderWriter.java new file mode 100644 index 0000000000..c24a9736f6 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/RecordingLoaderWriter.java @@ -0,0 +1,68 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.writebehind; + +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class RecordingLoaderWriter implements CacheLoaderWriter { + + private final Map> records = new HashMap<>(); + + @Override + public synchronized V load(K key) { + List list = records.get(key); + return list == null ? null : list.get(list.size() - 1); + } + + @Override + public synchronized void write(K key, V value) { + record(key, value); + } + + @Override + public synchronized void delete(K key) { + record(key, null); + } + + @Override + public synchronized Map loadAll(Iterable keys) throws Exception { + return CacheLoaderWriter.super.loadAll(keys); + } + + @Override + public void writeAll(Iterable> entries) throws Exception { + CacheLoaderWriter.super.writeAll(entries); + } + + @Override + public void deleteAll(Iterable keys) throws Exception { + CacheLoaderWriter.super.deleteAll(keys); + } + + private void record(K key, V value) { + records.computeIfAbsent(key, k -> new ArrayList<>()).add(value); + } + + synchronized Map> getRecords() { + return Collections.unmodifiableMap(records); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/WriteBehindTestBase.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/WriteBehindTestBase.java new file mode 100644 index 0000000000..9546a6bf96 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/writebehind/WriteBehindTestBase.java @@ -0,0 +1,115 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.writebehind; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.builders.WriteBehindConfigurationBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.resilience.ThrowingResilienceStrategy; +import org.junit.Before; +import org.junit.Rule; +import org.junit.rules.TestName; + +import java.net.URI; +import java.time.Duration; +import java.util.List; +import java.util.Map; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.testing.StandardCluster.offheapResource; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class WriteBehindTestBase { + + static final String RESOURCE_CONFIG = offheapResource("primary-server-resource", 64); + + static final long KEY = 1L; + + private static final String FLUSH_QUEUE_MARKER = "FLUSH_QUEUE"; + + @Rule + public final TestName testName = new TestName(); + + private RecordingLoaderWriter loaderWriter; + + @Before + public void setUp() throws Exception { + loaderWriter = new RecordingLoaderWriter<>(); + } + + void checkValueFromLoaderWriter(Cache cache, + String expected) throws Exception { + tryFlushingUpdatesToSOR(cache); + + Map> records = loaderWriter.getRecords(); + List keyRecords = records.get(KEY); + + int index = keyRecords.size() - 1; + while (index >= 0 && keyRecords.get(index) != null && keyRecords.get(index).equals(FLUSH_QUEUE_MARKER)) { + index--; + } + + assertThat(keyRecords.get(index), is(expected)); + } + + private void tryFlushingUpdatesToSOR(Cache cache) throws Exception { + int retryCount = 1000; + while (retryCount-- != 0) { + cache.put(KEY, FLUSH_QUEUE_MARKER); + Thread.sleep(100); + String loadedValue = loaderWriter.load(KEY); + if (loadedValue != null && loadedValue.equals(FLUSH_QUEUE_MARKER)) { + return; + } + } + throw new AssertionError("Couldn't flush updates to SOR after " + retryCount + " tries"); + } + + void assertValue(Cache cache, String value) { + assertThat(cache.get(KEY), is(value)); + } + + PersistentCacheManager createCacheManager(URI clusterUri) { + CacheConfiguration cacheConfiguration = + newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .withLoaderWriter(loaderWriter) + .withService(WriteBehindConfigurationBuilder.newUnBatchedWriteBehindConfiguration()) + .withResilienceStrategy(new ThrowingResilienceStrategy<>()) + .withService(new ClusteredStoreConfiguration(Consistency.STRONG)) + .build(); + + return CacheManagerBuilder + .newCacheManagerBuilder() + .with(cluster(clusterUri.resolve("/cm-wb")).timeouts(TimeoutsBuilder.timeouts().read(Duration.ofMinutes(1)).write(Duration.ofMinutes(1))).autoCreate(c -> c)) + .withCache(testName.getMethodName(), cacheConfiguration) + .build(true); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/testing/ExternalTests.java b/clustered/integration-test/src/test/java/org/ehcache/testing/ExternalTests.java new file mode 100644 index 0000000000..593a907158 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/testing/ExternalTests.java @@ -0,0 +1,191 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.testing; + +import org.junit.runner.Description; +import org.junit.runner.Request; +import org.junit.runner.manipulation.Filter; +import org.junit.runner.notification.RunNotifier; +import org.junit.runners.ParentRunner; +import org.junit.runners.model.InitializationError; +import org.junit.runners.model.TestClass; + +import java.io.IOException; +import java.io.InputStream; +import java.lang.annotation.Annotation; +import java.lang.annotation.Repeatable; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.reflect.Modifier; +import java.net.URL; +import java.util.ArrayList; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarInputStream; +import java.util.stream.Collectors; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; + +public class ExternalTests extends ParentRunner { + + private final List children; + + public ExternalTests(Class testClass) throws InitializationError, IOException, ClassNotFoundException { + super(testClass); + this.children = singletonList(parseRequest(getTestClass(), parseFilter(getTestClass()))); + } + + @Override + protected List getChildren() { + return children; + } + + @Override + protected Description describeChild(Request child) { + return child.getRunner().getDescription(); + } + + @Override + protected void runChild(Request child, RunNotifier notifier) { + child.getRunner().run(notifier); + } + + private static Filter parseFilter(TestClass testClass) { + return groupAnnotations(testClass, Ignore.class, Ignores.class).stream().map(IgnoreFilter::ignore).reduce(Filter.ALL, Filter::intersect); + } + + private static class IgnoreFilter extends Filter { + + private final Ignore ignore; + + public static Filter ignore(Ignore ignore) { + return new IgnoreFilter(ignore); + } + + private IgnoreFilter(Ignore ignore) { + this.ignore = ignore; + } + + @Override + public boolean shouldRun(Description description) { + if (ignore.value().equals(description.getTestClass())) { + if (ignore.method().isEmpty()) { + return false; + } else { + return !ignore.method().equals(description.getMethodName()); + } + } else { + return true; + } + } + + @Override + public String describe() { + if (ignore.method().isEmpty()) { + return "Ignore " + ignore.value(); + } else { + return "Ignore " + ignore.value() + "#" + ignore.method(); + } + } + } + + private static Request parseRequest(TestClass testClass, Filter filter) throws IOException, ClassNotFoundException { + List froms = groupAnnotations(testClass, From.class, Froms.class); + List tests = groupAnnotations(testClass, Test.class, Tests.class); + + List> classes = new ArrayList<>(); + + for (From from : froms) { + URL location = from.value().getProtectionDomain().getCodeSource().getLocation(); + try (InputStream is = location.openStream(); JarInputStream jis = new JarInputStream(is)) { + while (true) { + JarEntry entry = jis.getNextJarEntry(); + if (entry == null) { + break; + } else if (entry.getName().endsWith("Test.class")) { + classes.add(Class.forName(entry.getName().replace(".class", "").replace('/', '.'))); + } + } + } + } + for (Test test : tests) { + classes.add(test.value()); + } + + return Request.classes(classes.stream() + .filter(c -> Modifier.isPublic(c.getModifiers()) && !Modifier.isAbstract(c.getModifiers())) + .filter(c -> !c.getSimpleName().startsWith("Abstract")).toArray(Class[]::new)) + .filterWith(filter); + + } + + @SuppressWarnings("unchecked") + private static List groupAnnotations(TestClass testClass, Class annoType, Class wrapperType) { + try { + List annotations = new ArrayList<>(); + + WT testsAnn = testClass.getAnnotation(wrapperType); + if (testsAnn != null) { + annotations.addAll(asList((T[]) wrapperType.getMethod("value").invoke(testsAnn))); + } + + T singularAnn = testClass.getAnnotation(annoType); + if (singularAnn != null) { + annotations.add(singularAnn); + } + return annotations; + } catch (ReflectiveOperationException e) { + throw new IllegalArgumentException(e); + } + } + + + @Retention(RetentionPolicy.RUNTIME) + @Repeatable(Tests.class) + public @interface Test { + + Class value(); + } + @Retention(RetentionPolicy.RUNTIME) + public @interface Tests { + Test[] value(); + } + + @Retention(RetentionPolicy.RUNTIME) + @Repeatable(Froms.class) + public @interface From { + + Class value(); + } + @Retention(RetentionPolicy.RUNTIME) + public @interface Froms { + From[] value(); + } + + @Retention(RetentionPolicy.RUNTIME) + @Repeatable(Ignores.class) + public @interface Ignore { + Class value(); + + String method() default ""; + } + + @Retention(RetentionPolicy.RUNTIME) + public @interface Ignores { + Ignore[] value(); + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/testing/StandardCluster.java b/clustered/integration-test/src/test/java/org/ehcache/testing/StandardCluster.java new file mode 100644 index 0000000000..199c2691ac --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/testing/StandardCluster.java @@ -0,0 +1,64 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.testing; + +import org.terracotta.testing.config.ConfigRepoStartupBuilder; +import org.terracotta.testing.rules.BasicExternalClusterBuilder; + +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Duration; +import java.util.Map; + +import static java.time.temporal.ChronoUnit.SECONDS; +import static java.util.Collections.singletonMap; + +public interface StandardCluster { + static Path clusterPath() { + return Paths.get("build", "cluster"); + } + + static String offheapResource(String name, long size) { + return offheapResources(singletonMap(name, size)); + } + + static String offheapResources(Map resources) { + StringBuilder sb = new StringBuilder(""); + sb.append(""); + for (Map.Entry e : resources.entrySet()) { + sb.append("").append(e.getValue()).append(""); + } + sb.append(""); + return sb.append("\n").toString(); + } + + static BasicExternalClusterBuilder newCluster() { + return BasicExternalClusterBuilder.newCluster().startupBuilder(ConfigRepoStartupBuilder::new); + } + + static BasicExternalClusterBuilder newCluster(int size) { + return BasicExternalClusterBuilder.newCluster(size).startupBuilder(ConfigRepoStartupBuilder::new); + } + + static String leaseLength(Duration leaseLength) { + return "" + + "" + + "" + leaseLength.get(SECONDS) + "" + + "" + + ""; + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/testing/StandardTimeouts.java b/clustered/integration-test/src/test/java/org/ehcache/testing/StandardTimeouts.java new file mode 100644 index 0000000000..1ac947b4d5 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/testing/StandardTimeouts.java @@ -0,0 +1,27 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.testing; + +import org.terracotta.utilities.test.matchers.Eventually; + +import java.time.Duration; + +public interface StandardTimeouts { + + static Eventually.Timeout eventually() { + return Eventually.within(Duration.ofMinutes(1L)); + } +} diff --git a/clustered/integration-test/src/test/resources/ExcludeList b/clustered/integration-test/src/test/resources/ExcludeList new file mode 100644 index 0000000000..40f1074143 --- /dev/null +++ b/clustered/integration-test/src/test/resources/ExcludeList @@ -0,0 +1,119 @@ +# This is a dummy test that fails if not in the exclude list. +org.jsr107.tck.CachingTest#dummyTest + +# Hard assertions on the default URI +org.jsr107.tck.CachingTest#getCacheManager_defaultURI +org.jsr107.tck.CachingTest#getCacheManager_nullUriParameter +org.jsr107.tck.CachingTest#getCacheManager_URI +org.jsr107.tck.spi.CachingProviderClassLoaderTest#getCacheManagerSameURI +org.jsr107.tck.spi.CachingProviderClassLoaderTest#getCacheManagerDefaultURI +org.jsr107.tck.spi.CachingProviderTest#getCacheManagerUsingDefaultURI + +# Assumes store-by-reference semantics +org.jsr107.tck.StoreByReferenceTest#get_Existing +org.jsr107.tck.StoreByReferenceTest#get_Existing_NotSameKey +org.jsr107.tck.StoreByReferenceTest#put_Existing_NotSameKey +org.jsr107.tck.StoreByReferenceTest#getAndPut_NotThere +org.jsr107.tck.StoreByReferenceTest#getAndPut_Existing +org.jsr107.tck.StoreByReferenceTest#getAndPut_Existing_NotSameKey +org.jsr107.tck.StoreByReferenceTest#putAll +org.jsr107.tck.StoreByReferenceTest#putIfAbsent_Missing +org.jsr107.tck.StoreByReferenceTest#putIfAbsent_There +org.jsr107.tck.StoreByReferenceTest#replace_3arg +org.jsr107.tck.StoreByReferenceTest#getAndReplace +org.jsr107.tck.TypesTest#simpleAPINoGenericsAndNoTypeEnforcementStoreByReference + +# Assumes inline expiry policy calls +org.jsr107.tck.management.CacheMBStatisticsBeanTest#testExpiryOnCreation +org.jsr107.tck.expiry.CacheExpiryTest#putShouldCallGetExpiry +org.jsr107.tck.expiry.CacheExpiryTest#getAndReplaceShouldCallGetExpiryForModifiedEntry +org.jsr107.tck.expiry.CacheExpiryTest#containsKeyShouldNotCallExpiryPolicyMethods +org.jsr107.tck.expiry.CacheExpiryTest#removeEntryShouldNotCallExpiryPolicyMethods +org.jsr107.tck.expiry.CacheExpiryTest#putIfAbsentShouldCallGetExpiry +org.jsr107.tck.expiry.CacheExpiryTest#replaceSpecificShouldCallGetExpiry +org.jsr107.tck.expiry.CacheExpiryTest#getAllShouldCallGetExpiryForAccessedEntry +org.jsr107.tck.expiry.CacheExpiryTest#removeSpecifiedEntryShouldNotCallExpiryPolicyMethods +org.jsr107.tck.expiry.CacheExpiryTest#replaceShouldCallGetExpiryForModifiedEntry +org.jsr107.tck.expiry.CacheExpiryTest#putAllShouldCallGetExpiry +org.jsr107.tck.expiry.CacheExpiryTest#invokeAllSetValueShouldCallGetExpiry +org.jsr107.tck.expiry.CacheExpiryTest#iteratorNextShouldCallGetExpiryForAccessedEntry +org.jsr107.tck.expiry.CacheExpiryTest#getShouldCallGetExpiryForAccessedEntry +org.jsr107.tck.expiry.CacheExpiryTest#invokeAllReadThroughEnabledGetOnNonExistentEntry +org.jsr107.tck.expiry.CacheExpiryTest#expire_whenCreated_ModifiedExpiryPolicy +org.jsr107.tck.expiry.CacheExpiryTest#expire_whenCreated_ParameterizedExpiryPolicy +org.jsr107.tck.expiry.CacheExpiryTest#expire_whenCreated_TouchedExpiryPolicy +org.jsr107.tck.expiry.CacheExpiryTest#expire_whenCreated_CreatedExpiryPolicy +org.jsr107.tck.expiry.CacheExpiryTest#expire_whenCreated_AccessedExpiryPolicy +org.jsr107.tck.expiry.CacheExpiryTest#getAndRemoveShouldNotCallExpiryPolicyMethods +org.jsr107.tck.expiry.CacheExpiryTest#getAndPutShouldCallEitherCreatedOrModifiedExpiryPolicy + +# Clustered cache TTI is busted(-ish) +org.jsr107.tck.integration.CacheLoaderWithExpiryTest#shouldLoadWhenMissCausedByExpiry +org.jsr107.tck.expiry.CacheExpiryTest#expire_whenAccessed + +# Implement org.ehcache.clustered.client.internal.store.ClusteredStore.computeAndGet +org.jsr107.tck.event.CacheListenerTest#testBrokenCacheEntryListener +org.jsr107.tck.event.CacheListenerTest#testCacheEntryListener +org.jsr107.tck.management.CacheMBStatisticsBeanTest#testCacheStatisticsInvokeEntryProcessorRemove +org.jsr107.tck.management.CacheMBStatisticsBeanTest#testCacheStatisticsInvokeEntryProcessorUpdate +org.jsr107.tck.management.CacheMBStatisticsBeanTest#testCacheStatisticsInvokeEntryProcessorGet +org.jsr107.tck.management.CacheMBStatisticsBeanTest#testCacheStatisticsInvokeEntryProcessorNoOp +org.jsr107.tck.processor.CacheInvokeTest#removeMissing +org.jsr107.tck.processor.CacheInvokeTest#noValueException +org.jsr107.tck.processor.CacheInvokeTest#testProcessorEmptyExceptionIsWrapped +org.jsr107.tck.processor.CacheInvokeTest#varArgumentsPassedIn +org.jsr107.tck.processor.CacheInvokeTest#removeException +org.jsr107.tck.processor.CacheInvokeTest#noValueSetValue +org.jsr107.tck.processor.CacheInvokeTest#noValueNoMutation +org.jsr107.tck.processor.CacheInvokeTest#testProcessorExceptionIsWrapped +org.jsr107.tck.processor.CacheInvokeTest#setValueToNull +org.jsr107.tck.processor.CacheInvokeTest#removeExisting +org.jsr107.tck.processor.CacheInvokeTest#existingException +org.jsr107.tck.processor.CacheInvokeTest#invokeAllgetResultFromMap +org.jsr107.tck.processor.CacheInvokeTest#existingReplace +org.jsr107.tck.processor.CacheInvokeTest#nullGetValue +org.jsr107.tck.expiry.CacheExpiryTest#invokeSetValueShouldCallGetExpiry +org.jsr107.tck.expiry.CacheExpiryTest#invokeGetValueShouldCallGetExpiry +org.jsr107.tck.expiry.CacheExpiryTest#invokeMultiSetValueShouldCallGetExpiry +org.jsr107.tck.expiry.CacheExpiryTest#invokeGetValueWithReadThroughForNonExistentEntryShouldCallGetExpiryForCreatedEntry +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvoke_setValue_CreateEntryThenRemove +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvoke_remove +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvoke_setValue_CreateEntryGetValue +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvoke_setValue_UpdateEntry +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvoke_remove_createEntry +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvoke_setValue_CreateEntry +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvoke_remove_nonExistingEntry +org.jsr107.tck.integration.CacheLoaderWithoutReadThroughTest#shouldLoadWhenAccessingWithEntryProcessor +org.jsr107.tck.integration.CacheLoaderTest#shouldLoadWhenAccessingWithEntryProcessor +org.jsr107.tck.integration.CacheLoaderWriterTest#shouldLoadWhenAccessingWithEntryProcessor +org.jsr107.tck.processor.CacheInvokeTest#invokeAllEntryProcessorReturnsNullResult +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvokeAll_setValue_UpdateEntry +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvokeAll_setValue_CreateEntry +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughUsingInvokeAll_setValue_RemoveEntry + +# Implement org.ehcache.clustered.client.internal.store.ClusteredStore.bulkComputeIfAbsent +org.jsr107.tck.integration.CacheLoaderTest#shouldNotLoadMultipleNullEntriesUsingLoadAll +org.jsr107.tck.integration.CacheLoaderTest#shouldNotLoadMultipleNullValuesUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWithoutReadThroughTest#shouldLoadMultipleNonExistingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWithoutReadThroughTest#shouldNotLoadMultipleNullEntriesUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWithoutReadThroughTest#shouldNotLoadMultipleNullValuesUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWithoutReadThroughTest#shouldLoadSingleMissingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderTest#shouldLoadMultipleNonExistingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderTest#shouldLoadSingleMissingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWriterTest#shouldLoadSingleMissingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWriterTest#shouldLoadMultipleNonExistingEntryUsingLoadAll +org.jsr107.tck.expiry.CacheExpiryTest#loadAllWithReadThroughEnabledShouldCallGetExpiryForCreatedEntry + +# Implement org.ehcache.clustered.client.internal.store.ClusteredStore.bulkCompute +org.jsr107.tck.integration.CacheLoaderWithoutReadThroughTest#shouldLoadSingleExistingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWithoutReadThroughTest#shouldLoadMultipleExistingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderTest#shouldLoadSingleExistingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWriterTest#shouldNotWriteThroughUsingLoadAll +org.jsr107.tck.integration.CacheLoaderTest#shouldLoadMultipleExistingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWriterTest#shouldLoadMultipleExistingEntryUsingLoadAll +org.jsr107.tck.integration.CacheLoaderWriterTest#shouldLoadSingleExistingEntryUsingLoadAll + +# Clustered removeAll/putAll incorrectly terminates early when hitting an exception on a key +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughRemoveAllSpecific_partialSuccess +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThroughRemoveAll_partialSuccess +org.jsr107.tck.integration.CacheWriterTest#shouldWriteThoughUsingPutAll_partialSuccess diff --git a/clustered/integration-test/src/test/resources/clusteredConfiguration.txt b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt index 9a601bca24..4e742d8a1c 100644 --- a/clustered/integration-test/src/test/resources/clusteredConfiguration.txt +++ b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt @@ -4,7 +4,7 @@ caches: valueType: java.lang.String serviceConfigurations: None evictionAdvisor: None - expiry: NoExpiry + expiry: No Expiry resourcePools: pools: heap: @@ -18,7 +18,7 @@ caches: valueType: java.lang.String serviceConfigurations: None evictionAdvisor: None - expiry: NoExpiry + expiry: No Expiry resourcePools: pools: heap: @@ -33,9 +33,9 @@ caches: services: - org.ehcache.clustered.client.config.ClusteringServiceConfiguration: clusterUri: terracotta://server-1/my-server-entity-2 - timeouts: Timeouts{readOperation=PT5S,writeOperation=PT5S,connection=PT2562047H47M16.854775807S} - autoCreate: true + timeouts: Timeouts{readOperation=PT5S,writeOperation=PT5S,connection=PT2M30S} + clientMode: AUTO_CREATE defaultServerResource: primary-server-resource resourcePools: - resource-pool-a: [33554432 bytes from ''] + resource-pool-a: [10485760 bytes from ''] - org.ehcache.management.registry.DefaultManagementRegistryConfiguration diff --git a/clustered/integration-test/src/test/resources/configs/clustered.xml b/clustered/integration-test/src/test/resources/configs/clustered.xml index 007d0ac885..e1e0a2a12a 100644 --- a/clustered/integration-test/src/test/resources/configs/clustered.xml +++ b/clustered/integration-test/src/test/resources/configs/clustered.xml @@ -11,8 +11,8 @@ - + - \ No newline at end of file + diff --git a/clustered/integration-test/src/test/resources/configs/jcache-clustered.xml b/clustered/integration-test/src/test/resources/configs/jcache-clustered.xml index 11061512bc..8e47f844a6 100644 --- a/clustered/integration-test/src/test/resources/configs/jcache-clustered.xml +++ b/clustered/integration-test/src/test/resources/configs/jcache-clustered.xml @@ -15,27 +15,26 @@ ~ limitations under the License. --> - -] -> + xmlns:tc="http://www.ehcache.org/v3/clustered" + xmlns:jsr107="http://www.ehcache.org/v3/jsr107"> - - + + - + + + + + - 10 - 10 + 1000 + 4 - - - \ No newline at end of file + + diff --git a/clustered/integration-test/src/test/resources/configs/offheap-resource.xml b/clustered/integration-test/src/test/resources/configs/offheap-resource.xml index 2b2ab18a3c..fab9f0273e 100644 --- a/clustered/integration-test/src/test/resources/configs/offheap-resource.xml +++ b/clustered/integration-test/src/test/resources/configs/offheap-resource.xml @@ -17,7 +17,6 @@ --> 64 diff --git a/clustered/integration-test/src/test/resources/simpleConfiguration.txt b/clustered/integration-test/src/test/resources/simpleConfiguration.txt index 09765c1a48..2767f10986 100644 --- a/clustered/integration-test/src/test/resources/simpleConfiguration.txt +++ b/clustered/integration-test/src/test/resources/simpleConfiguration.txt @@ -6,7 +6,7 @@ caches: - org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration - org.ehcache.impl.config.loaderwriter.writebehind.DefaultWriteBehindConfiguration evictionAdvisor: org.ehcache.clustered.management.EhcacheManagerToStringTest$1 - expiry: NoExpiry + expiry: NoExpiryPolicy resourcePools: pools: heap: diff --git a/clustered/integration-test/src/test/resources/tc-logback.xml b/clustered/integration-test/src/test/resources/tc-logback.xml new file mode 100644 index 0000000000..4208262b28 --- /dev/null +++ b/clustered/integration-test/src/test/resources/tc-logback.xml @@ -0,0 +1,32 @@ + + + + + %d [%t] %p %c - %m%n + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/clustered/ops-tool/build.gradle b/clustered/ops-tool/build.gradle index 4f676ee524..0963f3352c 100644 --- a/clustered/ops-tool/build.gradle +++ b/clustered/ops-tool/build.gradle @@ -14,6 +14,10 @@ * limitations under the License. */ +plugins { + id 'org.ehcache.build.conventions.java-library' +} + dependencies { - compile 'com.beust:jcommander:1.47' + implementation 'com.beust:jcommander:1.47' } diff --git a/clustered/ops-tool/gradle.properties b/clustered/ops-tool/gradle.properties deleted file mode 100644 index 86aa0f75bb..0000000000 --- a/clustered/ops-tool/gradle.properties +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 Clustered Ops Tooling -subPomDesc = Operations Tools for Clustered Ehcache 3 diff --git a/clustered/ops-tool/src/main/java/org/ehcache/clustered/operations/OperationsTool.java b/clustered/ops-tool/src/main/java/org/ehcache/clustered/operations/OperationsTool.java index a38da11064..cdffbc3d67 100644 --- a/clustered/ops-tool/src/main/java/org/ehcache/clustered/operations/OperationsTool.java +++ b/clustered/ops-tool/src/main/java/org/ehcache/clustered/operations/OperationsTool.java @@ -33,7 +33,7 @@ public class OperationsTool { } }; - public static void main(String[] args) throws IOException { + public static void main(String[] args) { System.exit(innerMain(args)); } diff --git a/clustered/ops-tool/src/test/java/org/ehcache/clustered/operations/OperationsToolTest.java b/clustered/ops-tool/src/test/java/org/ehcache/clustered/operations/OperationsToolTest.java index cebd4ab66e..754d1e0612 100644 --- a/clustered/ops-tool/src/test/java/org/ehcache/clustered/operations/OperationsToolTest.java +++ b/clustered/ops-tool/src/test/java/org/ehcache/clustered/operations/OperationsToolTest.java @@ -16,8 +16,9 @@ package org.ehcache.clustered.operations; import java.io.IOException; + +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import org.junit.Test; public class OperationsToolTest { diff --git a/clustered/osgi-test/build.gradle b/clustered/osgi-test/build.gradle new file mode 100644 index 0000000000..57909678f5 --- /dev/null +++ b/clustered/osgi-test/build.gradle @@ -0,0 +1,123 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.conventions.java' +} + +configurations { + modularOsgiModule + osgiModule + lowerBoundOsgiModule.extendsFrom osgiModule + testCompileOnly.extendsFrom osgiModule +} + +dependencies { + modularOsgiModule project(':ehcache-api') + modularOsgiModule project(':ehcache-core') + modularOsgiModule project(':ehcache-impl') + modularOsgiModule project(':ehcache-xml') + + osgiModule project(':ehcache') + osgiModule project(':clustered:ehcache-clustered') + osgiModule "javax.cache:cache-api:$parent.jcacheVersion" + osgiModule "org.slf4j:slf4j-simple:$parent.slf4jVersion" + osgiModule "org.terracotta:terracotta-utilities-test-tools:$terracottaUtilitiesVersion" + osgiModule "org.terracotta:terracotta-utilities-port-chooser:$terracottaUtilitiesVersion" + osgiModule 'org.apache.felix:org.apache.felix.scr:2.1.6' + osgiModule 'com.sun.activation:javax.activation:1.2.0' + osgiModule 'org.glassfish.hk2:osgi-resource-locator:1.0.2' + + testImplementation project(':osgi-test') + testImplementation 'org.osgi:osgi.core:6.0.0' +} + +configurations.all { + resolutionStrategy { + dependencySubstitution { + substitute(module('org.ops4j.pax.url:pax-url-aether:2.4.5')) + .because('https://github.com/codehaus-plexus/plexus-utils/issues/3' + + ' and https://github.com/codehaus-plexus/plexus-utils/issues/4') + .with(module('org.ops4j.pax.url:pax-url-aether:2.6.3')) + substitute(module('org.ops4j.pax.url:pax-url-classpath:2.4.5')) + .because('https://ops4j1.jira.com/browse/PAXURL-341') + .with(module('org.ops4j.pax.url:pax-url-classpath:2.6.1')) + substitute(module('org.ops4j.pax.url:pax-url-link:2.4.5')) + .because('https://ops4j1.jira.com/browse/PAXURL-341') + .with(module('org.ops4j.pax.url:pax-url-link:2.6.1')) + + substitute(module('biz.aQute.bnd:bndlib:2.4.0')) + .because('Java 9 Stuff') + .with(module('biz.aQute.bnd:biz.aQute.bndlib:5.2.0')) + substitute(module('junit:junit:4.12')) + .because('CVE-2020-15250') + .with(module('junit:junit:4.13.1')) + } + } +} + +sourceSets { + test { + // Needed for PaxExam which makes the dynamic bundle load content of a single dir + // matching the package of the test class + output.resourcesDir = java.outputDir + } +} + +task unzipKit(type: Sync) { + dependsOn project(':clustered:ehcache-clustered').distZip + from zipTree(project(':clustered:ehcache-clustered').distZip.archivePath) + into 'build/ehcache-kit' +} + +tasks.withType(Test) { + dependsOn unzipKit + systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:ehcache-clustered').archivesBaseName}-$project.version-kit" +} + +test { + dependsOn configurations.osgiModule, configurations.modularOsgiModule + doFirst { + [configurations.modularOsgiModule, configurations.osgiModule]*.resolvedConfiguration*.resolvedArtifacts*.forEach({ + systemProperty "$it.moduleVersion.id.module:osgi-path", it.file + }) + } +} + +configurations { + lowerBoundOsgiModule { + resolutionStrategy.dependencySubstitution { + substitute module('org.glassfish.jaxb:jaxb-runtime') with module('com.sun.xml.bind:jaxb-osgi:2.2.8-b01') + } + } +} +dependencies { + lowerBoundOsgiModule 'javax.xml.bind:jaxb-api:2.2.9' +} + +tasks.register('lowerBoundTest', Test) { + group = JavaBasePlugin.VERIFICATION_GROUP + dependsOn configurations.lowerBoundOsgiModule, configurations.modularOsgiModule + doFirst { + [configurations.modularOsgiModule, configurations.lowerBoundOsgiModule]*.resolvedConfiguration*.resolvedArtifacts*.forEach({ + systemProperty "$it.moduleVersion.id.module:osgi-path", it.file + }) + } +} + +tasks.named('check') { + dependsOn tasks.lowerBoundTest +} diff --git a/transactions/config/checkstyle-suppressions.xml b/clustered/osgi-test/config/checkstyle-suppressions.xml similarity index 100% rename from transactions/config/checkstyle-suppressions.xml rename to clustered/osgi-test/config/checkstyle-suppressions.xml diff --git a/clustered/osgi-test/src/test/java/org/ehcache/osgi/ClusterSupport.java b/clustered/osgi-test/src/test/java/org/ehcache/osgi/ClusterSupport.java new file mode 100644 index 0000000000..c016e8e341 --- /dev/null +++ b/clustered/osgi-test/src/test/java/org/ehcache/osgi/ClusterSupport.java @@ -0,0 +1,114 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.osgi; + +import org.terracotta.utilities.test.net.PortManager; + +import java.io.Closeable; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.URI; +import java.nio.channels.ServerSocketChannel; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collection; +import java.util.concurrent.TimeUnit; + +import static java.util.Arrays.asList; + +public class ClusterSupport { + + public static Cluster startServer(Path serverDirectory) throws IOException { + Path kitLocation = Paths.get(System.getProperty("kitInstallationPath")); + + PortManager portManager = PortManager.getInstance(); + PortManager.PortRef tsaPort = portManager.reservePort(); + PortManager.PortRef tsaGroupPort = portManager.reservePort(); + + Path serverDir = kitLocation.resolve("server"); + + ProcessBuilder serverProcess = new ProcessBuilder() + .directory(serverDirectory.toFile()) + .command(Paths.get(System.getProperty("java.home")).resolve("bin") + .resolve(System.getProperty("os.name").contains("Windows") ? "java.exe" : "java").toString()); + + serverProcess.command().addAll(asList( + "-Xmx128m", + "-Dtc.install-root=" + serverDir, + "-cp", serverDir.resolve("lib").resolve("tc.jar").toString(), + "com.tc.server.TCServerMain", + "--auto-activate", + "--cluster-name=foo", + "--failover-priority=availability", + "--client-reconnect-window=120s", + "--name=default-server", + "--hostname=localhost", + "--port=" + tsaPort.port(), + "--group-port=" + tsaGroupPort.port(), + "--log-dir=" + serverDirectory.resolve("logs"), + "--config-dir=" + serverDirectory.resolve("repository"), + "--offheap-resources=main:32MB")); + serverProcess.inheritIO(); + + return new Cluster(serverProcess.start(), URI.create("terracotta://localhost:" + tsaPort.port()), serverDirectory, tsaPort, tsaGroupPort); + } + + private static int selectAvailableEphemeralPort() throws IOException { + try (ServerSocketChannel channel = ServerSocketChannel.open().bind(new InetSocketAddress(0))) { + return channel.socket().getLocalPort(); + } + } + + static class Cluster implements Closeable { + + private final Process serverProcess; + private final URI connectionUri; + private final Path workingPath; + private final Collection ports; + + Cluster(Process serverProcess, URI connectionUri, Path workingPath, PortManager.PortRef... ports) { + this.serverProcess = serverProcess; + this.connectionUri = connectionUri; + this.workingPath = workingPath; + this.ports = asList(ports); + } + + public URI getConnectionUri() { + return connectionUri; + } + + @Override + public void close() { + try { + serverProcess.destroyForcibly(); + } finally { + try { + serverProcess.waitFor(60, TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new AssertionError(e); + } finally { + ports.forEach(PortManager.PortRef::close); + } + } + } + + public Path getWorkingArea() { + return workingPath; + } + } +} + diff --git a/clustered/osgi-test/src/test/java/org/ehcache/osgi/ClusteredOsgiTest.java b/clustered/osgi-test/src/test/java/org/ehcache/osgi/ClusteredOsgiTest.java new file mode 100644 index 0000000000..af954049df --- /dev/null +++ b/clustered/osgi-test/src/test/java/org/ehcache/osgi/ClusteredOsgiTest.java @@ -0,0 +1,226 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.osgi; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.osgi.EhcacheActivator; +import org.ehcache.core.osgi.OsgiServiceLoader; +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.xml.XmlConfiguration; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; +import org.osgi.framework.wiring.BundleWiring; +import org.w3c.dom.Document; +import org.w3c.dom.Node; + +import javax.xml.namespace.NamespaceContext; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import javax.xml.transform.stream.StreamSource; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathFactory; +import java.io.File; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.ServiceLoader; +import java.util.Set; + +import static java.util.Spliterators.spliterator; +import static java.util.stream.Collectors.toSet; +import static java.util.stream.Stream.of; +import static java.util.stream.StreamSupport.stream; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.osgi.ClusterSupport.startServer; +import static org.ehcache.osgi.OsgiTestUtils.baseConfiguration; +import static org.ehcache.osgi.OsgiTestUtils.gradleBundle; +import static org.ehcache.xml.ConfigurationParser.discoverSchema; +import static org.ehcache.xml.XmlConfiguration.CORE_SCHEMA_URL; +import static org.ehcache.osgi.OsgiTestUtils.jaxbConfiguration; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsCollectionContaining.hasItems; +import static org.ops4j.pax.exam.CoreOptions.options; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +public class ClusteredOsgiTest { + + @Rule + public TemporaryFolder serverLocation = new TemporaryFolder(); + + @Configuration + public Option[] individualModules() { + return options( + gradleBundle("org.ehcache.modules:ehcache-api"), + gradleBundle("org.ehcache.modules:ehcache-core"), + gradleBundle("org.ehcache.modules:ehcache-impl"), + gradleBundle("org.ehcache.modules:ehcache-xml"), jaxbConfiguration(), + gradleBundle("org.ehcache:ehcache-clustered"), + + gradleBundle("org.terracotta:statistics"), + gradleBundle("org.ehcache:sizeof"), + gradleBundle("org.terracotta:offheap-store"), + gradleBundle("org.terracotta:terracotta-utilities-tools"), + + baseConfiguration("ClusteredOsgiTest", "individualModules"), + gradleBundle("org.terracotta:terracotta-utilities-test-tools"), + gradleBundle("org.terracotta:terracotta-utilities-port-chooser") + ); + } + + @Configuration + public Option[] uberJar() { + return options( + gradleBundle("org.ehcache:ehcache"), jaxbConfiguration(), + gradleBundle("org.ehcache:ehcache-clustered"), + + baseConfiguration("ClusteredOsgiTest", "uberJar"), + gradleBundle("org.terracotta:terracotta-utilities-tools"), + gradleBundle("org.terracotta:terracotta-utilities-test-tools"), + gradleBundle("org.terracotta:terracotta-utilities-port-chooser") + ); + } + + @Test + public void testProgrammaticClusteredCache() throws Throwable { + try (ClusterSupport.Cluster cluster = startServer(serverLocation.newFolder().toPath())) { + TestMethods.testProgrammaticClusteredCache(cluster); + } + } + + @Test + public void testXmlClusteredCache() throws Throwable { + try (ClusterSupport.Cluster cluster = startServer(serverLocation.newFolder().toPath())) { + TestMethods.testXmlClusteredCache(cluster); + } + } + + @Test + public void testAllServicesAreAvailable() { + TestMethods.testAllServicesAreAvailable(); + } + + private static class TestMethods { + + public static void testProgrammaticClusteredCache(ClusterSupport.Cluster cluster) throws Throwable { + try (PersistentCacheManager cacheManager = newCacheManagerBuilder() + .with(cluster(cluster.getConnectionUri()).autoCreate(c -> c)) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + newResourcePoolsBuilder().with(clusteredDedicated("main", 2, MemoryUnit.MB)))) + .build(true)) { + + final Cache cache = cacheManager.getCache("clustered-cache", Long.class, String.class); + + cache.put(1L, "value"); + assertThat(cache.get(1L), is("value")); + } + } + + public static void testXmlClusteredCache(ClusterSupport.Cluster cluster) throws Exception { + File config = cluster.getWorkingArea().resolve("ehcache.xml").toFile(); + + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setNamespaceAware(true); + documentBuilderFactory.setSchema(discoverSchema(new StreamSource(CORE_SCHEMA_URL.openStream()))); + + Document doc = documentBuilderFactory.newDocumentBuilder().parse(TestMethods.class.getResourceAsStream("ehcache-clustered-osgi.xml")); + + XPath xpath = XPathFactory.newInstance().newXPath(); + xpath.setNamespaceContext(new SimpleNamespaceContext() + .with("eh", "http://www.ehcache.org/v3") + .with("tc", "http://www.ehcache.org/v3/clustered")); + + Node clusterUriAttribute = (Node) xpath.evaluate("//eh:config/eh:service/tc:cluster/tc:connection/@url", doc, XPathConstants.NODE); + clusterUriAttribute.setTextContent(cluster.getConnectionUri().toString() + "/cache-manager"); + Transformer xformer = TransformerFactory.newInstance().newTransformer(); + xformer.transform(new DOMSource(doc), new StreamResult(config)); + + + try (PersistentCacheManager cacheManager = (PersistentCacheManager) CacheManagerBuilder.newCacheManager( + new XmlConfiguration(config.toURI().toURL(), TestMethods.class.getClassLoader()) + )) { + cacheManager.init(); + + final Cache cache = cacheManager.getCache("clustered-cache", Long.class, Person.class); + + cache.put(1L, new Person("Brian")); + assertThat(cache.get(1L).name, is("Brian")); + } + } + + public static void testAllServicesAreAvailable() { + Set osgiAvailableClasses = + stream(spliterator(OsgiServiceLoader.load(ServiceFactory.class).iterator(), Long.MAX_VALUE, 0), false) + .map(f -> f.getClass().getName()) + .collect(toSet()); + + Set jdkAvailableClasses = of(EhcacheActivator.getCoreBundle().getBundles()) + .map(b -> b.adapt(BundleWiring.class).getClassLoader()) + .flatMap(cl -> + stream(spliterator(ServiceLoader.load(ServiceFactory.class, cl).iterator(), Long.MAX_VALUE, 0), false) + .map(f -> f.getClass().getName())) + .collect(toSet()); + + assertThat(osgiAvailableClasses, hasItems(jdkAvailableClasses.toArray(new String[0]))); + } + } + + static class SimpleNamespaceContext implements NamespaceContext { + + public final Map prefixes = new HashMap<>(); + + public SimpleNamespaceContext with(String prefix, String namespaceUri) { + prefixes.put(prefix, namespaceUri); + return this; + } + + @Override + public String getNamespaceURI(String prefix) { + return prefixes.get(prefix); + } + + @Override + public String getPrefix(String namespaceURI) { + return prefixes.entrySet().stream().filter(e -> namespaceURI.equals(e.getValue())) + .map(Map.Entry::getKey).findFirst().orElse(null); + } + + @Override + public Iterator getPrefixes(String namespaceURI) { + return prefixes.keySet().iterator(); + } + }; +} diff --git a/clustered/osgi-test/src/test/java/org/ehcache/osgi/Person.java b/clustered/osgi-test/src/test/java/org/ehcache/osgi/Person.java new file mode 100644 index 0000000000..568a9b4be5 --- /dev/null +++ b/clustered/osgi-test/src/test/java/org/ehcache/osgi/Person.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.osgi; + +import java.io.Serializable; + +/** + * Person + */ +public class Person implements Serializable { + + private static final long serialVersionUID = 1L; + + final String name; + + Person(String name) { + this.name = name; + } + +} diff --git a/clustered/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-clustered-osgi.xml b/clustered/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-clustered-osgi.xml new file mode 100644 index 0000000000..13db0d6848 --- /dev/null +++ b/clustered/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-clustered-osgi.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + java.lang.Long + org.ehcache.osgi.Person + + 100 + 1 + + + diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle deleted file mode 100644 index eb7bd1f0ff..0000000000 --- a/clustered/server/build.gradle +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: EhDeploy - -dependencies { - compile project(':clustered:common'), "org.slf4j:slf4j-api:$slf4jVersion" - compile("org.terracotta:offheap-resource:$terracottaPlatformVersion") { - transitive = false - } - compile group: 'org.terracotta', name: 'offheap-store', version: offheapVersion - compile group: 'org.slf4j', name: 'slf4j-api', version: slf4jVersion - compile("org.terracotta.management:monitoring-service-api:$terracottaPlatformVersion") { - transitive = false - } - compile"org.terracotta.management.dist:mnm-common:$terracottaPlatformVersion" - compile("org.terracotta:client-message-tracker:$terracottaPlatformVersion") - provided "org.terracotta:entity-server-api:$terracottaApisVersion" - provided "org.terracotta:standard-cluster-services:$terracottaApisVersion" - provided "org.terracotta:runnel:$terracottaPlatformVersion" -} - -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} diff --git a/clustered/server/ehcache-entity/build.gradle b/clustered/server/ehcache-entity/build.gradle new file mode 100644 index 0000000000..2d88b49de3 --- /dev/null +++ b/clustered/server/ehcache-entity/build.gradle @@ -0,0 +1,40 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.clustered-server-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Clustering Server Entity module' + description = 'The Server Entity module of Ehcache 3' + } +} + +dependencies { + service project(':clustered:server:ehcache-service-api') + service "org.terracotta.management:monitoring-service-api:$terracottaPlatformVersion" + service "org.terracotta.management:management-registry:$terracottaPlatformVersion" + + api project(':clustered:ehcache-common') + implementation "org.terracotta:runnel:$terracottaPlatformVersion" + implementation "org.terracotta:offheap-store:$offheapVersion" + implementation "org.terracotta:client-message-tracker:$terracottaPlatformVersion" + + testImplementation project(':clustered:server:ehcache-service') + testImplementation project(':clustered:test-utils') +} diff --git a/clustered/server/ehcache-entity/config/checkstyle-suppressions.xml b/clustered/server/ehcache-entity/config/checkstyle-suppressions.xml new file mode 100644 index 0000000000..cb41d0baf7 --- /dev/null +++ b/clustered/server/ehcache-entity/config/checkstyle-suppressions.xml @@ -0,0 +1,9 @@ + + + + + + + diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntity.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntity.java similarity index 89% rename from clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntity.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntity.java index 868dd231de..b989bb1dfa 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntity.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntity.java @@ -24,12 +24,12 @@ import org.ehcache.clustered.common.internal.lock.LockMessaging.LockOperation; import org.ehcache.clustered.common.internal.lock.LockMessaging.LockTransition; +import org.terracotta.entity.ActiveInvokeContext; import org.terracotta.entity.ActiveServerEntity; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.MessageCodecException; import org.terracotta.entity.PassiveSynchronizationChannel; -import org.terracotta.entity.ReconnectRejectedException; import org.terracotta.entity.StateDumpCollector; /** @@ -50,11 +50,17 @@ public VoltronReadWriteLockActiveEntity(ClientCommunicator communicator) { } @Override - public LockTransition invoke(ClientDescriptor client, LockOperation message) { + public LockTransition invokeActive(ActiveInvokeContext context, LockOperation message) { + ClientDescriptor clientDescriptor = context.getClientDescriptor(); + return invokeActive(clientDescriptor, message); + } + + private LockTransition invokeActive(ClientDescriptor clientDescriptor, LockOperation message) { switch (message.getOperation()) { - case TRY_ACQUIRE: return tryAcquire(client, message.getHoldType()); - case ACQUIRE: return acquire(client, message.getHoldType()); - case RELEASE: return release(client, message.getHoldType()); + case TRY_ACQUIRE: + return tryAcquire(clientDescriptor, message.getHoldType()); + case ACQUIRE: return acquire(clientDescriptor, message.getHoldType()); + case RELEASE: return release(clientDescriptor, message.getHoldType()); default: throw new AssertionError(); } } @@ -105,7 +111,8 @@ public ReconnectHandler startReconnect() { releaseListeners.add(clientDescriptor); } else { try { - if (!invoke(clientDescriptor, LockMessaging.codec().decodeMessage(bytes)).isAcquired()) { + LockOperation message = LockMessaging.codec().decodeMessage(bytes); + if (!invokeActive(clientDescriptor, message).isAcquired()) { throw new IllegalStateException("Unexpected lock acquisition failure during reconnect"); } } catch (MessageCodecException ex) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java similarity index 86% rename from clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java index 98451e0ad1..2ac4a54513 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java @@ -17,9 +17,10 @@ import org.ehcache.clustered.common.internal.lock.LockMessaging.LockOperation; import org.ehcache.clustered.common.internal.lock.LockMessaging.LockTransition; +import org.terracotta.entity.InvokeContext; import org.terracotta.entity.PassiveServerEntity; -class VoltronReadWriteLockPassiveEntity implements PassiveServerEntity { +final class VoltronReadWriteLockPassiveEntity implements PassiveServerEntity { static final VoltronReadWriteLockPassiveEntity INSTANCE = new VoltronReadWriteLockPassiveEntity(); @@ -28,7 +29,7 @@ private VoltronReadWriteLockPassiveEntity() { } @Override - public void invoke(LockOperation message) { + public void invokePassive(InvokeContext context, LockOperation message) { throw new AssertionError("Unexpected message at passive " + message); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java similarity index 97% rename from clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java index 76f111eb65..b73b4930b8 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java @@ -90,7 +90,7 @@ public SyncMessageCodec getSyncMessageCodec() { return LockSyncMessaging.syncCodec(); } - private static final ServiceConfiguration config(final Class klazz) { + private static ServiceConfiguration config(Class klazz) { return () -> klazz; } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java similarity index 95% rename from clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java index 354d7b2e1d..792f2d05b7 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java @@ -31,12 +31,12 @@ public static SyncMessageCodec syncCodec() { private static final SyncMessageCodec SYNC_CODEC = new SyncMessageCodec() { @Override - public byte[] encode(int i, LockMessaging.LockOperation message) throws MessageCodecException { + public byte[] encode(int i, LockMessaging.LockOperation message) { throw new AssertionError(); } @Override - public LockMessaging.LockOperation decode(int i, byte[] bytes) throws MessageCodecException { + public LockMessaging.LockOperation decode(int i, byte[] bytes) { throw new AssertionError(); } }; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntity.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntity.java similarity index 97% rename from clustered/server/src/main/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntity.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntity.java index c4c578d6fe..4987d693dc 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntity.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntity.java @@ -33,7 +33,6 @@ import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ConfigurationException; import org.terracotta.entity.PassiveSynchronizationChannel; -import org.terracotta.entity.ReconnectRejectedException; import org.terracotta.entity.StateDumpCollector; import java.util.concurrent.atomic.AtomicBoolean; @@ -99,7 +98,7 @@ public void disconnected(ClientDescriptor clientDescriptor) { } @Override - public EhcacheEntityResponse invokeActive(ActiveInvokeContext invokeContext, EhcacheEntityMessage message) { + public EhcacheEntityResponse invokeActive(ActiveInvokeContext invokeContext, EhcacheEntityMessage message) { try { if (message instanceof EhcacheOperationMessage) { EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; @@ -129,7 +128,7 @@ public void synchronizeKeyToPassive(PassiveSynchronizationChannel getMessageCodec public SyncMessageCodec getSyncMessageCodec() { return new SyncMessageCodec() { @Override - public byte[] encode(int concurrencyKey, EhcacheEntityMessage response) throws MessageCodecException { + public byte[] encode(int concurrencyKey, EhcacheEntityMessage response) { throw new UnsupportedOperationException("This entity does not have sync messages"); } @Override - public EhcacheEntityMessage decode(int concurrencyKey, byte[] payload) throws MessageCodecException { + public EhcacheEntityMessage decode(int concurrencyKey, byte[] payload) { throw new UnsupportedOperationException("This entity does not have sync messages"); } }; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/CommunicatorServiceConfiguration.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/CommunicatorServiceConfiguration.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/CommunicatorServiceConfiguration.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/CommunicatorServiceConfiguration.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java similarity index 91% rename from clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java index 5cd2ef105a..5ac47d1b79 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java @@ -23,6 +23,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.terracotta.entity.ConcurrencyStrategy; +import org.ehcache.clustered.server.internal.messages.EhcacheMessageTrackerCatchup; import static java.util.Collections.singleton; @@ -55,6 +56,7 @@ public Set getKeysForSynchronization() { public static class DefaultConcurrencyStrategy implements ConcurrencyStrategy { public static final int DATA_CONCURRENCY_KEY_OFFSET = DEFAULT_KEY + 1; + public static final int TRACKER_SYNC_KEY = Integer.MAX_VALUE - 1; private final KeySegmentMapper mapper; @@ -69,6 +71,8 @@ public int concurrencyKey(EhcacheEntityMessage entityMessage) { } else if (entityMessage instanceof ConcurrentEntityMessage) { ConcurrentEntityMessage concurrentEntityMessage = (ConcurrentEntityMessage) entityMessage; return DATA_CONCURRENCY_KEY_OFFSET + mapper.getSegmentForKey(concurrentEntityMessage.concurrencyKey()); + } else if (entityMessage instanceof EhcacheMessageTrackerCatchup) { + return MANAGEMENT_KEY; } else { return DEFAULT_KEY; } @@ -80,6 +84,7 @@ public Set getKeysForSynchronization() { for (int i = 0; i <= mapper.getSegments(); i++) { result.add(DEFAULT_KEY + i); } + result.add(TRACKER_SYNC_KEY); return Collections.unmodifiableSet(result); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java similarity index 93% rename from clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java index c5cba5f45c..d24ce6da60 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java @@ -21,6 +21,7 @@ import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheMessageTrackerCatchup; import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessage; import org.terracotta.entity.ExecutionStrategy; @@ -50,6 +51,8 @@ public Location getExecutionLocation(EhcacheEntityMessage message) { return Location.ACTIVE; } else if (message instanceof PassiveReplicationMessage) { return Location.PASSIVE; + } else if (message instanceof EhcacheMessageTrackerCatchup) { + return Location.PASSIVE; } else if (message instanceof EhcacheSyncMessage) { throw new AssertionError("Unexpected use of ExecutionStrategy for sync messages"); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreCompatibility.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/ServerStoreCompatibility.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreCompatibility.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/ServerStoreCompatibility.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java similarity index 93% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java index 9099b341f2..39a1fc7ad0 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java @@ -18,13 +18,9 @@ import org.ehcache.clustered.common.internal.store.Chain; -import com.tc.classloader.CommonComponent; - import java.util.Collections; -import java.util.HashMap; import java.util.Map; -@CommonComponent public class EhcacheDataSyncMessage extends EhcacheSyncMessage { private final Map chainMap; diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerCatchup.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerCatchup.java new file mode 100644 index 0000000000..afae8434a0 --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerCatchup.java @@ -0,0 +1,40 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.internal.messages; + +import java.util.Collection; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; + +import org.terracotta.client.message.tracker.RecordedMessage; + +/** + * Message sending messages that are tracked for duplication. If a passive becoming active receives + * a duplicate, it needs to discard it. + */ +public class EhcacheMessageTrackerCatchup extends EhcacheEntityMessage { + + private final Collection> trackedMessages; + + public EhcacheMessageTrackerCatchup(Collection> trackedMessages) { + this.trackedMessages = trackedMessages; + } + + public Collection> getTrackedMessages() { + return trackedMessages; + } +} diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessage.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessage.java new file mode 100644 index 0000000000..ca6c6fdf1c --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessage.java @@ -0,0 +1,43 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; + +import java.util.Map; + +/** + * Message sending messages that are tracked for duplication. If a passive becoming active receives + * a duplicate, it needs to discard it. + */ +public class EhcacheMessageTrackerMessage extends EhcacheSyncMessage { + + private final Map> trackedMessages; + + public EhcacheMessageTrackerMessage(Map> trackedMessages) { + this.trackedMessages = trackedMessages; + } + + @Override + public SyncMessageType getMessageType() { + return SyncMessageType.MESSAGE_TRACKER; + } + + public Map> getTrackedMessages() { + return trackedMessages; + } +} diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java new file mode 100644 index 0000000000..fa03069f68 --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java @@ -0,0 +1,179 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.internal.messages.EhcacheCodec; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.entity.MessageCodec; +import org.terracotta.runnel.decoding.Enm; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.terracotta.client.message.tracker.RecordedMessage; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.ClientSourceId; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; + +import static org.ehcache.clustered.common.internal.messages.BaseCodec.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; +/** + * EhcacheServerCodec + */ +public class EhcacheServerCodec implements MessageCodec { + + private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheServerCodec.class); + + private final EhcacheCodec clientCodec; + private final PassiveReplicationMessageCodec replicationCodec; + + private static final String CLIENT_ID = "client"; + private static final String TRANSACTION_ID = "transaction"; + private static final String MESSAGE = "message"; + private static final String MESSAGE_SEQUENCE = "sequence"; + + private static final Struct RECORDED_MESSAGE = newStructBuilder() + .int64(CLIENT_ID, 10) + .int64(TRANSACTION_ID, 20) + .byteBuffer(MESSAGE, 30) + .build(); + + private static final Struct MESSAGE_HISTORY = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .structs(MESSAGE_SEQUENCE, 20, RECORDED_MESSAGE) + .build(); + + public EhcacheServerCodec(EhcacheCodec clientCodec, PassiveReplicationMessageCodec replicationCodec) { + this.clientCodec = clientCodec; + this.replicationCodec = replicationCodec; + } + + @Override + public byte[] encodeMessage(EhcacheEntityMessage message) { + if (message instanceof PassiveReplicationMessage) { + return replicationCodec.encode((PassiveReplicationMessage) message); + } else if (message instanceof EhcacheMessageTrackerCatchup) { + return encodeCatchup((EhcacheMessageTrackerCatchup)message); + } + return clientCodec.encodeMessage(message); + } + + private byte[] encodeCatchup(EhcacheMessageTrackerCatchup catchup) { + return MESSAGE_HISTORY.encoder() + .enm(MESSAGE_TYPE_FIELD_NAME, EhcacheMessageType.MESSAGE_CATCHUP) + .structs(MESSAGE_SEQUENCE, catchup.getTrackedMessages(), (encoder, value) -> { + encoder.int64(CLIENT_ID, value.getClientSourceId().toLong()); + encoder.int64(TRANSACTION_ID, value.getTransactionId()); + encoder.byteBuffer(MESSAGE, ByteBuffer.wrap(encodeMessage(value.getRequest()))); + }).encode().array(); + } + + @Override + public EhcacheEntityMessage decodeMessage(byte[] payload) { + return decodeMessage(ByteBuffer.wrap(payload)); + } + + private EhcacheEntityMessage decodeMessage(ByteBuffer byteBuffer) { + Enm opCodeEnm = EhcacheCodec.OP_CODE_DECODER.decoder(byteBuffer).enm(MESSAGE_TYPE_FIELD_NAME); + if (!opCodeEnm.isFound()) { + throw new AssertionError("Got a message without an opCode"); + } + if (!opCodeEnm.isValid()) { + LOGGER.warn("Received message with unknown operation code - more recent version at the other end?"); + return null; + } + + byteBuffer.rewind(); + + EhcacheMessageType messageType = opCodeEnm.get(); + if (messageType == EhcacheMessageType.MESSAGE_CATCHUP) { + return decodeCatchup(byteBuffer); + } else if (isPassiveReplicationMessage(messageType)) { + return replicationCodec.decode(messageType, byteBuffer); + } + return clientCodec.decodeMessage(byteBuffer, messageType); + } + + @Override + public byte[] encodeResponse(EhcacheEntityResponse response) { + return clientCodec.encodeResponse(response); + } + + @Override + public EhcacheEntityResponse decodeResponse(byte[] payload) { + return clientCodec.decodeResponse(payload); + } + + private EhcacheMessageTrackerCatchup decodeCatchup(ByteBuffer payload) { + StructArrayDecoder> array = MESSAGE_HISTORY.decoder(payload).structs(MESSAGE_SEQUENCE); + if (array == null) { + return new EhcacheMessageTrackerCatchup(Collections.emptyList()); + } + List> list = new ArrayList<>(array.length()); + while (array.hasNext()) { + StructDecoder>> decoder = array.next(); + long cid = decoder.int64(CLIENT_ID); + long transaction = decoder.int64(TRANSACTION_ID); + ByteBuffer buff = decoder.byteBuffer(MESSAGE); + EhcacheEntityMessage msg = decodeMessage(buff); + + list.add(new RecordedMessage() { + @Override + public ClientSourceId getClientSourceId() { + return new ClientSourceId() { + @Override + public long toLong() { + return cid; + } + + @Override + public boolean matches(ClientDescriptor cd) { + return cd.getSourceId().toLong() == cid; + } + }; + } + + @Override + public long getTransactionId() { + return transaction; + } + + @Override + public EhcacheEntityMessage getRequest() { + return msg; + } + + @Override + public EhcacheEntityResponse getResponse() { + return null; + } + }); + } + return new EhcacheMessageTrackerCatchup(list); + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java similarity index 89% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java index 84aa466394..dd4d260f83 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java @@ -20,11 +20,11 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.ResponseCodec; -import org.ehcache.clustered.common.internal.messages.StateRepositoryOpCodec; import org.ehcache.clustered.common.internal.store.Chain; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.entity.SyncMessageCodec; +import org.terracotta.runnel.EnumMapping; import org.terracotta.runnel.Struct; import org.terracotta.runnel.decoding.Enm; import org.terracotta.runnel.decoding.StructArrayDecoder; @@ -32,6 +32,7 @@ import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -47,15 +48,14 @@ import static org.ehcache.clustered.server.internal.messages.SyncMessageType.DATA; import static org.ehcache.clustered.server.internal.messages.SyncMessageType.MESSAGE_TRACKER; import static org.ehcache.clustered.server.internal.messages.SyncMessageType.STATE_REPO; -import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_FIELD_INDEX; -import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_FIELD_NAME; -import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_MAPPING; +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; import static org.terracotta.runnel.StructBuilder.newStructBuilder; public class EhcacheSyncMessageCodec implements SyncMessageCodec { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheSyncMessageCodec.class); + private static final String SYNC_MESSAGE_TYPE_FIELD = "msgType"; private static final String CHAIN_FIELD = "chain"; private static final String CHAIN_MAP_ENTRIES_SUB_STRUCT = "entries"; private static final String STATE_REPO_ENTRIES_SUB_STRUCT = "mappings"; @@ -67,13 +67,20 @@ public class EhcacheSyncMessageCodec implements SyncMessageCodec SYNC_MESSAGE_TYPE_MAPPING = newEnumMappingBuilder(SyncMessageType.class) + .mapping(STATE_REPO, 1) + .mapping(DATA, 10) + .mapping(MESSAGE_TRACKER, 20) + .build(); + private static final Struct CHAIN_MAP_ENTRY_STRUCT = newStructBuilder() .int64(KEY_FIELD, 10) .struct(CHAIN_FIELD, 20, CHAIN_STRUCT) .build(); private static final Struct DATA_SYNC_STRUCT = newStructBuilder() - .enm(SYNC_MESSAGE_TYPE_FIELD_NAME, SYNC_MESSAGE_TYPE_FIELD_INDEX, SYNC_MESSAGE_TYPE_MAPPING) + .enm(SYNC_MESSAGE_TYPE_FIELD, SYNC_MESSAGE_TYPE_FIELD_INDEX, SYNC_MESSAGE_TYPE_MAPPING) .structs(CHAIN_MAP_ENTRIES_SUB_STRUCT, 20, CHAIN_MAP_ENTRY_STRUCT) .build(); @@ -83,7 +90,7 @@ public class EhcacheSyncMessageCodec implements SyncMessageCodec encoder = MESSAGE_TRACKER_SYNC_STRUCT.encoder(); encoder - .enm(SYNC_MESSAGE_TYPE_FIELD_NAME, MESSAGE_TRACKER) + .enm(SYNC_MESSAGE_TYPE_FIELD, MESSAGE_TRACKER) .structs(MESSAGE_TRACKER_CLIENTS_STRUCT, syncMessage.getTrackedMessages().entrySet(), (clientEncoder, entry) -> { Map responses = entry.getValue(); @@ -146,8 +153,8 @@ private byte[] encodeMessageTrackerSync(EhcacheMessageTrackerMessage syncMessage responseEncoder.byteBuffer(MESSAGE_TRACKER_RESPONSE_FIELD, encodeResponse(response.getValue())); }); } - }) - .int32(MESSAGE_TRACKER_SEGMENT_FIELD, syncMessage.getSegmentId()); + }); + return encoder.encode().array(); } @@ -157,7 +164,7 @@ private ByteBuffer encodeResponse(EhcacheEntityResponse response) { private byte[] encodeStateRepoSync(EhcacheStateRepoSyncMessage syncMessage) { StructEncoder encoder = STATE_REPO_SYNC_STRUCT.encoder(); - encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, STATE_REPO) + encoder.enm(SYNC_MESSAGE_TYPE_FIELD, STATE_REPO) .string(SERVER_STORE_NAME_FIELD, syncMessage.getCacheId()) .string(STATE_REPO_MAP_NAME_FIELD, syncMessage.getMapId()); encoder.structs(STATE_REPO_ENTRIES_SUB_STRUCT, syncMessage.getMappings().entrySet(), @@ -169,11 +176,11 @@ private byte[] encodeStateRepoSync(EhcacheStateRepoSyncMessage syncMessage) { private byte[] encodeDataSync(EhcacheDataSyncMessage syncMessage) { StructEncoder encoder; encoder = DATA_SYNC_STRUCT.encoder(); - encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, DATA); + encoder.enm(SYNC_MESSAGE_TYPE_FIELD, DATA); encoder.structs(CHAIN_MAP_ENTRIES_SUB_STRUCT, syncMessage.getChainMap().entrySet(), (entryEncoder, entry) -> { entryEncoder.int64(KEY_FIELD, entry.getKey()); - entryEncoder.struct(CHAIN_FIELD, entry.getValue(), ChainCodec::encode); + entryEncoder.struct(CHAIN_FIELD, entry.getValue(), ChainCodec::encodeChain); }); return encoder.encode().array(); } @@ -182,7 +189,7 @@ private byte[] encodeDataSync(EhcacheDataSyncMessage syncMessage) { public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) { ByteBuffer message = wrap(payload); StructDecoder decoder = DATA_SYNC_STRUCT.decoder(message); - Enm enm = decoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME); + Enm enm = decoder.enm(SYNC_MESSAGE_TYPE_FIELD); if (!enm.isFound()) { throw new AssertionError("Invalid message format - misses the message type field"); } @@ -230,8 +237,7 @@ private EhcacheSyncMessage decodeMessageTracker(ByteBuffer message) { } } } - Integer segmentId = decoder.int32(MESSAGE_TRACKER_SEGMENT_FIELD); - return new EhcacheMessageTrackerMessage(segmentId, trackedMessages); + return new EhcacheMessageTrackerMessage(trackedMessages); } private EhcacheSyncMessage decodeStateRepoSync(ByteBuffer message) { @@ -258,20 +264,22 @@ private EhcacheSyncMessage decodeDataSync(ByteBuffer message) { } private Map decodeChainMapEntries(StructDecoder decoder) { - Map chainMap = new HashMap<>(); - StructArrayDecoder> entriesDecoder = decoder.structs(CHAIN_MAP_ENTRIES_SUB_STRUCT); + if (entriesDecoder != null) { - for (int i = 0; i < entriesDecoder.length(); i++) { + int len = entriesDecoder.length(); + Map chainMap = new HashMap<>((int) ((float) len / 0.75f + 1.0f)); + for (int i = 0; i < len; i++) { StructDecoder entryDecoder = entriesDecoder.next(); Long key = entryDecoder.int64(KEY_FIELD); StructDecoder chainDecoder = entryDecoder.struct(CHAIN_FIELD); - Chain chain = ChainCodec.decode(chainDecoder); + Chain chain = ChainCodec.decodeChain(chainDecoder); chainMap.put(key, chain); entryDecoder.end(); } + return chainMap; + } else { + return Collections.emptyMap(); } - return chainMap; } - } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java similarity index 90% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java index c716ad734d..9150683a93 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java @@ -21,12 +21,13 @@ import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.common.internal.store.Util; import java.util.List; import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import static org.ehcache.clustered.common.internal.util.ChainBuilder.chainFromList; + /** * This message is sent by the Active Entity to Passive Entity. */ @@ -49,10 +50,14 @@ public ChainReplicationMessage(long key, Chain chain, long transactionId, long o } private Chain dropLastElement(Chain chain) { - List elements = StreamSupport.stream(chain.spliterator(), false) - .collect(Collectors.toList()); - elements.remove(elements.size() -1); // remove last - return Util.getChain(elements); + if (chain.isEmpty()) { + return chain; + } else { + List elements = StreamSupport.stream(chain.spliterator(), false) + .collect(Collectors.toList()); + elements.remove(elements.size() - 1); // remove last + return chainFromList(elements); + } } public long getClientId() { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java similarity index 76% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java index 5de3839962..4099ea4ddb 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java @@ -19,20 +19,18 @@ import org.ehcache.clustered.common.internal.messages.ChainCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; -import org.ehcache.clustered.common.internal.messages.MessageCodecUtils; import org.ehcache.clustered.common.internal.store.Chain; import org.terracotta.runnel.Struct; import org.terracotta.runnel.decoding.StructDecoder; -import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; -import java.util.UUID; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.BaseCodec.MESSAGE_TYPE_FIELD_NAME; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.encodeMandatoryFields; import static org.terracotta.runnel.StructBuilder.newStructBuilder; public class PassiveReplicationMessageCodec { @@ -61,14 +59,7 @@ public class PassiveReplicationMessageCodec { .int64(KEY_FIELD, 20) .build(); - private final MessageCodecUtils messageCodecUtils; - - public PassiveReplicationMessageCodec() { - this.messageCodecUtils = new MessageCodecUtils(); - } - public byte[] encode(PassiveReplicationMessage message) { - switch (message.getMessageType()) { case CHAIN_REPLICATION_OP: return encodeChainReplicationMessage((PassiveReplicationMessage.ChainReplicationMessage) message); @@ -82,34 +73,24 @@ public byte[] encode(PassiveReplicationMessage message) { } private byte[] encodeInvalidationCompleteMessage(PassiveReplicationMessage.InvalidationCompleteMessage message) { - StructEncoder encoder = INVALIDATION_COMPLETE_STRUCT.encoder(); - - encoder.enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) - .int64(KEY_FIELD, message.getKey()); - - return encoder.encode().array(); + return encodeMandatoryFields(INVALIDATION_COMPLETE_STRUCT, message) + .int64(KEY_FIELD, message.getKey()) + .encode().array(); } private byte[] encodeClearInvalidationCompleteMessage(PassiveReplicationMessage.ClearInvalidationCompleteMessage message) { - StructEncoder encoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.encoder(); - - encoder.enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()); - - return encoder.encode().array(); + return encodeMandatoryFields(CLEAR_INVALIDATION_COMPLETE_STRUCT, message) + .encode().array(); } private byte[] encodeChainReplicationMessage(PassiveReplicationMessage.ChainReplicationMessage message) { - StructEncoder encoder = CHAIN_REPLICATION_STRUCT.encoder(); - - messageCodecUtils.encodeMandatoryFields(encoder, message); - - encoder.int64(TRANSACTION_ID_FIELD, message.getTransactionId()); - encoder.int64(CLIENT_ID_FIELD, message.getClientId()); - encoder.int64(OLDEST_TRANSACTION_ID_FIELD, message.getOldestTransactionId()); - encoder.int64(KEY_FIELD, message.getKey()); - encoder.struct(CHAIN_FIELD, message.getChain(), ChainCodec::encode); - - return encoder.encode().array(); + return encodeMandatoryFields(CHAIN_REPLICATION_STRUCT, message) + .int64(TRANSACTION_ID_FIELD, message.getTransactionId()) + .int64(CLIENT_ID_FIELD, message.getClientId()) + .int64(OLDEST_TRANSACTION_ID_FIELD, message.getOldestTransactionId()) + .int64(KEY_FIELD, message.getKey()) + .struct(CHAIN_FIELD, message.getChain(), ChainCodec::encodeChain) + .encode().array(); } public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { @@ -147,7 +128,7 @@ private PassiveReplicationMessage.ChainReplicationMessage decodeChainReplication Long oldestTransactionId = decoder.int64(OLDEST_TRANSACTION_ID_FIELD); Long key = decoder.int64(KEY_FIELD); - Chain chain = ChainCodec.decode(decoder.struct(CHAIN_FIELD)); + Chain chain = ChainCodec.decodeChain(decoder.struct(CHAIN_FIELD)); return new PassiveReplicationMessage.ChainReplicationMessage(key, chain, currentTransactionId, oldestTransactionId, clientId); } diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagement.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagement.java new file mode 100644 index 0000000000..854376401f --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagement.java @@ -0,0 +1,108 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.server.ServerSideServerStore; +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.entity.ConfigurationException; +import org.terracotta.entity.ServiceException; +import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.service.monitoring.EntityManagementRegistry; +import org.terracotta.management.service.monitoring.EntityManagementRegistryConfiguration; + +import java.io.Closeable; +import java.util.concurrent.CompletableFuture; + +import static org.ehcache.clustered.server.management.Notification.EHCACHE_SERVER_STORE_CREATED; + +public class ClusterTierManagement implements Closeable { + + private static final Logger LOGGER = LoggerFactory.getLogger(ClusterTierManagement.class); + + private final EntityManagementRegistry managementRegistry; + private final EhcacheStateService ehcacheStateService; + private final String storeIdentifier; + + public ClusterTierManagement(ServiceRegistry services, EhcacheStateService ehcacheStateService, boolean active, String storeIdentifier, String clusterTierManagerIdentifier) throws ConfigurationException { + this.ehcacheStateService = ehcacheStateService; + this.storeIdentifier = storeIdentifier; + + // create an entity monitoring service that allows this entity to push some management information into voltron monitoring service + try { + managementRegistry = services.getService(new EntityManagementRegistryConfiguration(services, active)); + } catch (ServiceException e) { + throw new ConfigurationException("Unable to retrieve service: " + e.getMessage()); + } + + if (managementRegistry != null) { + // expose settings about server stores + managementRegistry.addManagementProvider(new ServerStoreSettingsManagementProvider(clusterTierManagerIdentifier, ehcacheStateService.getDefaultServerResource())); + // expose settings about pools + managementRegistry.addManagementProvider(new PoolSettingsManagementProvider()); + + // expose stats about server stores + managementRegistry.addManagementProvider(new ServerStoreStatisticsManagementProvider()); + // expose stats about pools + managementRegistry.addManagementProvider(new PoolStatisticsManagementProvider(ehcacheStateService)); + } + } + + @Override + public void close() { + if (managementRegistry != null) { + managementRegistry.close(); + } + } + + public void entityCreated() { + if (managementRegistry != null) { + LOGGER.trace("entityCreated({})", storeIdentifier); + managementRegistry.entityCreated(); + init(); + } + } + + public void entityPromotionCompleted() { + if (managementRegistry != null) { + LOGGER.trace("entityPromotionCompleted({})", storeIdentifier); + managementRegistry.entityPromotionCompleted(); + init(); + } + } + + // the goal of the following code is to send the management metadata from the entity into the monitoring tree AFTER the entity creation + private void init() { + ServerSideServerStore serverStore = ehcacheStateService.getStore(storeIdentifier); + ServerStoreBinding serverStoreBinding = new ServerStoreBinding(storeIdentifier, serverStore); + CompletableFuture r1 = managementRegistry.register(serverStoreBinding); + ServerSideConfiguration.Pool pool = ehcacheStateService.getDedicatedResourcePool(storeIdentifier); + CompletableFuture allOf; + if (pool != null) { + allOf = CompletableFuture.allOf(r1, managementRegistry.register(new PoolBinding(storeIdentifier, pool, PoolBinding.AllocationType.DEDICATED))); + } else { + allOf = r1; + } + allOf.thenRun(() -> { + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(serverStoreBinding, EHCACHE_SERVER_STORE_CREATED.name()); + }); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagerBinding.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagerBinding.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagerBinding.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagerBinding.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagerSettingsManagementProvider.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagerSettingsManagementProvider.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagerSettingsManagementProvider.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagerSettingsManagementProvider.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/Management.java similarity index 87% rename from clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/Management.java index e1afd1b8f7..90441496af 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -82,24 +82,19 @@ protected void registerClusterTierManagerSettingsProvider() { getManagementRegistry().addManagementProvider(new ClusterTierManagerSettingsManagementProvider()); } - public void reload() { + public void entityCreated() { if (managementRegistry != null) { - managementRegistry.entityPromotionCompleted(); + LOGGER.trace("entityCreated()"); + managementRegistry.entityCreated(); init(); } } - // the goal of the following code is to send the management metadata from the entity into the monitoring tre AFTER the entity creation - public void init() { + public void entityPromotionCompleted() { if (managementRegistry != null) { - LOGGER.trace("init()"); - - CompletableFuture.allOf( - managementRegistry.register(generateClusterTierManagerBinding()), - // PoolBinding.ALL_SHARED is a marker so that we can send events not specifically related to 1 pool - // this object is ignored from the stats and descriptors - managementRegistry.register(PoolBinding.ALL_SHARED) - ).thenRun(managementRegistry::refresh); + LOGGER.trace("entityPromotionCompleted()"); + managementRegistry.entityPromotionCompleted(); + init(); } } @@ -118,4 +113,14 @@ public void sharedPoolsConfigured() { } } + // the goal of the following code is to send the management metadata from the entity into the monitoring tre AFTER the entity creation + private void init() { + CompletableFuture.allOf( + managementRegistry.register(generateClusterTierManagerBinding()), + // PoolBinding.ALL_SHARED is a marker so that we can send events not specifically related to 1 pool + // this object is ignored from the stats and descriptors + managementRegistry.register(PoolBinding.ALL_SHARED) + ).thenRun(managementRegistry::refresh); + } + } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Notification.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/Notification.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/management/Notification.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/Notification.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java similarity index 86% rename from clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java index bcbc015abf..867674a8a5 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java @@ -17,11 +17,11 @@ import org.ehcache.clustered.server.state.EhcacheStateService; import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.StatisticRegistry; import org.terracotta.management.registry.Named; import org.terracotta.management.registry.RequiredContext; import org.terracotta.management.registry.ExposedObject; import org.terracotta.management.registry.collect.StatisticProvider; -import org.terracotta.management.registry.collect.StatisticRegistry; import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; import org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; @@ -31,7 +31,7 @@ import java.util.stream.Collectors; import static java.util.Arrays.asList; -import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; +import static org.terracotta.statistics.registry.ValueStatisticDescriptor.descriptor; @Named("PoolStatistics") @RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) @@ -53,17 +53,17 @@ public Collection> getExposedObjects() { @Override protected StatisticRegistry getStatisticRegistry(PoolBinding managedObject) { if (managedObject == PoolBinding.ALL_SHARED) { - return StatisticRegistry.noop(); + return new StatisticRegistry(null, () -> getTimeSource().getTimestamp()); } String poolName = managedObject.getAlias(); PoolBinding.AllocationType allocationType = managedObject.getAllocationType(); if (allocationType == PoolBinding.AllocationType.DEDICATED) { - return new StatisticRegistry(ehcacheStateService.getDedicatedResourcePageSource(poolName)); + return new StatisticRegistry(ehcacheStateService.getDedicatedResourcePageSource(poolName), () -> getTimeSource().getTimestamp()); } else { - return new StatisticRegistry(ehcacheStateService.getSharedResourcePageSource(poolName)); + return new StatisticRegistry(ehcacheStateService.getSharedResourcePageSource(poolName), () -> getTimeSource().getTimestamp()); } } @@ -76,7 +76,7 @@ private static class PoolExposedStatistics extends AbstractExposedStatistics { + + private final String clusterTierManagerIdentifier; + private final String defaultServerResource; + + ServerStoreSettingsManagementProvider(String clusterTierManagerIdentifier, String defaultServerResource) { + super(ServerStoreBinding.class); + this.clusterTierManagerIdentifier = clusterTierManagerIdentifier; + this.defaultServerResource = defaultServerResource; + } + + @Override + public Collection getDescriptors() { + Collection descriptors = new ArrayList<>(super.getDescriptors()); + descriptors.add(new Settings() + .set("type", getCapabilityName()) + .set("clusterTierManager", clusterTierManagerIdentifier) + .set("time", System.currentTimeMillis()) + .set("defaultServerResource", defaultServerResource)); + return descriptors; + } + + @Override + protected ExposedServerStoreBinding internalWrap(Context context, ServerStoreBinding managedObject) { + if (defaultServerResource != null) { + context = context.with("defaultServerResource", defaultServerResource); + } + return new ExposedServerStoreBinding(context, managedObject); + } + + private static class ExposedServerStoreBinding extends ExposedAliasBinding { + + ExposedServerStoreBinding(Context context, ServerStoreBinding binding) { + super(context.with("type", "ServerStore"), binding); + } + + @Override + public Collection getDescriptors() { + return Collections.singleton(getSettings()); + } + + Settings getSettings() { + // names taken from ServerStoreConfiguration.isCompatible() + ServerSideServerStore value = getBinding().getValue(); + PoolAllocation poolAllocation = value.getStoreConfiguration().getPoolAllocation(); + Settings settings = new Settings(getContext()) + .set("resourcePoolType", poolAllocation.getClass().getSimpleName().toLowerCase()); + if (value instanceof MapInternals) { + MapInternals internals = (MapInternals) value; + settings.set("allocatedMemoryAtTime", internals.getAllocatedMemory()) + .set("tableCapacityAtTime", internals.getTableCapacity()) + .set("vitalMemoryAtTime", internals.getVitalMemory()) + .set("longSizeAtTime", internals.getSize()) + .set("dataAllocatedMemoryAtTime", internals.getDataAllocatedMemory()) + .set("dataOccupiedMemoryAtTime", internals.getDataOccupiedMemory()) + .set("dataSizeAtTime", internals.getDataSize()) + .set("dataVitalMemoryAtTime", internals.getDataVitalMemory()); + } + if (poolAllocation instanceof PoolAllocation.DedicatedPoolAllocation) { + String resourceName = ((PoolAllocation.DedicatedPoolAllocation) poolAllocation).getResourceName(); + settings.set("resourcePoolDedicatedResourceName", resourceName != null ? resourceName : settings.getString("defaultServerResource")); + settings.set("resourcePoolDedicatedSize", ((PoolAllocation.DedicatedPoolAllocation) poolAllocation).getSize()); + } else if (poolAllocation instanceof PoolAllocation.SharedPoolAllocation) { + settings.set("resourcePoolSharedPoolName", ((PoolAllocation.SharedPoolAllocation) poolAllocation).getResourcePoolName()); + } + return settings; + } + } + +} diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java new file mode 100644 index 0000000000..c5696c8016 --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java @@ -0,0 +1,68 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.StatisticRegistry; +import org.terracotta.management.registry.Named; +import org.terracotta.management.registry.RequiredContext; +import org.terracotta.management.registry.collect.StatisticProvider; +import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; +import org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; + +import java.util.HashSet; +import java.util.Set; + +import static java.util.Arrays.asList; +import static org.terracotta.statistics.registry.ValueStatisticDescriptor.descriptor; + +@Named("ServerStoreStatistics") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +@StatisticProvider +class ServerStoreStatisticsManagementProvider extends AbstractStatisticsManagementProvider { + + ServerStoreStatisticsManagementProvider() { + super(ServerStoreBinding.class); + } + + @Override + protected AbstractExposedStatistics internalWrap(Context context, ServerStoreBinding managedObject, StatisticRegistry statisticRegistry) { + return new ServerStoreExposedStatistics(context, managedObject, statisticRegistry); + } + + private static class ServerStoreExposedStatistics extends AbstractExposedStatistics { + + ServerStoreExposedStatistics(Context context, ServerStoreBinding binding, StatisticRegistry statisticRegistry) { + super(context.with("type", "ServerStore"), binding, statisticRegistry); + + getStatisticRegistry().registerStatistic("AllocatedMemory", descriptor("allocatedMemory", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("DataAllocatedMemory", descriptor("dataAllocatedMemory", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("OccupiedMemory", descriptor("occupiedMemory", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("DataOccupiedMemory", descriptor("dataOccupiedMemory", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("Entries", descriptor("entries", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("UsedSlotCount", descriptor("usedSlotCount", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("DataVitalMemory", descriptor("dataVitalMemory", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("VitalMemory", descriptor("vitalMemory", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("RemovedSlotCount", descriptor("removedSlotCount", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("DataSize", descriptor("dataSize", tags("tier", "Store"))); + getStatisticRegistry().registerStatistic("TableCapacity", descriptor("tableCapacity", tags("tier", "Store"))); + } + + } + + private static Set tags(String... tags) {return new HashSet<>(asList(tags));} + +} diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierActiveEntity.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierActiveEntity.java new file mode 100644 index 0000000000..3519f5bbd0 --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierActiveEntity.java @@ -0,0 +1,1029 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.store; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.exceptions.InvalidOperationException; +import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; +import org.ehcache.clustered.common.internal.exceptions.LifecycleException; +import org.ehcache.clustered.common.internal.messages.ClusterTierReconnectMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.server.internal.messages.EhcacheMessageTrackerCatchup; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateServerStore; +import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAllAck; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.EnableEventListenerMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.IteratorAdvanceMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.IteratorCloseMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.IteratorOpenMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.KeyBasedServerStoreOpMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.LockMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.UnlockMessage; +import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.ClusterTierEntityConfiguration; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.server.CommunicatorServiceConfiguration; +import org.ehcache.clustered.server.KeySegmentMapper; +import org.ehcache.clustered.server.ServerSideServerStore; +import org.ehcache.clustered.server.ServerStoreCompatibility; +import org.ehcache.clustered.server.ServerStoreEventListener; +import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheMessageTrackerMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; +import org.ehcache.clustered.server.management.ClusterTierManagement; +import org.ehcache.clustered.server.offheap.InternalChain; +import org.ehcache.clustered.server.state.EhcacheStateContext; +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; +import org.ehcache.clustered.server.state.config.EhcacheStoreStateServiceConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.client.message.tracker.OOOMessageHandler; +import org.terracotta.client.message.tracker.OOOMessageHandlerConfiguration; +import org.terracotta.entity.ActiveInvokeContext; +import org.terracotta.entity.ActiveServerEntity; +import org.terracotta.entity.BasicServiceConfiguration; +import org.terracotta.entity.ClientCommunicator; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.ClientSourceId; +import org.terracotta.entity.ConfigurationException; +import org.terracotta.entity.EntityUserException; +import org.terracotta.entity.IEntityMessenger; +import org.terracotta.entity.InvokeContext; +import org.terracotta.entity.MessageCodecException; +import org.terracotta.entity.PassiveSynchronizationChannel; +import org.terracotta.entity.ServiceException; +import org.terracotta.entity.ServiceRegistry; +import org.terracotta.entity.StateDumpCollector; + +import java.nio.ByteBuffer; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.SynchronousQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import static java.util.Collections.emptyMap; +import static java.util.stream.Collectors.toMap; +import static java.util.stream.Collectors.toSet; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.allInvalidationDone; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateAll; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateHash; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.failure; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.getResponse; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.hashInvalidationDone; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.iteratorBatchResponse; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.lockFailure; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.lockSuccess; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.resolveRequest; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverAppend; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverInvalidateHash; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.success; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; +import static org.ehcache.clustered.server.ConcurrencyStrategies.DEFAULT_KEY; +import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.TRACKER_SYNC_KEY; + +/** + * ClusterTierActiveEntity + */ +public class ClusterTierActiveEntity implements ActiveServerEntity { + + private static final Logger LOGGER = LoggerFactory.getLogger(ClusterTierActiveEntity.class); + static final String SYNC_DATA_SIZE_PROP = "ehcache.sync.data.size.threshold"; + private static final long DEFAULT_SYNC_DATA_SIZE_THRESHOLD = 2 * 1024 * 1024; + static final String SYNC_DATA_GETS_PROP = "ehcache.sync.data.gets.threshold"; + + // threshold for max number of chains per data sync message. + // typically hits this threshold first before data size threshold for caches having small sized values. + private static final int DEFAULT_SYNC_DATA_GETS_THRESHOLD = 8 * 1024; + + static final String CHAIN_COMPACTION_THRESHOLD_PROP = "ehcache.server.chain.compaction.threshold"; + private static final int DEFAULT_CHAIN_COMPACTION_THRESHOLD = 8; + + private final Executor syncGetsExecutor; + + private final String storeIdentifier; + private final ServerStoreConfiguration configuration; + private final ClientCommunicator clientCommunicator; + private final EhcacheStateService stateService; + private final OOOMessageHandler messageHandler; + private final IEntityMessenger entityMessenger; + private final ServerStoreCompatibility storeCompatibility = new ServerStoreCompatibility(); + private final AtomicInteger invalidationIdGenerator = new AtomicInteger(); + private final ConcurrentMap clientsWaitingForInvalidation = new ConcurrentHashMap<>(); + private final ReconnectMessageCodec reconnectMessageCodec = new ReconnectMessageCodec(); + private final ClusterTierManagement management; + private final String managerIdentifier; + private final Set eventListeners = new HashSet<>(); // accesses are synchronized on eventListeners itself + private final Map connectedClients = new ConcurrentHashMap<>(); + private final Map>>> liveIterators = new ConcurrentHashMap<>(); + private final int chainCompactionLimit; + private final ServerLockManager lockManager; + + private final long dataSizeThreshold = Long.getLong(SYNC_DATA_SIZE_PROP, DEFAULT_SYNC_DATA_SIZE_THRESHOLD); + private final int dataGetsThreshold = Integer.getInteger(SYNC_DATA_GETS_PROP, DEFAULT_SYNC_DATA_GETS_THRESHOLD); + private volatile Integer dataMapInitialCapacity = null; + + @SuppressWarnings("unchecked") + public ClusterTierActiveEntity(ServiceRegistry registry, ClusterTierEntityConfiguration entityConfiguration, KeySegmentMapper defaultMapper, Executor getSyncExecutor) throws ConfigurationException { + if (entityConfiguration == null) { + throw new ConfigurationException("ClusteredStoreEntityConfiguration cannot be null"); + } + storeIdentifier = entityConfiguration.getStoreIdentifier(); + configuration = entityConfiguration.getConfiguration(); + managerIdentifier = entityConfiguration.getManagerIdentifier(); + try { + clientCommunicator = registry.getService(new CommunicatorServiceConfiguration()); + stateService = registry.getService(new EhcacheStoreStateServiceConfig(entityConfiguration.getManagerIdentifier(), defaultMapper)); + entityMessenger = registry.getService(new BasicServiceConfiguration<>(IEntityMessenger.class)); + messageHandler = registry.getService(new OOOMessageHandlerConfiguration<>(managerIdentifier + "###" + storeIdentifier, + ClusterTierActiveEntity::isTrackedMessage)); + } catch (ServiceException e) { + throw new ConfigurationException("Unable to retrieve service: " + e.getMessage()); + } + if (entityMessenger == null) { + throw new AssertionError("Server failed to retrieve IEntityMessenger service."); + } + management = new ClusterTierManagement(registry, stateService, true, storeIdentifier, entityConfiguration.getManagerIdentifier()); + chainCompactionLimit = Integer.getInteger(CHAIN_COMPACTION_THRESHOLD_PROP, DEFAULT_CHAIN_COMPACTION_THRESHOLD); + if (configuration.isLoaderWriterConfigured()) { + lockManager = new LockManagerImpl(); + } else { + lockManager = new NoopLockManager(); + } + syncGetsExecutor = getSyncExecutor; + } + + static boolean isTrackedMessage(EhcacheEntityMessage msg) { + if (msg instanceof EhcacheOperationMessage) { + return EhcacheMessageType.isTrackedOperationMessage(((EhcacheOperationMessage) msg).getMessageType()); + } else { + return false; + } + } + + @Override + public void addStateTo(StateDumpCollector dump) { + ClusterTierDump.dump(dump, managerIdentifier, storeIdentifier, configuration); + Set clients = new HashSet<>(getConnectedClients()); + + List> allClients = new ArrayList<>(clients.size()); + for (ClientDescriptor entry : clients) { + Map clientMap = new HashMap<>(1); + clientMap.put("clientDescriptor", entry.toString()); + allClients.add(clientMap); + } + dump.addState("clientCount", String.valueOf(clients.size())); + dump.addState("clients", allClients); + } + + @Override + public void createNew() throws ConfigurationException { + ServerSideServerStore store = stateService.createStore(storeIdentifier, configuration, true); + store.setEventListener(new Listener()); + management.entityCreated(); + } + + @Override + public void loadExisting() { + stateService.loadStore(storeIdentifier, configuration).setEventListener(new Listener()); + management.entityPromotionCompleted(); + } + + private class Listener implements ServerStoreEventListener { + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + Set clients = new HashSet<>(getValidatedClients()); + for (ClientDescriptor clientDescriptor : clients) { + LOGGER.debug("SERVER: append happened in cache {}; notifying client {} ", storeIdentifier, clientDescriptor); + try { + clientCommunicator.sendNoResponse(clientDescriptor, serverAppend(appended.duplicate(), beforeAppend)); + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); + } + } + } + @Override + public void onEviction(long key, InternalChain evictedChain) { + Set clientsToInvalidate = new HashSet<>(getValidatedClients()); + if (!clientsToInvalidate.isEmpty()) { + Chain detachedChain = evictedChain.detach(); + for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { + LOGGER.debug("SERVER: eviction happened; asking client {} to invalidate hash {} from cache {}", clientDescriptorThatHasToInvalidate, key, storeIdentifier); + try { + boolean eventsEnabledForClient = isEventsEnabledFor(clientDescriptorThatHasToInvalidate); + clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, serverInvalidateHash(key, eventsEnabledForClient ? detachedChain : null)); + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); + } + } + } + } + } + + @Override + public void connected(ClientDescriptor clientDescriptor) { + connectedClients.put(clientDescriptor, Boolean.FALSE); + } + + @Override + public void disconnected(ClientDescriptor clientDescriptor) { + // cleanup all invalidation requests waiting for a ack from this client + Set invalidationIds = clientsWaitingForInvalidation.keySet(); + for (Integer invalidationId : invalidationIds) { + clientInvalidated(clientDescriptor, invalidationId); + } + + // cleanup all invalidation request this client was blocking on + for(Iterator> it = clientsWaitingForInvalidation.entrySet().iterator(); it.hasNext();) { + Map.Entry next = it.next(); + ClientDescriptor clientDescriptorWaitingForInvalidation = next.getValue().clientDescriptorWaitingForInvalidation; + if (clientDescriptorWaitingForInvalidation != null && clientDescriptorWaitingForInvalidation.equals(clientDescriptor)) { + it.remove(); + } + } + + lockManager.sweepLocksForClient(clientDescriptor, + configuration.isWriteBehindConfigured() ? null : heldKeys -> heldKeys.forEach(stateService.getStore(storeIdentifier)::remove)); + + liveIterators.remove(clientDescriptor); + + removeEventListener(clientDescriptor, stateService.getStore(storeIdentifier)); + + connectedClients.remove(clientDescriptor); + } + + @Override + public EhcacheEntityResponse invokeActive(ActiveInvokeContext context, EhcacheEntityMessage message) throws EntityUserException { + return messageHandler.invoke(context, message, this::invokeActiveInternal); + } + + @SuppressWarnings("try") + private EhcacheEntityResponse invokeActiveInternal(InvokeContext context, EhcacheEntityMessage message) { + + try { + if (message instanceof EhcacheOperationMessage) { + EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; + try (EhcacheStateContext ignored = stateService.beginProcessing(operationMessage, storeIdentifier)) { + EhcacheMessageType messageType = operationMessage.getMessageType(); + if (isStoreOperationMessage(messageType)) { + return invokeServerStoreOperation(context, (ServerStoreOpMessage) message); + } else if (isLifecycleMessage(messageType)) { + return invokeLifeCycleOperation(context, (LifecycleMessage) message); + } else if (isStateRepoOperationMessage(messageType)) { + return invokeStateRepositoryOperation((StateRepositoryOpMessage) message); + } + } + } + throw new AssertionError("Unsupported message : " + message.getClass()); + } catch (ClusterException e) { + return failure(e); + } catch (Exception e) { + LOGGER.error("Unexpected exception raised during operation: " + message, e); + return failure(new InvalidOperationException(e)); + } + } + + private EhcacheEntityResponse invokeStateRepositoryOperation(StateRepositoryOpMessage message) { + return stateService.getStateRepositoryManager().invoke(message); + } + + private EhcacheEntityResponse invokeLifeCycleOperation(InvokeContext context, LifecycleMessage message) throws ClusterException { + @SuppressWarnings("unchecked") + ActiveInvokeContext activeInvokeContext = (ActiveInvokeContext) context; + switch (message.getMessageType()) { + case VALIDATE_SERVER_STORE: + validateServerStore(activeInvokeContext.getClientDescriptor(), (ValidateServerStore) message); + break; + default: + throw new AssertionError("Unsupported LifeCycle operation " + message); + } + return success(); + } + + private void validateServerStore(ClientDescriptor clientDescriptor, ValidateServerStore validateServerStore) throws ClusterException { + ServerStoreConfiguration clientConfiguration = validateServerStore.getStoreConfiguration(); + LOGGER.info("Client {} validating cluster tier '{}'", clientDescriptor, storeIdentifier); + ServerSideServerStore store = stateService.getStore(storeIdentifier); + if (store != null) { + storeCompatibility.verify(store.getStoreConfiguration(), clientConfiguration); + connectedClients.put(clientDescriptor, Boolean.TRUE); + } else { + throw new InvalidStoreException("cluster tier '" + storeIdentifier + "' does not exist"); + } + } + + private EhcacheEntityResponse invokeServerStoreOperation(InvokeContext context, ServerStoreOpMessage message) throws ClusterException { + @SuppressWarnings("unchecked") + ActiveInvokeContext activeInvokeContext = (ActiveInvokeContext) context; + ClientDescriptor clientDescriptor = activeInvokeContext.getClientDescriptor(); + + ServerSideServerStore cacheStore = stateService.getStore(storeIdentifier); + if (cacheStore == null) { + // An operation on a non-existent store should never get out of the client + throw new LifecycleException("cluster tier does not exist : '" + storeIdentifier + "'"); + } + + switch (message.getMessageType()) { + case GET_STORE: { + GetMessage getMessage = (GetMessage) message; + try { + return getResponse(cacheStore.get(getMessage.getKey())); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception", e); + } + } + case APPEND: { + AppendMessage appendMessage = (AppendMessage)message; + + long key = appendMessage.getKey(); + InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); + if (invalidationTracker != null) { + invalidationTracker.trackHashInvalidation(key); + } + + final Chain newChain; + try { + cacheStore.append(key, appendMessage.getPayload()); + newChain = cacheStore.get(key); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception", e); + } + sendMessageToSelfAndDeferRetirement(activeInvokeContext, appendMessage, newChain); + invalidateHashForClient(clientDescriptor, key); + if (newChain.length() > chainCompactionLimit) { + requestChainResolution(clientDescriptor, key, newChain); + } + if (!configuration.isWriteBehindConfigured()) { + lockManager.unlock(key); + } + return success(); + } + case GET_AND_APPEND: { + ServerStoreOpMessage.GetAndAppendMessage getAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage)message; + LOGGER.trace("Message {} : GET_AND_APPEND on key {} from client {}", message, getAndAppendMessage.getKey(), context.getClientSource().toLong()); + + InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); + if (invalidationTracker != null) { + invalidationTracker.trackHashInvalidation(getAndAppendMessage.getKey()); + } + + final Chain result; + final Chain newChain; + try { + result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); + newChain = cacheStore.get(getAndAppendMessage.getKey()); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception", e); + } + sendMessageToSelfAndDeferRetirement(activeInvokeContext, getAndAppendMessage, newChain); + LOGGER.debug("Send invalidations for key {}", getAndAppendMessage.getKey()); + invalidateHashForClient(clientDescriptor, getAndAppendMessage.getKey()); + return getResponse(result); + } + case REPLACE: { + ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage) message; + cacheStore.replaceAtHead(replaceAtHeadMessage.getKey(), replaceAtHeadMessage.getExpect(), replaceAtHeadMessage.getUpdate()); + return success(); + } + case CLIENT_INVALIDATION_ACK: { + ClientInvalidationAck clientInvalidationAck = (ClientInvalidationAck) message; + int invalidationId = clientInvalidationAck.getInvalidationId(); + LOGGER.debug("SERVER: got notification of invalidation ack in cache {} from {} (ID {})", storeIdentifier, clientDescriptor, invalidationId); + clientInvalidated(clientDescriptor, invalidationId); + return success(); + } + case CLIENT_INVALIDATION_ALL_ACK: { + ClientInvalidationAllAck clientInvalidationAllAck = (ClientInvalidationAllAck) message; + int invalidationId = clientInvalidationAllAck.getInvalidationId(); + LOGGER.debug("SERVER: got notification of invalidation ack in cache {} from {} (ID {})", storeIdentifier, clientDescriptor, invalidationId); + clientInvalidated(clientDescriptor, invalidationId); + return success(); + } + case CLEAR: { + LOGGER.info("Clearing cluster tier {}", storeIdentifier); + try { + cacheStore.clear(); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception", e); + } + + InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); + if (invalidationTracker != null) { + invalidationTracker.setClearInProgress(true); + } + invalidateAll(clientDescriptor); + return success(); + } + case LOCK: { + LockMessage lockMessage = (LockMessage) message; + if (lockManager.lock(lockMessage.getHash(), activeInvokeContext.getClientDescriptor())) { + try { + Chain chain = cacheStore.get(lockMessage.getHash()); + return lockSuccess(chain); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception", e); + } + } else { + return lockFailure(); + } + } + case UNLOCK: { + UnlockMessage unlockMessage = (UnlockMessage) message; + lockManager.unlock(unlockMessage.getHash()); + return success(); + } + case ITERATOR_OPEN: { + IteratorOpenMessage iteratorOpenMessage = (IteratorOpenMessage) message; + try { + Iterator> iterator = cacheStore.iterator(); + List> batch = iteratorBatch(iterator, iteratorOpenMessage.getBatchSize()); + + if (iterator.hasNext()) { + Map>> liveIterators = this.liveIterators.computeIfAbsent(clientDescriptor, client -> new ConcurrentHashMap<>()); + UUID id; + do { + id = UUID.randomUUID(); + } while (liveIterators.putIfAbsent(id, iterator) != null); + return iteratorBatchResponse(id, batch, false); + } else { + return iteratorBatchResponse(UUID.randomUUID(), batch, true); + } + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception", e); + } + } + case ITERATOR_CLOSE: { + IteratorCloseMessage iteratorCloseMessage = (IteratorCloseMessage) message; + liveIterators.computeIfPresent(clientDescriptor, (client, iterators) -> { + iterators.remove(iteratorCloseMessage.getIdentity()); + if (iterators.isEmpty()) { + return null; + } else { + return iterators; + } + }); + return success(); + } + case ITERATOR_ADVANCE: { + IteratorAdvanceMessage iteratorAdvanceMessage = (IteratorAdvanceMessage) message; + UUID id = iteratorAdvanceMessage.getIdentity(); + + Iterator> iterator = liveIterators.getOrDefault(clientDescriptor, emptyMap()).get(id); + if (iterator == null) { + return failure(new InvalidOperationException("Referenced iterator is already closed (or never existed)")); + } else { + List> batch = iteratorBatch(iterator, iteratorAdvanceMessage.getBatchSize()); + if (iterator.hasNext()) { + return iteratorBatchResponse(id, batch, false); + } else { + liveIterators.computeIfPresent(clientDescriptor, (client, iterators) -> { + iterators.remove(id); + return iterators.isEmpty() ? null : iterators; + }); + return iteratorBatchResponse(id, batch, true); + } + } + } + case ENABLE_EVENT_LISTENER: { + // we need to keep a count of how many clients have registered as events listeners + // as we want to disable events from the store once all listeners are gone + // so we need to keep all requesting client descriptors in a set so that duplicate + // registrations from a single client are ignored + EnableEventListenerMessage enableEventListenerMessage = (EnableEventListenerMessage) message; + if (enableEventListenerMessage.isEnable()) { + addEventListener(clientDescriptor, cacheStore); + } else { + removeEventListener(clientDescriptor, cacheStore); + } + return success(); + } + default: + throw new AssertionError("Unsupported ServerStore operation : " + message); + } + } + + private void addEventListener(ClientDescriptor clientDescriptor, ServerSideServerStore cacheStore) { + synchronized (eventListeners) { + if (eventListeners.add(clientDescriptor)) { + cacheStore.enableEvents(true); + } + } + } + + private void removeEventListener(ClientDescriptor clientDescriptor, ServerSideServerStore cacheStore) { + synchronized (eventListeners) { + if (eventListeners.remove(clientDescriptor) && eventListeners.isEmpty()) { + cacheStore.enableEvents(false); + } + } + } + + private boolean isEventsEnabledFor(ClientDescriptor clientDescriptor) { + synchronized (eventListeners) { + return eventListeners.contains(clientDescriptor); + } + } + + // for testing + Set getEventListeners() { + synchronized (eventListeners) { + return new HashSet<>(eventListeners); + } + } + + private List> iteratorBatch(Iterator> iterator, int batchSize) { + List> chains = new ArrayList<>(); + int size = 0; + while (iterator.hasNext() && size < batchSize && size >= 0) { + Map.Entry nextChain = iterator.next(); + chains.add(new AbstractMap.SimpleImmutableEntry<>(nextChain.getKey(), nextChain.getValue())); + for (Element e: nextChain.getValue()) { + size += e.getPayload().remaining(); + } + } + return chains; + } + + private void invalidateAll(ClientDescriptor originatingClientDescriptor) { + int invalidationId = invalidationIdGenerator.getAndIncrement(); + Set clientsToInvalidate = new HashSet<>(getValidatedClients()); + if (originatingClientDescriptor != null) { + clientsToInvalidate.remove(originatingClientDescriptor); + } + + InvalidationHolder invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate); + clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); + + LOGGER.debug("SERVER: requesting {} client(s) invalidation of all in cache {} (ID {})", clientsToInvalidate.size(), storeIdentifier, invalidationId); + for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { + LOGGER.debug("SERVER: asking client {} to invalidate all from cache {} (ID {})", clientDescriptorThatHasToInvalidate, storeIdentifier, invalidationId); + try { + clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, clientInvalidateAll(invalidationId)); + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); + } + } + + if (clientsToInvalidate.isEmpty()) { + clientInvalidated(invalidationHolder.clientDescriptorWaitingForInvalidation, invalidationId); + } + } + + private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidationId) { + InvalidationHolder invalidationHolder = clientsWaitingForInvalidation.get(invalidationId); + + if (invalidationHolder == null) { // Happens when client is re-sending/sending invalidations for which server has lost track since fail-over happened. + LOGGER.debug("Ignoring invalidation from client {} " + clientDescriptor); + return; + } + + invalidationHolder.clientsHavingToInvalidate.remove(clientDescriptor); + if (invalidationHolder.clientsHavingToInvalidate.isEmpty()) { + if (clientsWaitingForInvalidation.remove(invalidationId) != null) { + try { + Long key = invalidationHolder.key; + if (key == null) { + if (isStrong()) { + clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, allInvalidationDone()); + LOGGER.debug("SERVER: notifying originating client that all other clients invalidated all in cache {} from {} (ID {})", storeIdentifier, clientDescriptor, invalidationId); + } else { + entityMessenger.messageSelf(new ClearInvalidationCompleteMessage()); + + InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); + if (invalidationTracker != null) { + invalidationTracker.setClearInProgress(false); + } + + } + } else { + if (isStrong()) { + clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, hashInvalidationDone(key)); + LOGGER.debug("SERVER: notifying originating client that all other clients invalidated key {} in cache {} from {} (ID {})", key, storeIdentifier, clientDescriptor, invalidationId); + } else { + entityMessenger.messageSelf(new InvalidationCompleteMessage(key)); + + InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); + if (invalidationTracker != null) { + invalidationTracker.untrackHashInvalidation(key); + } + } + } + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); + } + } + } + } + + private void invalidateHashForClient(ClientDescriptor originatingClientDescriptor, long key) { + int invalidationId = invalidationIdGenerator.getAndIncrement(); + Set validatedClients = getValidatedClients(); + Set clientsToInvalidate = ConcurrentHashMap.newKeySet(validatedClients.size()); + clientsToInvalidate.addAll(validatedClients); + if (originatingClientDescriptor != null) { + clientsToInvalidate.remove(originatingClientDescriptor); + } + + InvalidationHolder invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, key); + clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); + + LOGGER.debug("SERVER: requesting {} client(s) invalidation of hash {} in cache {} (ID {})", clientsToInvalidate.size(), key, storeIdentifier, invalidationId); + for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { + LOGGER.debug("SERVER: asking client {} to invalidate hash {} from cache {} (ID {})", clientDescriptorThatHasToInvalidate, key, storeIdentifier, invalidationId); + try { + clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, clientInvalidateHash(key, invalidationId)); + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); + } + } + + if (clientsToInvalidate.isEmpty()) { + clientInvalidated(invalidationHolder.clientDescriptorWaitingForInvalidation, invalidationId); + } + } + + private void requestChainResolution(ClientDescriptor clientDescriptor, long key, Chain chain) { + try { + clientCommunicator.sendNoResponse(clientDescriptor, resolveRequest(key, chain)); + } catch (MessageCodecException e) { + throw new AssertionError("Codec error", e); + } + } + + /** + * Send a {@link PassiveReplicationMessage} to the passive, reuse the same transaction id and client id as the original message since this + * original message won't ever be sent to the passive and these ids will be used to prevent duplication if the active goes down and the + * client resends the original message to the passive (now our new active). + * + * @param context context of the message + * @param message message to be forwarded + * @param newChain resulting chain to send + */ + private void sendMessageToSelfAndDeferRetirement(ActiveInvokeContext context, KeyBasedServerStoreOpMessage message, Chain newChain) { + try { + long clientId = context.getClientSource().toLong(); + entityMessenger.messageSelfAndDeferRetirement(message, new PassiveReplicationMessage.ChainReplicationMessage(message.getKey(), newChain, + context.getCurrentTransactionId(), context.getOldestTransactionId(), clientId)); + } catch (MessageCodecException e) { + throw new AssertionError("Codec error", e); + } + } + + @Override + public void notifyDestroyed(ClientSourceId sourceId) { + messageHandler.untrackClient(sourceId); + } + + @Override + public ReconnectHandler startReconnect() { + try { + this.entityMessenger.messageSelf(new EhcacheMessageTrackerCatchup(this.messageHandler.getRecordedMessages().filter(m->m.getRequest() != null).collect(Collectors.toList()))); + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); + } + + List inflightInvalidations = new ArrayList<>(); + + InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); + if (invalidationTracker != null) { + inflightInvalidations.add(new InvalidationTuple(null, invalidationTracker.getTrackedKeys(), invalidationTracker.isClearInProgress())); + invalidationTracker.clear(); + } + + return new ReconnectHandler() { + + @Override + public void handleReconnect(ClientDescriptor clientDescriptor, byte[] bytes) { + ClusterTierReconnectMessage reconnectMessage = reconnectMessageCodec.decode(bytes); + ServerSideServerStore serverStore = stateService.getStore(storeIdentifier); + addInflightInvalidationsForStrongCache(clientDescriptor, reconnectMessage, serverStore); + lockManager.createLockStateAfterFailover(clientDescriptor, reconnectMessage.getLocksHeld()); + if (reconnectMessage.isEventsEnabled()) { + addEventListener(clientDescriptor, serverStore); + } + + LOGGER.info("Client '{}' successfully reconnected to newly promoted ACTIVE after failover.", clientDescriptor); + + connectedClients.put(clientDescriptor, Boolean.TRUE); + } + + private void addInflightInvalidationsForStrongCache(ClientDescriptor clientDescriptor, ClusterTierReconnectMessage reconnectMessage, ServerSideServerStore serverStore) { + if (serverStore.getStoreConfiguration().getConsistency().equals(Consistency.STRONG)) { + Set invalidationsInProgress = reconnectMessage.getInvalidationsInProgress(); + LOGGER.debug("Number of Inflight Invalidations from client ID {} for cache {} is {}.", clientDescriptor.getSourceId().toLong(), storeIdentifier, invalidationsInProgress + .size()); + inflightInvalidations.add(new InvalidationTuple(clientDescriptor, invalidationsInProgress, reconnectMessage.isClearInProgress())); + } + } + + @Override + public void close() { + LOGGER.debug("Stalling all operations for cluster tier {} for firing inflight invalidations again.", storeIdentifier); + inflightInvalidations.forEach(invalidationState -> { + if (invalidationState.isClearInProgress()) { + invalidateAll(invalidationState.getClientDescriptor()); + } + invalidationState.getInvalidationsInProgress() + .forEach(hashInvalidationToBeResent -> invalidateHashForClient(invalidationState.getClientDescriptor(), hashInvalidationToBeResent)); + }); + } + }; + } + + @Override + public void synchronizeKeyToPassive(PassiveSynchronizationChannel syncChannel, int concurrencyKey) { + LOGGER.info("Sync started for concurrency key {}.", concurrencyKey); + if (concurrencyKey == DEFAULT_KEY) { + stateService.getStateRepositoryManager().syncMessageFor(storeIdentifier).forEach(syncChannel::synchronizeToPassive); + } else if (concurrencyKey == TRACKER_SYNC_KEY) { + sendMessageTrackerReplication(syncChannel); + } else { + boolean interrupted = false; + BlockingQueue messageQ = new SynchronousQueue<>(); + int segmentId = concurrencyKey - DEFAULT_KEY - 1; + Thread thisThread = Thread.currentThread(); + CompletableFuture asyncGet = CompletableFuture.runAsync( + () -> doGetsForSync(segmentId, messageQ, syncChannel, thisThread), syncGetsExecutor); + try { + try { + while (messageQ.take().execute()) ; + } catch (InterruptedException e) { + interrupted = true; + } + if (interrupted) { + // here we may have been interrupted due to a genuine exception on the async get thread + // let us try and not loose that exception as it takes precedence over the interrupt + asyncGet.get(10, TimeUnit.SECONDS); + // we received a genuine interrupt + throw new InterruptedException(); + } else { + asyncGet.get(); + } + } catch (InterruptedException | ExecutionException | TimeoutException e) { + throw new RuntimeException(e); + } + } + LOGGER.info("Sync complete for concurrency key {}.", concurrencyKey); + } + + private void doGetsForSync(int segmentId, BlockingQueue messageQ, + PassiveSynchronizationChannel syncChannel, Thread waitingThread) { + int numKeyGets = 0; + long dataSize = 0; + try { + ServerSideServerStore store = stateService.getStore(storeIdentifier); + Set keys = store.getSegmentKeySets().get(segmentId); + int remainingKeys = keys.size(); + Map mappingsToSend = new HashMap<>(computeInitialMapCapacity(remainingKeys)); + boolean capacityAdjusted = false; + for (Long key : keys) { + final Chain chain; + try { + chain = store.get(key); + if (chain.isEmpty()) { + // evicted just continue with next + remainingKeys--; + continue; + } + numKeyGets++; + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + for (Element element : chain) { + dataSize += element.getPayload().remaining(); + } + mappingsToSend.put(key, chain); + if (dataSize > dataSizeThreshold || numKeyGets >= dataGetsThreshold) { + putMessage(messageQ, syncChannel, mappingsToSend); + if (!capacityAdjusted && segmentId == 0) { + capacityAdjusted = true; + adjustInitialCapacity(numKeyGets); + } + remainingKeys -= numKeyGets; + mappingsToSend = new HashMap<>(computeMapCapacity(remainingKeys, numKeyGets)); + dataSize = 0; + numKeyGets = 0; + } + } + if (!mappingsToSend.isEmpty()) { + putMessage(messageQ, syncChannel, mappingsToSend); + } + // put the last message indicator into the queue + putMessage(messageQ, null, null); + } catch (Throwable e) { + // ensure waiting peer thread is interrupted, if we run into trouble + waitingThread.interrupt(); + throw e; + } + } + + private void putMessage(BlockingQueue messageQ, + PassiveSynchronizationChannel syncChannel, + Map mappingsToSend) { + try { + if (syncChannel != null) { + final EhcacheDataSyncMessage msg = new EhcacheDataSyncMessage(mappingsToSend); + messageQ.put(() -> { + syncChannel.synchronizeToPassive(msg); + return true; + }); + } else { + // we are done + messageQ.put(() -> false); + } + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + /** + * Compute map capacity based on {@code remainingSize} and {@code expectedGets}. Both varies depending on the size of + * the chains and number of keys in the map. + *

+ * NOTE: if expected gets dips below 32, keep at 32 as it indicates a large segment with possibly smaller number of keys + * which means the next iteration may show more keys in the map. + * + * @param remainingSize is the number of keys left in the segment yet to be send + * @param expectedGets is the max expected number of keys that could be put in the map before the map gets full + * @return required capacity for the map. + */ + private int computeMapCapacity(int remainingSize, int expectedGets) { + if (remainingSize < 16) { + return 16; + } else if (expectedGets < 32) { + return 32; + } else if (remainingSize < expectedGets) { + return (int) ((float) remainingSize / 0.75f + 1.0f); + } else { + return (int) ((float) expectedGets / 0.75f + 1.0f); + } + } + + /** + * Adjust {@link this#dataMapInitialCapacity} based on what we learned about the cache during iteration of segment 0. + * + * NOTE: The required capacity calculation and the initial capacity adjustment assumes some sort of symmetry across + * multiple segments, but it is possible that in a given segment, some keys has chain with LARGE data and some keys + * has small chain with smaller data sizes. But on a larger sweep that should even out. Even if it does not even out, + * this should perform better as the initial size is reset back to a minimum of 32 and not 16 when a cache is large + * and when a cache is very small it starts with initial size of 16 as the {@link this#computeInitialMapCapacity(int)} + * will take the total number of keys in the segment into account. + * + * @param actualKeyGets the actual number of keys we got when the map got full + */ + private void adjustInitialCapacity(int actualKeyGets) { + // even when there are larger data chains with less keys..let us keep the lower bound at 32. + dataMapInitialCapacity = (actualKeyGets < 32) ? 32 : (int) ((float) actualKeyGets / 0.75f + 1.0f); + } + + /** + * Starts with an initial size of configured {@link this#dataGetsThreshold} or adjusted initial size, unless the + * segment of the cache is smaller than the initial expected size. + * + * @param totalKeys is the total number of keys in this segment + */ + private int computeInitialMapCapacity(int totalKeys) { + if (dataMapInitialCapacity == null) { + dataMapInitialCapacity = (int) ((float) dataGetsThreshold / 0.75f + 1.0f); + } + if (totalKeys < 16) { + return 16; + } else if (totalKeys < dataMapInitialCapacity) { + return (int) ((float) totalKeys / 0.75f + 1.0f); + } else { + return dataMapInitialCapacity; + } + } + + /** + * Executes message sending asynchronously to preparation of message. + */ + @FunctionalInterface + private interface DataSyncMessageHandler { + boolean execute(); + } + + private void sendMessageTrackerReplication(PassiveSynchronizationChannel syncChannel) { + Map> clientSourceIdTrackingMap = messageHandler.getTrackedClients() + .collect(toMap(ClientSourceId::toLong, clientSourceId -> messageHandler.getRecordedMessages().filter(r->r.getClientSourceId().toLong() == clientSourceId.toLong()).collect(Collectors.toMap(rm->rm.getTransactionId(), rm->rm.getResponse())))); + if (!clientSourceIdTrackingMap.isEmpty()) { + syncChannel.synchronizeToPassive(new EhcacheMessageTrackerMessage(clientSourceIdTrackingMap)); + } + } + + @Override + public void destroy() { + LOGGER.info("Destroying cluster tier '{}'", storeIdentifier); + try { + stateService.destroyServerStore(storeIdentifier); + } catch (ClusterException e) { + LOGGER.error("Failed to destroy server store - does not exist", e); + } + messageHandler.destroy(); + management.close(); + } + + protected Set getConnectedClients() { + return connectedClients.keySet(); + } + + protected Set getValidatedClients() { + return connectedClients.entrySet().stream().filter(Map.Entry::getValue).map(Map.Entry::getKey).collect(toSet()); + } + + ConcurrentMap getClientsWaitingForInvalidation() { + return clientsWaitingForInvalidation; + } + + private boolean isStrong() { + return this.configuration.getConsistency() == Consistency.STRONG; + } + + static class InvalidationHolder { + final ClientDescriptor clientDescriptorWaitingForInvalidation; + final Set clientsHavingToInvalidate; + final Long key; + + InvalidationHolder(ClientDescriptor clientDescriptorWaitingForInvalidation, Set clientsHavingToInvalidate, Long key) { + this.clientDescriptorWaitingForInvalidation = clientDescriptorWaitingForInvalidation; + this.clientsHavingToInvalidate = clientsHavingToInvalidate; + this.key = key; + } + + InvalidationHolder(ClientDescriptor clientDescriptorWaitingForInvalidation, Set clientsHavingToInvalidate) { + this(clientDescriptorWaitingForInvalidation, clientsHavingToInvalidate, null); + } + } + + private static class InvalidationTuple { + private final ClientDescriptor clientDescriptor; + private final Set invalidationsInProgress; + private final boolean isClearInProgress; + + InvalidationTuple(ClientDescriptor clientDescriptor, Set invalidationsInProgress, boolean isClearInProgress) { + this.clientDescriptor = clientDescriptor; + this.invalidationsInProgress = invalidationsInProgress; + this.isClearInProgress = isClearInProgress; + } + + ClientDescriptor getClientDescriptor() { + return clientDescriptor; + } + + Set getInvalidationsInProgress() { + return invalidationsInProgress; + } + + boolean isClearInProgress() { + return isClearInProgress; + } + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierDump.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierDump.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierDump.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierDump.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntity.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntity.java similarity index 81% rename from clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntity.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntity.java index 8ee9348c4f..c865aa4fe8 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntity.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntity.java @@ -36,13 +36,16 @@ import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheMessageTrackerCatchup; import org.ehcache.clustered.server.management.ClusterTierManagement; +import org.ehcache.clustered.server.state.EhcacheStateContext; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.config.EhcacheStoreStateServiceConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.client.message.tracker.OOOMessageHandler; import org.terracotta.client.message.tracker.OOOMessageHandlerConfiguration; +import org.terracotta.client.message.tracker.RecordedMessage; import org.terracotta.entity.ClientSourceId; import org.terracotta.entity.ConfigurationException; import org.terracotta.entity.EntityUserException; @@ -51,15 +54,17 @@ import org.terracotta.entity.ServiceException; import org.terracotta.entity.ServiceRegistry; import org.terracotta.entity.StateDumpCollector; +import org.terracotta.offheapstore.exceptions.OversizeMappingException; import java.util.concurrent.TimeoutException; +import java.util.stream.Stream; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.getResponse; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.success; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; -import static org.ehcache.clustered.server.ConcurrencyStrategies.clusterTierConcurrency; + /** * ClusterTierPassiveEntity @@ -85,7 +90,7 @@ public ClusterTierPassiveEntity(ServiceRegistry registry, ClusterTierEntityConfi try { stateService = registry.getService(new EhcacheStoreStateServiceConfig(config.getManagerIdentifier(), defaultMapper)); messageHandler = registry.getService(new OOOMessageHandlerConfiguration<>(managerIdentifier + "###" + storeIdentifier, - ClusterTierActiveEntity::isTrackedMessage, defaultMapper.getSegments() + 1, new MessageToTrackerSegmentFunction(clusterTierConcurrency(defaultMapper)))); + ClusterTierActiveEntity::isTrackedMessage)); } catch (ServiceException e) { throw new ConfigurationException("Unable to retrieve service: " + e.getMessage()); } @@ -111,7 +116,7 @@ public void addStateTo(StateDumpCollector dump) { @Override public void createNew() throws ConfigurationException { stateService.createStore(storeIdentifier, configuration, false); - management.init(); + management.entityCreated(); } private boolean isEventual() { @@ -160,11 +165,12 @@ public int getConcurrencyKey() { messageHandler.invoke(realContext, message, this::invokePassiveInternal); } + @SuppressWarnings("try") private EhcacheEntityResponse invokePassiveInternal(InvokeContext context, EhcacheEntityMessage message) { if (message instanceof EhcacheOperationMessage) { EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; - EhcacheMessageType messageType = operationMessage.getMessageType(); - try { + try (EhcacheStateContext ignored = stateService.beginProcessing(operationMessage, storeIdentifier)) { + EhcacheMessageType messageType = operationMessage.getMessageType(); if (isStoreOperationMessage(messageType)) { invokeServerStoreOperation((ServerStoreOpMessage) message); } else if (isStateRepoOperationMessage(messageType)) { @@ -174,16 +180,14 @@ private EhcacheEntityResponse invokePassiveInternal(InvokeContext context, Ehcac } else { throw new AssertionError("Unsupported EhcacheOperationMessage: " + operationMessage.getMessageType()); } - } catch (ClusterException e) { + } catch (ClusterException | OversizeMappingException e) { // The above operations are not critical enough to fail a passive, so just log the exception LOGGER.error("Unexpected exception raised during operation: " + message, e); } } else if (message instanceof EhcacheSyncMessage) { - try { - invokeSyncOperation(context, (EhcacheSyncMessage) message); - } catch (ClusterException e) { - throw new IllegalStateException("Sync operation failed", e); - } + invokeSyncOperation(context, (EhcacheSyncMessage) message); + } else if (message instanceof EhcacheMessageTrackerCatchup) { + invokeCatchup(context, (EhcacheMessageTrackerCatchup) message); } else { throw new AssertionError("Unsupported EhcacheEntityMessage: " + message.getClass()); } @@ -192,7 +196,49 @@ private EhcacheEntityResponse invokePassiveInternal(InvokeContext context, Ehcac return success(); } - private void invokeSyncOperation(InvokeContext context, EhcacheSyncMessage message) throws ClusterException { + private void invokeCatchup(InvokeContext context, EhcacheMessageTrackerCatchup catchup) { + catchup.getTrackedMessages().forEach(r -> { + InvokeContext replay = new InvokeContext() { + @Override + public ClientSourceId getClientSource() { + return context.makeClientSourceId(r.getClientSourceId().toLong()); + } + + @Override + public long getCurrentTransactionId() { + return r.getTransactionId(); + } + + @Override + public long getOldestTransactionId() { + return context.getOldestTransactionId(); + } + + @Override + public boolean isValidClientInformation() { + return true; + } + + @Override + public ClientSourceId makeClientSourceId(long l) { + return context.makeClientSourceId(l); + } + + @Override + public int getConcurrencyKey() { + return 0; + } + }; + try { + messageHandler.invoke(replay, r.getRequest(), this::invokePassiveInternal); + } catch (EntityUserException use) { + // swallow any user exceptions here + } + }); + } + + @SuppressWarnings("deprecation") + private void invokeSyncOperation(InvokeContext context, EhcacheSyncMessage message) { switch (message.getMessageType()) { case DATA: EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage) message; @@ -205,8 +251,31 @@ private void invokeSyncOperation(InvokeContext context, EhcacheSyncMessage messa break; case MESSAGE_TRACKER: EhcacheMessageTrackerMessage messageTrackerMessage = (EhcacheMessageTrackerMessage) message; - messageTrackerMessage.getTrackedMessages().forEach((key, value) -> - messageHandler.loadTrackedResponsesForSegment(messageTrackerMessage.getSegmentId(), context.makeClientSourceId(key), value)); + Stream> converted = + messageTrackerMessage.getTrackedMessages().entrySet().stream().flatMap(e -> e.getValue().entrySet().stream().map(v -> { + return new RecordedMessage() { + @Override + public ClientSourceId getClientSourceId() { + return context.makeClientSourceId(e.getKey()); + } + + @Override + public long getTransactionId() { + return v.getKey(); + } + + @Override + public EhcacheEntityMessage getRequest() { + return null; + } + + @Override + public EhcacheEntityResponse getResponse() { + return v.getValue(); + } + }; + })); + messageHandler.loadRecordedMessages(converted); break; default: throw new AssertionError("Unsupported Sync operation " + message.getMessageType()); @@ -307,6 +376,7 @@ public void destroy() { throw new AssertionError(e); } management.close(); + messageHandler.destroy(); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierServerEntityService.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierServerEntityService.java similarity index 88% rename from clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierServerEntityService.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierServerEntityService.java index 0e6fff599c..b5a1ebd576 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierServerEntityService.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ClusterTierServerEntityService.java @@ -16,6 +16,12 @@ package org.ehcache.clustered.server.store; +import java.io.Closeable; +import java.io.IOException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; import org.ehcache.clustered.common.internal.messages.ConfigCodec; import org.ehcache.clustered.common.internal.messages.EhcacheCodec; @@ -46,12 +52,15 @@ /** * ClusterTierServerEntityService */ -public class ClusterTierServerEntityService implements EntityServerService { +public class ClusterTierServerEntityService implements EntityServerService, Closeable { private static final long ENTITY_VERSION = 10L; private static final int DEFAULT_CONCURRENCY = 16; private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(DEFAULT_CONCURRENCY); private static final ConfigCodec CONFIG_CODEC = new CommonConfigCodec(); + private static final int MAX_SYNC_CONCURRENCY = 1; + private final ExecutorService syncGets = new ThreadPoolExecutor(0, MAX_SYNC_CONCURRENCY, + 20, TimeUnit.SECONDS, new LinkedBlockingQueue<>()); private final EntityConfigurationCodec configCodec = new EntityConfigurationCodec(CONFIG_CODEC); @@ -65,10 +74,15 @@ public boolean handlesEntityType(String typeName) { return typeName.equals("org.ehcache.clustered.client.internal.store.InternalClusterTierClientEntity"); } + @Override + public void close() throws IOException { + syncGets.shutdownNow(); + } + @Override public ClusterTierActiveEntity createActiveEntity(ServiceRegistry registry, byte[] configuration) throws ConfigurationException { ClusterTierEntityConfiguration clusterTierEntityConfiguration = configCodec.decodeClusteredStoreConfiguration(configuration); - return new ClusterTierActiveEntity(registry, clusterTierEntityConfiguration, DEFAULT_MAPPER); + return new ClusterTierActiveEntity(registry, clusterTierEntityConfiguration, DEFAULT_MAPPER, syncGets); } @Override diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/LockManagerImpl.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/LockManagerImpl.java new file mode 100644 index 0000000000..cbaf00ee2a --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/LockManagerImpl.java @@ -0,0 +1,72 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.store; + +import org.terracotta.entity.ClientDescriptor; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Consumer; + +public class LockManagerImpl implements ServerLockManager { + + private final Map blockedKeys = new ConcurrentHashMap<>(); + + @Override + public boolean lock(long key, ClientDescriptor client) { + if (blockedKeys.containsKey(key)) { + return false; + } + blockedKeys.put(key, client); + return true; + } + + @Override + public void unlock(long key) { + blockedKeys.remove(key); + } + + @Override + public void createLockStateAfterFailover(ClientDescriptor client, Set locksHeld) { + locksHeld.forEach(key -> { + ClientDescriptor absent = blockedKeys.putIfAbsent(key, client); + if (absent != null) { + throw new IllegalStateException("Key is already locked"); + } + }); + } + + @Override + public void sweepLocksForClient(ClientDescriptor client, Consumer> removeHeldKeys) { + Set> entries = new HashSet<>(Collections.unmodifiableSet(blockedKeys.entrySet())); + List heldKeys = new ArrayList<>(); + entries.forEach(entry -> { + if (entry.getValue().equals(client)) { + heldKeys.add(entry.getKey()); + blockedKeys.remove(entry.getKey()); + } + }); + + if (removeHeldKeys != null) { + removeHeldKeys.accept(heldKeys); + } + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/store/MessageToTrackerSegmentFunction.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/MessageToTrackerSegmentFunction.java similarity index 94% rename from clustered/server/src/main/java/org/ehcache/clustered/server/store/MessageToTrackerSegmentFunction.java rename to clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/MessageToTrackerSegmentFunction.java index 79f73c4157..3cc048688f 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/store/MessageToTrackerSegmentFunction.java +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/MessageToTrackerSegmentFunction.java @@ -26,12 +26,12 @@ */ class MessageToTrackerSegmentFunction implements ToIntFunction { - private ConcurrencyStrategy concurrencyStrategy; + private final ConcurrencyStrategy concurrencyStrategy; MessageToTrackerSegmentFunction(ConcurrencyStrategy concurrencyStrategy) { - this.concurrencyStrategy = concurrencyStrategy; } + @Override public int applyAsInt(EhcacheEntityMessage value) { // Concurrency is 1 based, segments are 0 based diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/NoopLockManager.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/NoopLockManager.java new file mode 100644 index 0000000000..89be57a019 --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/NoopLockManager.java @@ -0,0 +1,43 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.store; + +import org.terracotta.entity.ClientDescriptor; + +import java.util.List; +import java.util.Set; +import java.util.function.Consumer; + +public class NoopLockManager implements ServerLockManager { + @Override + public boolean lock(long key, ClientDescriptor client) { + return false; + } + + @Override + public void unlock(long key) { + } + + @Override + public void createLockStateAfterFailover(ClientDescriptor client, Set locksHeld) { + + } + + @Override + public void sweepLocksForClient(ClientDescriptor client, Consumer> removeHeldKeys) { + + } +} diff --git a/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ServerLockManager.java b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ServerLockManager.java new file mode 100644 index 0000000000..2a22de6df2 --- /dev/null +++ b/clustered/server/ehcache-entity/src/main/java/org/ehcache/clustered/server/store/ServerLockManager.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.store; + +import org.terracotta.entity.ClientDescriptor; + +import java.util.List; +import java.util.Set; +import java.util.function.Consumer; + +public interface ServerLockManager { + + boolean lock(long key, ClientDescriptor client); + + void unlock(long key); + + void createLockStateAfterFailover(ClientDescriptor client, Set locksHeld); + + void sweepLocksForClient(ClientDescriptor client, Consumer> removeHeldKeys); + +} diff --git a/clustered/server/src/main/resources/META-INF/services/org.terracotta.entity.EntityServerService b/clustered/server/ehcache-entity/src/main/resources/META-INF/services/org.terracotta.entity.EntityServerService similarity index 100% rename from clustered/server/src/main/resources/META-INF/services/org.terracotta.entity.EntityServerService rename to clustered/server/ehcache-entity/src/main/resources/META-INF/services/org.terracotta.entity.EntityServerService diff --git a/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntityTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntityTest.java new file mode 100644 index 0000000000..4f6112cbd5 --- /dev/null +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntityTest.java @@ -0,0 +1,164 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.lock.server; + +import org.ehcache.clustered.common.internal.lock.LockMessaging; +import org.ehcache.clustered.common.internal.lock.LockMessaging.LockTransition; +import org.hamcrest.beans.HasPropertyWithValue; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.terracotta.entity.ActiveInvokeContext; +import org.terracotta.entity.ClientCommunicator; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.EntityResponse; +import org.terracotta.entity.MessageCodecException; + +import static org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType.READ; +import static org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType.WRITE; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.hamcrest.MockitoHamcrest.argThat; + +public class VoltronReadWriteLockActiveEntityTest { + + @Rule + public MockitoRule rule = MockitoJUnit.rule(); + + @Mock + private ClientCommunicator communicator = mock(ClientCommunicator.class); + + @InjectMocks + VoltronReadWriteLockActiveEntity entity; + + private ActiveInvokeContext context = newContext(); + + private static ActiveInvokeContext newContext() { + @SuppressWarnings("unchecked") + ActiveInvokeContext context = mock(ActiveInvokeContext.class); + when(context.getClientDescriptor()).thenReturn(mock(ClientDescriptor.class)); + return context; + } + + @Test + public void testWriteLock() { + LockTransition transition = entity.invokeActive(context, LockMessaging.lock(WRITE)); + + assertThat(transition.isAcquired(), is(true)); + } + + @Test + public void testReadLock() { + LockTransition transition = entity.invokeActive(context, LockMessaging.lock(READ)); + + assertThat(transition.isAcquired(), is(true)); + } + + @Test + public void testWriteUnlock() { + entity.invokeActive(context, LockMessaging.lock(WRITE)); + + LockTransition transition = entity.invokeActive(context, LockMessaging.unlock(WRITE)); + + assertThat(transition.isReleased(), is(true)); + } + + @Test + public void testReadUnlock() { + entity.invokeActive(context, LockMessaging.lock(READ)); + + LockTransition transition = entity.invokeActive(context, LockMessaging.unlock(READ)); + + assertThat(transition.isReleased(), is(true)); + } + + @Test + public void testTryWriteLockWhenWriteLocked() { + entity.invokeActive(context, LockMessaging.lock(WRITE)); + + LockTransition transition = entity.invokeActive(newContext(), LockMessaging.tryLock(WRITE)); + + assertThat(transition.isAcquired(), is(false)); + } + + @Test + public void testTryReadLockWhenWriteLocked() { + entity.invokeActive(context, LockMessaging.lock(WRITE)); + + LockTransition transition = entity.invokeActive(newContext(), LockMessaging.tryLock(READ)); + + assertThat(transition.isAcquired(), is(false)); + } + + @Test + public void testTryWriteLockWhenReadLocked() { + entity.invokeActive(context, LockMessaging.lock(READ)); + + LockTransition transition = entity.invokeActive(newContext(), LockMessaging.tryLock(WRITE)); + + assertThat(transition.isAcquired(), is(false)); + } + + @Test + public void testTryReadLockWhenReadLocked() { + entity.invokeActive(context, LockMessaging.lock(READ)); + + LockTransition transition = entity.invokeActive(newContext(), LockMessaging.tryLock(READ)); + + assertThat(transition.isAcquired(), is(true)); + } + + @Test + public void testWriteUnlockNotifiesListeners() throws MessageCodecException { + ActiveInvokeContext locker = newContext(); + ActiveInvokeContext waiter = newContext(); + + ClientDescriptor waiterDescriptor = mock(ClientDescriptor.class); + when(waiter.getClientDescriptor()).thenReturn(waiterDescriptor); + + entity.invokeActive(locker, LockMessaging.lock(WRITE)); + entity.invokeActive(waiter, LockMessaging.lock(WRITE)); + entity.invokeActive(locker, LockMessaging.unlock(WRITE)); + + verify(communicator).sendNoResponse(same(waiterDescriptor), argThat( + HasPropertyWithValue.hasProperty("released", is(true)))); + } + + @Test + public void testReadUnlockNotifiesListeners() throws MessageCodecException { + ActiveInvokeContext locker = newContext(); + ActiveInvokeContext waiter = newContext(); + + ClientDescriptor waiterDescriptor = mock(ClientDescriptor.class); + when(waiter.getClientDescriptor()).thenReturn(waiterDescriptor); + + entity.invokeActive(locker, LockMessaging.lock(READ)); + entity.invokeActive(waiter, LockMessaging.lock(WRITE)); + entity.invokeActive(locker, LockMessaging.unlock(READ)); + + verify(communicator).sendNoResponse(same(waiterDescriptor), argThat( + HasPropertyWithValue.hasProperty("released", is(true)))); + } + + +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntityTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntityTest.java similarity index 87% rename from clustered/server/src/test/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntityTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntityTest.java index 9fcf2a967e..c58061c0cb 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntityTest.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ClusterTierManagerActiveEntityTest.java @@ -38,6 +38,7 @@ import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; +import org.terracotta.offheapresource.OffHeapUsageEvent; import org.terracotta.offheapstore.util.MemoryUnit; import java.util.Collection; @@ -47,14 +48,15 @@ import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.function.Consumer; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; @@ -81,7 +83,7 @@ public void testDisconnectedNotConnected() throws Exception { EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(blankConfiguration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(blankConfiguration, ehcacheStateService, management); - ClientDescriptor client = new TestClientDescriptor(); + ClientDescriptor client = TestClientDescriptor.newClient(); activeEntity.disconnected(client); // Not expected to fail ... } @@ -104,7 +106,7 @@ public void testConfigure() throws Exception { ClusterTierManagerConfiguration configuration = new ClusterTierManagerConfiguration("identifier", serverSideConfiguration); EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - ClientDescriptor client = new TestClientDescriptor(); + ClientDescriptor client = TestClientDescriptor.newClient(); activeEntity.connected(client); assertThat(registry.getStoreManagerService().getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); @@ -183,14 +185,14 @@ public void testConfigureMissingDefaultResource() throws Exception { public void testConfigureLargeSharedPool() throws Exception { final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); registry.addResource("defaultServerResource", 64, MemoryUnit.MEGABYTES); - registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); - registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) - .sharedPool("tooBig", "serverResource2", 64, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 4, MemoryUnit.MEGABYTES) + .sharedPool("tooBig", "serverResource2", 16, MemoryUnit.MEGABYTES) .build(); ClusterTierManagerConfiguration configuration = new ClusterTierManagerConfiguration("identifier", serverSideConfiguration); EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); @@ -226,16 +228,16 @@ public void testValidate2Clients() throws Exception { EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - assertSuccess(activeEntity.invokeActive(context, MESSAGE_FACTORY.validateStoreManager(serverSideConfig))); + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + + assertSuccess(activeEntity.invokeActive(client1.invokeContext(), MESSAGE_FACTORY.validateStoreManager(serverSideConfig))); - UUID client2Id = UUID.randomUUID(); - TestInvokeContext context2 = new TestInvokeContext(); - activeEntity.connected(context2.getClientDescriptor()); + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + activeEntity.connected(client2); - assertSuccess(activeEntity.invokeActive(context2, MESSAGE_FACTORY.validateStoreManager(serverSideConfig))); + assertSuccess(activeEntity.invokeActive(client2.invokeContext(), MESSAGE_FACTORY.validateStoreManager(serverSideConfig))); } @Test @@ -253,9 +255,9 @@ public void testValidateAfterConfigure() throws Exception { EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - assertSuccess(activeEntity.invokeActive(context, MESSAGE_FACTORY.validateStoreManager(serverSideConfig))); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + assertSuccess(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(serverSideConfig))); } @Test @@ -273,10 +275,10 @@ public void testValidateExtraResource() throws Exception { ClusterTierManagerConfiguration configuration = new ClusterTierManagerConfiguration("identifier", serverSideConfiguration); EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); - assertFailure(activeEntity.invokeActive(context, + assertFailure(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -299,10 +301,10 @@ public void testValidateNoDefaultResource() throws Exception { ClusterTierManagerConfiguration configuration = new ClusterTierManagerConfiguration("identifier", serverSideConfiguration); EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); - assertFailure(activeEntity.invokeActive(context, + assertFailure(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -342,10 +344,10 @@ public void testValidateIdenticalConfiguration() throws Exception { EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); - assertThat(activeEntity.invokeActive(context, MESSAGE_FACTORY.validateStoreManager(validateConfig)).getResponseType(), is(EhcacheResponseType.SUCCESS)); + assertThat(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(validateConfig)).getResponseType(), is(EhcacheResponseType.SUCCESS)); } @Test @@ -364,14 +366,14 @@ public void testValidateSharedPoolNamesDifferent() throws Exception { EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); ServerSideConfiguration validate = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("ternary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - assertFailure(activeEntity.invokeActive(context, MESSAGE_FACTORY.validateStoreManager(validate)), InvalidServerSideConfigurationException.class, "Pool names not equal."); + assertFailure(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(validate)), InvalidServerSideConfigurationException.class, "Pool names not equal."); } @Test @@ -390,18 +392,18 @@ public void testValidateDefaultResourceNameDifferent() throws Exception { EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); ServerSideConfiguration validate = new ServerSideConfigBuilder() .defaultResource("defaultServerResource2") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - assertFailure(activeEntity.invokeActive(context, MESSAGE_FACTORY.validateStoreManager(validate)), InvalidServerSideConfigurationException.class, "Default resource not aligned."); + assertFailure(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(validate)), InvalidServerSideConfigurationException.class, "Default resource not aligned."); } @Test - public void testValidateClientSharedPoolSizeTooBig() throws Exception { + public void testValidateClientSharedPoolSizeDifferent() throws Exception { OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); registry.addResource("defaultServerResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); @@ -410,20 +412,20 @@ public void testValidateClientSharedPoolSizeTooBig() throws Exception { ServerSideConfiguration configure = new ServerSideConfigBuilder() .defaultResource("defaultServerResource1") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 32, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); ClusterTierManagerConfiguration configuration = new ClusterTierManagerConfiguration("identifier", configure); EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); ServerSideConfiguration validate = new ServerSideConfigBuilder() .defaultResource("defaultServerResource1") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 36, MemoryUnit.MEGABYTES) .build(); - assertFailure(activeEntity.invokeActive(context, MESSAGE_FACTORY.validateStoreManager(validate)),InvalidServerSideConfigurationException.class, "Pool 'secondary' not equal."); + assertSuccess(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(validate))); } @Test @@ -442,9 +444,9 @@ public void testValidateSecondClientInheritsFirstClientConfig() throws Exception EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(configuration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - assertSuccess(activeEntity.invokeActive(context, MESSAGE_FACTORY.validateStoreManager(null))); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + assertSuccess(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(null))); } @Test @@ -455,11 +457,11 @@ public void testInvalidMessageThrowsError() throws Exception { EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(blankConfiguration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(blankConfiguration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); try { - activeEntity.invokeActive(context, new InvalidMessage()); + activeEntity.invokeActive(client.invokeContext(), new InvalidMessage()); fail("Invalid message should result in AssertionError"); } catch (AssertionError e) { assertThat(e.getMessage(), containsString("Unsupported")); @@ -474,10 +476,10 @@ public void testPrepareForDestroy() throws Exception { EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(blankConfiguration, registry, DEFAULT_MAPPER)); final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(blankConfiguration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); - activeEntity.invokeActive(context, MESSAGE_FACTORY.prepareForDestroy()); + activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.prepareForDestroy()); try { ehcacheStateService.validate(null); @@ -497,10 +499,10 @@ public void testPrepareForDestroyInProgress() throws Exception { final ClusterTierManagerActiveEntity activeEntity = new ClusterTierManagerActiveEntity(blankConfiguration, ehcacheStateService, management); - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); - assertFailure(activeEntity.invokeActive(context, MESSAGE_FACTORY.validateStoreManager(null)), DestroyInProgressException.class, "in progress for destroy"); + assertFailure(activeEntity.invokeActive(client.invokeContext(), MESSAGE_FACTORY.validateStoreManager(null)), DestroyInProgressException.class, "in progress for destroy"); } @@ -608,6 +610,11 @@ public Set getAllIdentifiers() { public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { return pools.get(identifier); } + + @Override + public boolean addOffHeapResource(OffHeapResourceIdentifier identifier, long capacity) { + return false; + } }, config.getConfig().getConfiguration(), DEFAULT_MAPPER, service -> {}); } return (T) (this.storeManagerService); @@ -667,6 +674,21 @@ public long capacity() { return capacity; } + @Override + public boolean setCapacity(long size) throws IllegalArgumentException { + throw new UnsupportedOperationException("Not supported"); + } + + @Override + public void addUsageListener(UUID listenerUUID, float threshold, Consumer consumer) { + + } + + @Override + public void removeUsageListener(UUID listenerUUID) throws IllegalArgumentException { + + } + private long getUsed() { return used; } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/ClusterTierManagerPassiveEntityTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ClusterTierManagerPassiveEntityTest.java similarity index 95% rename from clustered/server/src/test/java/org/ehcache/clustered/server/ClusterTierManagerPassiveEntityTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ClusterTierManagerPassiveEntityTest.java index 4dfdecef80..1844b33e55 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/ClusterTierManagerPassiveEntityTest.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ClusterTierManagerPassiveEntityTest.java @@ -37,6 +37,7 @@ import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; +import org.terracotta.offheapresource.OffHeapUsageEvent; import org.terracotta.offheapstore.util.MemoryUnit; import java.util.Collection; @@ -45,12 +46,14 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.UUID; +import java.util.function.Consumer; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; @@ -167,14 +170,14 @@ public void testConfigureMissingDefaultResource() throws Exception { public void testConfigureLargeSharedPool() throws Exception { final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); registry.addResource("defaultServerResource", 64, MemoryUnit.MEGABYTES); - registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); - registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) - .sharedPool("tooBig", "serverResource2", 64, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 4, MemoryUnit.MEGABYTES) + .sharedPool("tooBig", "serverResource2", 16, MemoryUnit.MEGABYTES) .build(); ClusterTierManagerConfiguration configuration = new ClusterTierManagerConfiguration("identifier", serverSideConfiguration); EhcacheStateService ehcacheStateService = registry.getService(new EhcacheStateServiceConfig(configuration, registry, DEFAULT_MAPPER)); @@ -366,6 +369,11 @@ public Set getAllIdentifiers() { public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { return pools.get(identifier); } + + @Override + public boolean addOffHeapResource(OffHeapResourceIdentifier identifier, long capacity) { + return false; + } }, config.getConfig().getConfiguration(), DEFAULT_MAPPER, service -> {}); } return (T) (this.storeManagerService); @@ -429,6 +437,21 @@ public long capacity() { return capacity; } + @Override + public boolean setCapacity(long size) throws IllegalArgumentException { + throw new UnsupportedOperationException("Not supported"); + } + + @Override + public void addUsageListener(UUID listenerUUID, float threshold, Consumer consumer) { + + } + + @Override + public void removeUsageListener(UUID listenerUUID) throws IllegalArgumentException { + + } + private long getUsed() { return used; } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java similarity index 100% rename from clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java similarity index 93% rename from clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java index e8661cbb8c..a4e2709666 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java @@ -25,9 +25,9 @@ import org.ehcache.clustered.common.PoolAllocation.Unknown; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** @@ -52,14 +52,14 @@ public void testStoredKeyTypeMismatch() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, String.class.getName(), STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -78,14 +78,14 @@ public void testStoredValueTypeMismatch() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, Long.class.getName(), KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -104,14 +104,14 @@ public void testKeySerializerTypeMismatch() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, STORED_VALUE_TYPE, Double.class.getName(), VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -130,14 +130,14 @@ public void testValueSerializerTypeMismatch() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, Double.class.getName(), - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -156,14 +156,14 @@ public void testConsitencyMismatch() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -176,55 +176,45 @@ public void testConsitencyMismatch() { } @Test - public void testDedicatedPoolResourceTooBig() { + public void testDedicatedPoolResourceTooBig() throws InvalidServerStoreConfigurationException { ServerStoreConfiguration serverConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primary",8), STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); - try { - serverStoreCompatibility.verify(serverConfiguration, clientConfiguration); - fail("Expected InvalidServerStoreConfigurationException"); - } catch(InvalidServerStoreConfigurationException e) { - assertThat(e.getMessage(), containsString("resourcePoolType")); - } + serverStoreCompatibility.verify(serverConfiguration, clientConfiguration); } @Test - public void testDedicatedPoolResourceTooSmall() { + public void testDedicatedPoolResourceTooSmall() throws InvalidServerStoreConfigurationException { ServerStoreConfiguration serverConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primary",2), STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); - try { - serverStoreCompatibility.verify(serverConfiguration, clientConfiguration); - fail("Expected InvalidServerStoreConfigurationException"); - } catch(InvalidServerStoreConfigurationException e) { - assertThat(e.getMessage(), containsString("resourcePoolType")); - } + serverStoreCompatibility.verify(serverConfiguration, clientConfiguration); } @Test @@ -234,14 +224,14 @@ public void testDedicatedPoolResourceNameMismatch() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primaryBad",4), STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -260,14 +250,14 @@ public void testSharedPoolResourceNameMismatch() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Shared("sharedPoolBad"), STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -287,14 +277,14 @@ public void testAllResourceParametersMatch() throws Exception STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -308,14 +298,14 @@ public void testPoolResourceTypeMismatch() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(SHARED_POOL_ALLOCATION, STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -334,14 +324,14 @@ public void testClientStoreConfigurationUnknownPoolResource() throws InvalidServ STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(UNKNOWN_POOL_ALLOCATION, STORED_KEY_TYPE, STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -356,14 +346,14 @@ public void testServerStoreConfigurationUnknownPoolResourceInvalidKeyType() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(UNKNOWN_POOL_ALLOCATION, String.class.getName(), STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -383,9 +373,12 @@ public void testServerStoreConfigurationExtendedPoolAllocationType() { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); PoolAllocation extendedPoolAllocation = new PoolAllocation.DedicatedPoolAllocation() { + + private static final long serialVersionUID = 1L; + @Override public long getSize() { return 4; @@ -407,7 +400,7 @@ public boolean isCompatible(final PoolAllocation other) { STORED_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, false); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); diff --git a/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestClientDescriptor.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestClientDescriptor.java new file mode 100644 index 0000000000..dd0dfdbc3e --- /dev/null +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestClientDescriptor.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server; + +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.terracotta.entity.ActiveInvokeContext; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.ClientSourceId; + +import java.util.concurrent.atomic.AtomicLong; + +public final class TestClientDescriptor implements ClientDescriptor { + private static final AtomicLong counter = new AtomicLong(1L); + + private final long clientId; + private final AtomicLong transactionId = new AtomicLong(1L); + + public static TestClientDescriptor newClient() { + return new TestClientDescriptor(counter.getAndIncrement()); + } + + private TestClientDescriptor(long clientId) { + this.clientId = clientId; + } + + public ActiveInvokeContext invokeContext() { + return new TestInvokeContext(this, transactionId.getAndIncrement()); + } + @Override + public ClientSourceId getSourceId() { + return new TestClientSourceId(clientId); + } + + @Override + public boolean isValidClient() { + return true; + } + + @Override + public String toString() { + return "TestClientDescriptor[" + clientId + "]"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TestClientDescriptor that = (TestClientDescriptor) o; + + return clientId == that.clientId; + } + + @Override + public int hashCode() { + return Long.hashCode(clientId); + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/TestClientSourceId.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestClientSourceId.java similarity index 95% rename from clustered/server/src/test/java/org/ehcache/clustered/server/TestClientSourceId.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestClientSourceId.java index 9624e3815d..4b287cd07d 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/TestClientSourceId.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestClientSourceId.java @@ -32,6 +32,11 @@ public long toLong() { return id; } + @Override + public boolean isValidClient() { + return true; + } + @Override public boolean matches(ClientDescriptor clientDescriptor) { return clientDescriptor.getSourceId().toLong() == id; diff --git a/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestInvokeContext.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestInvokeContext.java new file mode 100644 index 0000000000..e1d000868d --- /dev/null +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/TestInvokeContext.java @@ -0,0 +1,80 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server; + +import java.util.Properties; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.terracotta.entity.ActiveInvokeChannel; +import org.terracotta.entity.ActiveInvokeContext; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.ClientSourceId; + +final class TestInvokeContext implements ActiveInvokeContext { + + private final ClientDescriptor clientDescriptor; + private final long txnId; + + TestInvokeContext(ClientDescriptor clientDescriptor, long txnId) { + this.clientDescriptor = clientDescriptor; + this.txnId = txnId; + } + + @Override + public ClientDescriptor getClientDescriptor() { + return clientDescriptor; + } + + @Override + public ActiveInvokeChannel openInvokeChannel() { + throw new UnsupportedOperationException(); + } + + @Override + public ClientSourceId getClientSource() { + return clientDescriptor.getSourceId(); + } + + @Override + public long getCurrentTransactionId() { + return txnId; + } + + @Override + public long getOldestTransactionId() { + return 0; + } + + @Override + public boolean isValidClientInformation() { + return true; + } + + @Override + public ClientSourceId makeClientSourceId(long l) { + return new TestClientSourceId(l); + } + + @Override + public int getConcurrencyKey() { + return 1; + } + + @Override + public Properties getClientSourceProperties() { + return new Properties(); + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessageTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessageTest.java similarity index 97% rename from clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessageTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessageTest.java index b9e294e85b..a0cee7625b 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessageTest.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessageTest.java @@ -57,7 +57,7 @@ public void before() { trackingMap.put(id1.toLong(), res1); trackingMap.put(id2.toLong(), res2); - message = new EhcacheMessageTrackerMessage(2, trackingMap); + message = new EhcacheMessageTrackerMessage(trackingMap); } @Test diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java similarity index 98% rename from clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java index 06b0d98fe6..9f0bb360f5 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java @@ -56,6 +56,8 @@ public void setUp() { public void testDelegatesToEhcacheCodeForEncoding() throws Exception { LifecycleMessage lifecycleMessage = new LifecycleMessage() { + private static final long serialVersionUID = 1L; + @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.APPEND; diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java similarity index 88% rename from clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java index d3cfbf137b..ab5ab59a55 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java @@ -19,15 +19,16 @@ import org.ehcache.clustered.common.internal.messages.ResponseCodec; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.server.TestClientSourceId; +import org.hamcrest.MatcherAssert; import org.junit.Test; import java.util.HashMap; import java.util.Map; import static org.assertj.core.api.Assertions.assertThat; -import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.ehcache.clustered.common.internal.store.Util.getChain; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.ChainUtils.sequencedChainOf; +import static org.ehcache.clustered.Matchers.matchesChain; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -41,7 +42,7 @@ public class EhcacheSyncMessageCodecTest { @Test public void testDataSyncMessageEncodeDecode() throws Exception { Map chainMap = new HashMap<>(); - Chain chain = getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L)); + Chain chain = sequencedChainOf(createPayload(10L), createPayload(100L), createPayload(1000L)); chainMap.put(1L, chain); chainMap.put(2L, chain); chainMap.put(3L, chain); @@ -50,14 +51,14 @@ public void testDataSyncMessageEncodeDecode() throws Exception { EhcacheDataSyncMessage decoded = (EhcacheDataSyncMessage) codec.decode(0, encodedMessage); Map decodedChainMap = decoded.getChainMap(); assertThat(decodedChainMap).hasSize(3); - assertThat(chainsEqual(decodedChainMap.get(1L), chain)).isTrue(); - assertThat(chainsEqual(decodedChainMap.get(2L), chain)).isTrue(); - assertThat(chainsEqual(decodedChainMap.get(3L), chain)).isTrue(); + MatcherAssert.assertThat(decodedChainMap.get(1L), matchesChain(chain)); + MatcherAssert.assertThat(decodedChainMap.get(2L), matchesChain(chain)); + MatcherAssert.assertThat(decodedChainMap.get(3L), matchesChain(chain)); } @Test public void testMessageTrackerSyncEncodeDecode_emptyMessage() throws Exception { - EhcacheMessageTrackerMessage message = new EhcacheMessageTrackerMessage(1, new HashMap<>()); + EhcacheMessageTrackerMessage message = new EhcacheMessageTrackerMessage(new HashMap<>()); byte[] encodedMessage = codec.encode(0, message); EhcacheMessageTrackerMessage decoded = (EhcacheMessageTrackerMessage) codec.decode(0, encodedMessage); assertThat(decoded.getTrackedMessages()).isEmpty(); @@ -68,7 +69,7 @@ public void testMessageTrackerSyncEncodeDecode_clientWithoutMessage() throws Exc HashMap> trackerMap = new HashMap<>(); trackerMap.put(1L, new HashMap<>()); - EhcacheMessageTrackerMessage message = new EhcacheMessageTrackerMessage(1, trackerMap); + EhcacheMessageTrackerMessage message = new EhcacheMessageTrackerMessage(trackerMap); byte[] encodedMessage = codec.encode(0, message); EhcacheMessageTrackerMessage decoded = (EhcacheMessageTrackerMessage) codec.decode(0, encodedMessage); assertThat(decoded.getTrackedMessages()).isEmpty(); @@ -103,7 +104,7 @@ public void testMessageTrackerSyncEncodeDecode_messages() throws Exception { responses2.put(5L, r5); trackerMap.put(2L, responses2); - EhcacheMessageTrackerMessage message = new EhcacheMessageTrackerMessage(1, trackerMap); + EhcacheMessageTrackerMessage message = new EhcacheMessageTrackerMessage(trackerMap); byte[] encodedMessage = codec.encode(0, message); EhcacheMessageTrackerMessage decoded = (EhcacheMessageTrackerMessage) codec.decode(0, encodedMessage); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java similarity index 87% rename from clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java index 264b41105b..68054367ee 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java @@ -24,14 +24,12 @@ import org.junit.Test; import static java.nio.ByteBuffer.wrap; -import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.ehcache.clustered.common.internal.store.Util.getChain; +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.Matchers.matchesChain; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; - public class PassiveReplicationMessageCodecTest { @@ -39,7 +37,7 @@ public class PassiveReplicationMessageCodecTest { @Test public void testChainReplicationMessageCodec() { - Chain chain = getChain(false, createPayload(2L), createPayload(20L)); + Chain chain = chainOf(createPayload(2L), createPayload(20L)); ChainReplicationMessage chainReplicationMessage = new ChainReplicationMessage(2L, chain, 200L, 100L, 1L); byte[] encoded = codec.encode(chainReplicationMessage); @@ -49,7 +47,7 @@ public void testChainReplicationMessageCodec() { assertThat(decodedMsg.getTransactionId(), is(chainReplicationMessage.getTransactionId())); assertThat(decodedMsg.getOldestTransactionId(), is(chainReplicationMessage.getOldestTransactionId())); assertThat(decodedMsg.getKey(), is(chainReplicationMessage.getKey())); - assertTrue(chainsEqual(decodedMsg.getChain(), chainReplicationMessage.getChain())); + assertThat(decodedMsg.getChain(), matchesChain(chainReplicationMessage.getChain())); } diff --git a/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/ClusterTierActiveEntityTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/ClusterTierActiveEntityTest.java new file mode 100644 index 0000000000..12eb467c5e --- /dev/null +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/ClusterTierActiveEntityTest.java @@ -0,0 +1,1461 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.store; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.PoolAllocation.Dedicated; +import org.ehcache.clustered.common.PoolAllocation.Shared; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.exceptions.InvalidOperationException; +import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; +import org.ehcache.clustered.common.internal.messages.ClusterTierReconnectMessage; +import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.ClusterTierEntityConfiguration; +import org.ehcache.clustered.server.CommunicatorServiceConfiguration; +import org.ehcache.clustered.server.ConcurrencyStrategies; +import org.ehcache.clustered.server.EhcacheStateServiceImpl; +import org.ehcache.clustered.server.KeySegmentMapper; +import org.ehcache.clustered.server.ServerSideServerStore; +import org.ehcache.clustered.server.ServerStoreEventListener; +import org.ehcache.clustered.server.TestClientDescriptor; +import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; +import org.ehcache.clustered.server.store.ClusterTierActiveEntity.InvalidationHolder; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.hamcrest.core.IsInstanceOf; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.InOrder; +import org.mockito.Mockito; +import org.terracotta.client.message.tracker.OOOMessageHandler; +import org.terracotta.client.message.tracker.OOOMessageHandlerConfiguration; +import org.terracotta.client.message.tracker.OOOMessageHandlerImpl; +import org.terracotta.entity.ActiveInvokeContext; +import org.mockito.ArgumentMatchers; +import org.terracotta.entity.ActiveServerEntity; +import org.terracotta.entity.ClientCommunicator; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.ConfigurationException; +import org.terracotta.entity.EntityMessage; +import org.terracotta.entity.EntityResponse; +import org.terracotta.entity.IEntityMessenger; +import org.terracotta.entity.PassiveSynchronizationChannel; +import org.terracotta.entity.ServiceConfiguration; +import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.service.monitoring.EntityManagementRegistry; +import org.terracotta.management.service.monitoring.EntityManagementRegistryConfiguration; +import org.terracotta.management.service.monitoring.EntityMonitoringService; +import org.terracotta.offheapresource.OffHeapResource; +import org.terracotta.offheapresource.OffHeapResourceIdentifier; +import org.terracotta.offheapresource.OffHeapResources; +import org.terracotta.offheapresource.OffHeapUsageEvent; +import org.terracotta.offheapstore.util.MemoryUnit; + +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.Matchers.entry; +import static org.ehcache.clustered.Matchers.hasPayloads; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; +import static org.hamcrest.core.CombinableMatcher.both; +import static org.hamcrest.core.CombinableMatcher.either; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isNotNull; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +public class ClusterTierActiveEntityTest { + + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + private static final int MAX_SYNC_CONCURRENCY = 1; + private static final ExecutorService SYNC_GETS_EXECUTOR = new ThreadPoolExecutor(0, MAX_SYNC_CONCURRENCY, + 20, TimeUnit.SECONDS, new LinkedBlockingQueue<>()); + + private String defaultStoreName = "store"; + private String defaultResource = "default"; + private String defaultSharedPool = "defaultShared"; + private String identifier = "identifier"; + private OffHeapIdentifierRegistry defaultRegistry; + private ServerStoreConfiguration defaultStoreConfiguration; + private ClusterTierEntityConfiguration defaultConfiguration; + + @Before + public void setUp() { + defaultRegistry = new OffHeapIdentifierRegistry(); + defaultRegistry.addResource(defaultResource, 10, MemoryUnit.MEGABYTES); + defaultStoreConfiguration = new ServerStoreConfigBuilder().dedicated(defaultResource, 1024, MemoryUnit.KILOBYTES).build(); + defaultConfiguration = new ClusterTierEntityConfiguration(identifier, defaultStoreName, + defaultStoreConfiguration); + } + + @Test(expected = ConfigurationException.class) + public void testConfigNull() throws Exception { + new ClusterTierActiveEntity(mock(ServiceRegistry.class), null, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + } + + @Test + public void testConnected() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + + ClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + Set connectedClients = activeEntity.getConnectedClients(); + assertThat(connectedClients, hasSize(1)); + assertThat(connectedClients, hasItem(client)); + } + + @Test + public void testConnectedAgain() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + + ClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + activeEntity.connected(client); + Set connectedClients = activeEntity.getConnectedClients(); + assertThat(connectedClients, hasSize(1)); + assertThat(connectedClients, hasItem(client)); + } + + @Test + public void testConnectedSecond() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + + ClientDescriptor client1 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + + ClientDescriptor client2 = TestClientDescriptor.newClient(); + activeEntity.connected(client2); + + Set connectedClients = activeEntity.getConnectedClients(); + assertThat(connectedClients, hasSize(2)); + assertThat(connectedClients, hasItems(client1, client2)); + } + + @Test + public void testDisconnectedNotConnected() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + + ClientDescriptor client1 = TestClientDescriptor.newClient(); + activeEntity.disconnected(client1); + // Not expected to fail ... + } + + /** + * Ensures the disconnect of a connected client is properly tracked. + */ + @Test + public void testDisconnected() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + + ClientDescriptor client1 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + activeEntity.disconnected(client1); + + assertThat(activeEntity.getConnectedClients(), hasSize(0)); + } + + @Test + public void testEventListenerEnabledTracking() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + + // check that connecting clients does not enable listeners by default + activeEntity.connected(client1); + activeEntity.connected(client2); + assertThat(activeEntity.getEventListeners().size(), is(0)); + + assertThat(activeEntity.invokeActive(client1.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(true)), succeeds()); + assertThat(activeEntity.getEventListeners().size(), is(1)); + // a client can register as many times as it wants, it's considered a single listener + assertThat(activeEntity.invokeActive(client1.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(true)), succeeds()); + assertThat(activeEntity.getEventListeners().size(), is(1)); + assertThat(activeEntity.invokeActive(client2.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(true)), succeeds()); + assertThat(activeEntity.getEventListeners().size(), is(2)); + + // check that disabling events is accounted for + assertThat(activeEntity.invokeActive(client2.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(false)), succeeds()); + assertThat(activeEntity.getEventListeners().size(), is(1)); + // check that disabling events from a client that does not have events enabled is a noop + assertThat(activeEntity.invokeActive(client2.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(false)), succeeds()); + assertThat(activeEntity.getEventListeners().size(), is(1)); + + // check that disconnected clients are accounted for + activeEntity.disconnected(client1); + assertThat(activeEntity.getEventListeners().size(), is(0)); + } + + /** + * Ensures the disconnect of a connected client is properly tracked and does not affect others. + */ + @Test + public void testDisconnectedSecond() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + + ClientDescriptor client1 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + + ClientDescriptor client2 = TestClientDescriptor.newClient(); + activeEntity.connected(client2); + + assertThat(activeEntity.getConnectedClients(), hasSize(2)); + + activeEntity.disconnected(client1); + + Set connectedClients = activeEntity.getConnectedClients(); + assertThat(connectedClients, hasSize(1)); + assertThat(connectedClients, hasItem(client2)); + } + + @Test + public void testLoadExistingRegistersEvictionListener() throws Exception { + EhcacheStateService stateService = mock(EhcacheStateService.class); + + ServerSideServerStore store = mock(ServerSideServerStore.class); + when(stateService.loadStore(eq(defaultStoreName), any())).thenReturn(store); + + IEntityMessenger entityMessenger = mock(IEntityMessenger.class); + ServiceRegistry registry = getCustomMockedServiceRegistry(stateService, null, entityMessenger, null, null); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(registry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.loadExisting(); + verify(store).setEventListener(any(ServerStoreEventListener.class)); + } + + @Test + public void testEnableEventListenerMessageEnablesOrDisablesEventsOnStore() throws Exception { + EhcacheStateService stateService = mock(EhcacheStateService.class); + + ServerSideServerStore store = mock(ServerSideServerStore.class); + InOrder storeOrderVerifier = Mockito.inOrder(store); + when(stateService.createStore(eq(defaultStoreName), any(), anyBoolean())).thenReturn(store); + when(stateService.getStore(eq(defaultStoreName))).thenReturn(store); + + IEntityMessenger entityMessenger = mock(IEntityMessenger.class); + ServiceRegistry registry = getCustomMockedServiceRegistry(stateService, null, entityMessenger, null, null); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(registry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + + activeEntity.connected(client); + // also check that duplicating enable/disable calls has no effect + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(true)), succeeds()); + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(true)), succeeds()); + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(false)), succeeds()); + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.EnableEventListenerMessage(false)), succeeds()); + + storeOrderVerifier.verify(store).enableEvents(eq(true)); + storeOrderVerifier.verify(store).enableEvents(eq(false)); + } + + @Test + public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + TestClientDescriptor client3 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + activeEntity.connected(client2); + activeEntity.connected(client3); + + + // attach to the store + assertThat(activeEntity.invokeActive(client1.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client2.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client3.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + // perform an append + assertThat(activeEntity.invokeActive(client1.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))), succeeds()); + + // assert that an invalidation request is pending + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); + InvalidationHolder invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); + assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(client1)); + assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(2)); + assertThat(invalidationHolder.clientsHavingToInvalidate, containsInAnyOrder(client2, client3)); + + // client 2 acks + assertThat(activeEntity.invokeActive(client2.invokeContext(), new ServerStoreOpMessage.ClientInvalidationAck(1L, activeEntity.getClientsWaitingForInvalidation().keySet().iterator().next())), succeeds()); + + // assert that client 2 is not waited for anymore + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); + invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); + assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(client1)); + assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(1)); + assertThat(invalidationHolder.clientsHavingToInvalidate, contains(client3)); + + // client 3 acks + assertThat(activeEntity.invokeActive(client3.invokeContext(), new ServerStoreOpMessage.ClientInvalidationAck(1L, activeEntity.getClientsWaitingForInvalidation().keySet().iterator().next())), succeeds()); + + // assert that the invalidation request is done since all clients disconnected + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); + } + + @Test + public void testClearInvalidationAcksTakenIntoAccount() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + TestClientDescriptor client3 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + activeEntity.connected(client2); + activeEntity.connected(client3); + + // attach to the store + assertThat(activeEntity.invokeActive(client1.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client2.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client3.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + // perform a clear + assertThat(activeEntity.invokeActive(client1.invokeContext(), new ServerStoreOpMessage.ClearMessage()), succeeds()); + + // assert that an invalidation request is pending + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); + InvalidationHolder invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); + assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(client1)); + assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(2)); + assertThat(invalidationHolder.clientsHavingToInvalidate, containsInAnyOrder(client2, client3)); + + // client 2 acks + assertThat(activeEntity.invokeActive(client2.invokeContext(), new ServerStoreOpMessage.ClientInvalidationAllAck(activeEntity.getClientsWaitingForInvalidation().keySet().iterator().next())), succeeds()); + + // assert that client 2 is not waited for anymore + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); + invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); + assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(client1)); + assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(1)); + assertThat(invalidationHolder.clientsHavingToInvalidate, contains(client3)); + + // client 3 acks + assertThat(activeEntity.invokeActive(client3.invokeContext(), new ServerStoreOpMessage.ClientInvalidationAllAck(activeEntity.getClientsWaitingForInvalidation().keySet().iterator().next())), succeeds()); + + // assert that the invalidation request is done since all clients disconnected + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); + } + + @Test + public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAccount() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + TestClientDescriptor client3 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + activeEntity.connected(client2); + activeEntity.connected(client3); + + // attach to the store + assertThat(activeEntity.invokeActive(client1.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client2.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client3.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + // perform an append + assertThat(activeEntity.invokeActive(client1.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))), succeeds()); + + // disconnect client2 + activeEntity.disconnected(client2); + + // assert that client 2 is not waited for anymore + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); + InvalidationHolder invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); + assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(client1)); + assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(1)); + assertThat(invalidationHolder.clientsHavingToInvalidate, contains(client3)); + + // disconnect client3 + activeEntity.disconnected(client3); + + // assert that the invalidation request is done since all clients disconnected + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); + } + + @Test + public void testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAccount() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + TestClientDescriptor client3 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + activeEntity.connected(client2); + activeEntity.connected(client3); + + // attach to the store + assertThat(activeEntity.invokeActive(client1.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client2.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client3.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + // perform an append + assertThat(activeEntity.invokeActive(client1.invokeContext(), new ServerStoreOpMessage.ClearMessage()), succeeds()); + + // disconnect client2 + activeEntity.disconnected(client2); + + // assert that client 2 is not waited for anymore + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); + InvalidationHolder invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); + assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(client1)); + assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(1)); + assertThat(invalidationHolder.clientsHavingToInvalidate, contains(client3)); + + // disconnect client3 + activeEntity.disconnected(client3); + + // assert that the invalidation request is done since all clients disconnected + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); + } + + @Test + public void testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount() throws Exception { + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfigBuilder() + .dedicated(defaultResource, 4, MemoryUnit.MEGABYTES) + .consistency(Consistency.STRONG) + .build(); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, serverStoreConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + TestClientDescriptor client3 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + activeEntity.connected(client2); + activeEntity.connected(client3); + + // attach to the store + assertThat(activeEntity.invokeActive(client1.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client2.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client3.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)), succeeds()); + + // perform an append + assertThat(activeEntity.invokeActive(client1.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))), succeeds()); + + // disconnect client1 + activeEntity.disconnected(client1); + + // assert that the invalidation request is done since the originating client disconnected + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); + } + + @Test + public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() throws Exception { + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfigBuilder() + .dedicated(defaultResource, 4, MemoryUnit.MEGABYTES) + .consistency(Consistency.STRONG) + .build(); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, serverStoreConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + TestClientDescriptor client3 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + activeEntity.connected(client2); + activeEntity.connected(client3); + + // attach to the store + assertThat(activeEntity.invokeActive(client1.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client2.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)), succeeds()); + assertThat(activeEntity.invokeActive(client3.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)), succeeds()); + + // perform an append + assertThat(activeEntity.invokeActive(client1.invokeContext(), new ServerStoreOpMessage.ClearMessage()), succeeds()); + + // disconnect client1 + activeEntity.disconnected(client1); + + // assert that the invalidation request is done since the originating client disconnected + assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); + } + + @Test + public void testWithAttachmentSucceedsInvokingServerStoreOperation() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + // attach to the store + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))), succeeds()); + + EhcacheEntityResponse response = activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.GetMessage(1L)); + assertThat(response, instanceOf(EhcacheEntityResponse.GetResponse.class)); + EhcacheEntityResponse.GetResponse getResponse = (EhcacheEntityResponse.GetResponse) response; + assertThat(getResponse.getChain().isEmpty(), is(false)); + } + + @Test + public void testCreateDedicatedServerStore() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder(defaultStoreName)); + + assertThat(defaultRegistry.getResource(defaultResource).getUsed(), is(MemoryUnit.MEGABYTES.toBytes(1L))); + + assertThat(activeEntity.getConnectedClients(), empty()); + assertThat(defaultRegistry.getStoreManagerService().getStores(), containsInAnyOrder(defaultStoreName)); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + assertThat(activeEntity.getConnectedClients(), contains(client)); + + /* + * Ensure the dedicated resource pool remains after client disconnect. + */ + activeEntity.disconnected(client); + + assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder(defaultStoreName)); + + assertThat(activeEntity.getConnectedClients(), empty()); + assertThat(defaultRegistry.getStoreManagerService().getStores(), containsInAnyOrder(defaultStoreName)); + } + + @Test + public void testCreateDedicatedServerStoreExisting() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + ClusterTierActiveEntity otherEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + try { + otherEntity.createNew(); + fail("Duplicate creation should fail with an exception"); + } catch (ConfigurationException e) { + assertThat(e.getMessage(), containsString("already exists")); + } + } + + @Test + public void testValidateDedicatedServerStore() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client1 = TestClientDescriptor.newClient(); + activeEntity.connected(client1); + + TestClientDescriptor client2 = TestClientDescriptor.newClient(); + activeEntity.connected(client2); + + assertThat(activeEntity.invokeActive(client2.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder(defaultStoreName)); + + assertThat(defaultRegistry.getResource(defaultResource).getUsed(), is(MemoryUnit.MEGABYTES.toBytes(1L))); + + assertThat(activeEntity.getConnectedClients(), hasSize(2)); + assertThat(activeEntity.getConnectedClients(), containsInAnyOrder(client1, client2)); + assertThat(defaultRegistry.getStoreManagerService().getStores(), contains(defaultStoreName)); + } + + @Test + public void testValidateDedicatedServerStoreBad() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), + new LifecycleMessage.ValidateServerStore(defaultStoreName, + new ServerStoreConfigBuilder() + .dedicated("banana", 1024, MemoryUnit.KILOBYTES) + .build())), + failsWith(instanceOf(InvalidServerStoreConfigurationException.class))); + } + + @Test + public void testValidateUnknown() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, + new ServerStoreConfigBuilder().unknown().build())), succeeds()); + } + + @Test + public void testCreateSharedServerStore() throws Exception { + defaultRegistry.addSharedPool(defaultSharedPool, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); + ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() + .shared(defaultSharedPool) + .build(); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + + assertThat(defaultRegistry.getStoreManagerService().getStores(), containsInAnyOrder(defaultStoreName)); + + assertThat(defaultRegistry.getStoreManagerService().getSharedResourcePoolIds(), containsInAnyOrder(defaultSharedPool)); + assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), empty()); + + assertThat(defaultRegistry.getResource(defaultResource).getUsed(), is(MemoryUnit.MEGABYTES.toBytes(2L))); + + } + + @Test + public void testCreateSharedServerStoreExisting() throws Exception { + defaultRegistry.addSharedPool(defaultSharedPool, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); + ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() + .shared(defaultSharedPool) + .build(); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + ClusterTierActiveEntity otherEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + try { + otherEntity.createNew(); + fail("Duplicate creation should fail with an exception"); + } catch (ConfigurationException e) { + assertThat(e.getMessage(), containsString("already exists")); + } + } + + @Test + public void testValidateSharedServerStore() throws Exception { + defaultRegistry.addSharedPool(defaultSharedPool, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); + ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() + .shared(defaultSharedPool) + .build(); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, storeConfiguration)), succeeds()); + + assertThat(activeEntity.getConnectedClients(), contains(client)); + } + + @Test + public void testValidateServerStore_DedicatedStoresDifferentSizes() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() + .dedicated(defaultResource, 2, MemoryUnit.MEGABYTES) + .build(); + + String expectedMessageContent = "Existing ServerStore configuration is not compatible with the desired configuration: " + + "\n\t" + + "resourcePoolType existing: " + + defaultStoreConfiguration.getPoolAllocation() + + ", desired: " + + storeConfiguration.getPoolAllocation(); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, storeConfiguration)), + succeeds()); + } + + @Test + public void testValidateServerStore_DedicatedStoreResourceNamesDifferent() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() + .dedicated("otherResource", 1, MemoryUnit.MEGABYTES) + .build(); + + String expectedMessageContent = "Existing ServerStore configuration is not compatible with the desired configuration: " + + "\n\t" + + "resourcePoolType existing: " + + defaultStoreConfiguration.getPoolAllocation() + + ", desired: " + + storeConfiguration.getPoolAllocation(); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, storeConfiguration)), + failsWith(both(IsInstanceOf.any(InvalidServerStoreConfigurationException.class)).and(withMessage(containsString(expectedMessageContent))))); + } + + @Test + public void testValidateServerStore_DifferentSharedPools() throws Exception { + defaultRegistry.addSharedPool(defaultSharedPool, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); + ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() + .shared(defaultSharedPool) + .build(); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + ServerStoreConfiguration otherConfiguration = new ServerStoreConfigBuilder() + .shared("other") + .build(); + + String expectedMessageContent = "Existing ServerStore configuration is not compatible with the desired configuration: " + + "\n\t" + + "resourcePoolType existing: " + + storeConfiguration.getPoolAllocation() + + ", desired: " + + otherConfiguration.getPoolAllocation(); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, otherConfiguration)), + failsWith(both(IsInstanceOf.any(InvalidServerStoreConfigurationException.class)).and(withMessage(containsString(expectedMessageContent))))); + } + + @Test + public void testDestroyServerStore() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + activeEntity.destroy(); + + assertThat(defaultRegistry.getResource(defaultResource).getUsed(), is(0L)); + + assertThat(defaultRegistry.getStoreManagerService().getStores(), empty()); + assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), empty()); + } + + /** + * Ensures shared pool and store (cache) name spaces are independent. + * The cache alias is used as the name for a {@code ServerStore} instance; this name can be + * the same as, but is independent of, the shared pool name. The + */ + @Test + public void testSharedPoolCacheNameCollision() throws Exception { + defaultRegistry.addSharedPool(defaultStoreName, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); + + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + assertThat(defaultRegistry.getStoreManagerService().getSharedResourcePoolIds(), contains(defaultStoreName)); + assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), contains(defaultStoreName)); + assertThat(defaultRegistry.getStoreManagerService().getStores(), containsInAnyOrder(defaultStoreName)); + } + + @Test + public void testCreateNonExistentSharedPool() throws Exception { + ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() + .shared(defaultSharedPool) + .build(); + + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + try { + activeEntity.createNew(); + fail("Creation with non-existent shared pool should have failed"); + } catch (ConfigurationException e) { + assertThat(e.getMessage(), containsString("undefined")); + } + } + + @Test + public void testCreateUnknownServerResource() throws Exception { + ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() + .dedicated("unknown", 2, MemoryUnit.MEGABYTES) + .build(); + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, + new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + try { + activeEntity.createNew(); + fail("Creation with non-existent shared pool should have failed"); + } catch (ConfigurationException e) { + assertThat(e.getMessage(), containsString("Non-existent server side resource")); + } + } + + @Test + public void testSyncToPassiveNoData() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 3); + + verifyZeroInteractions(syncChannel); + } + + @Test + public void testSyncToPassiveBatchedByDefault() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + ByteBuffer payload = ByteBuffer.allocate(512); + // Put keys that maps to the same concurrency key + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, payload)), succeeds()); + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(-2L, payload)), succeeds()); + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(17L, payload)), succeeds()); + + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 3); + + verify(syncChannel).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); + } + + @Test + public void testDataSyncToPassiveCustomBatchSize() throws Exception { + System.setProperty(ClusterTierActiveEntity.SYNC_DATA_SIZE_PROP, "512"); + try { + prepareAndRunActiveEntityForPassiveSync((activeEntity, concurrencyKey) -> { + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, concurrencyKey); + verify(syncChannel, atLeast(2)).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); + }); + } finally { + System.clearProperty(ClusterTierActiveEntity.SYNC_DATA_SIZE_PROP); + } + } + + @Test + public void testDataSyncToPassiveCustomGets() throws Exception { + System.setProperty(ClusterTierActiveEntity.SYNC_DATA_GETS_PROP, "2"); + try { + prepareAndRunActiveEntityForPassiveSync((activeEntity, concurrencyKey) -> { + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, concurrencyKey); + verify(syncChannel, atLeast(2)).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); + }); + } finally { + System.clearProperty(ClusterTierActiveEntity.SYNC_DATA_GETS_PROP); + } + } + + @Test + public void testDataSyncToPassiveException() throws Exception { + System.setProperty(ClusterTierActiveEntity.SYNC_DATA_GETS_PROP, "1"); + try { + prepareAndRunActiveEntityForPassiveSync((activeEntity, concurrencyKey) -> { + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.destroy(); + try { + activeEntity.synchronizeKeyToPassive(syncChannel, concurrencyKey); + fail("Destroyed entity not expected to sync"); + } catch (RuntimeException e) { + assertThat(e.getCause(), instanceOf(ExecutionException.class)); + } + }); + } finally { + System.clearProperty(ClusterTierActiveEntity.SYNC_DATA_GETS_PROP); + } + } + + @Test + public void testLoadExistingRecoversInflightInvalidationsForEventualCache() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + EhcacheStateServiceImpl ehcacheStateService = defaultRegistry.getStoreManagerService(); + ehcacheStateService.createStore(defaultStoreName, defaultStoreConfiguration, false); //Passive would have done this before failover + + InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(defaultStoreName); + + Random random = new Random(); + random.ints(0, 100).distinct().limit(10).forEach(invalidationTracker::trackHashInvalidation); + + ClientDescriptor client = mock(ClientDescriptor.class); + try (ActiveServerEntity.ReconnectHandler reconnect = activeEntity.startReconnect()) { + reconnect.handleReconnect(client, new ReconnectMessageCodec().encode(new ClusterTierReconnectMessage(false))); + } + + ClientCommunicator clientCommunicator = defaultRegistry.getService(new CommunicatorServiceConfiguration()); + + verify(clientCommunicator, times(10)).sendNoResponse(ArgumentMatchers.eq(client), ArgumentMatchers.isA(EhcacheEntityResponse.ClientInvalidateHash.class)); + } + + @Test + @SuppressWarnings("unchecked") + public void testReplicationMessageAndOriginalServerStoreOpMessageHasSameConcurrency() throws Exception { + + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + IEntityMessenger entityMessenger = defaultRegistry.getEntityMessenger(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + reset(entityMessenger); + EhcacheEntityMessage getAndAppend = new ServerStoreOpMessage.GetAndAppendMessage(1L, createPayload(1L)); + activeEntity.invokeActive(client.invokeContext(), getAndAppend); + + ArgumentCaptor captor = ArgumentCaptor.forClass(PassiveReplicationMessage.ChainReplicationMessage.class); + verify(entityMessenger).messageSelfAndDeferRetirement(isNotNull(), captor.capture()); + PassiveReplicationMessage.ChainReplicationMessage replicatedMessage = captor.getValue(); + + assertThat(replicatedMessage.concurrencyKey(), is(((ConcurrentEntityMessage) getAndAppend).concurrencyKey())); + } + + @Test + public void testInvalidMessageThrowsError() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + try { + activeEntity.invokeActive(client.invokeContext(), new InvalidMessage()); + fail("Invalid message should result in AssertionError"); + } catch (AssertionError e) { + assertThat(e.getMessage(), containsString("Unsupported")); + } + } + + @Test + public void testActiveMessageTracking() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + EhcacheStateServiceImpl ehcacheStateService = defaultRegistry.getStoreManagerService(); + ehcacheStateService.createStore(defaultStoreName, defaultStoreConfiguration, false); //hack to enable message tracking on active + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + ActiveInvokeContext context = client.invokeContext(); + + EhcacheEntityResponse expected = activeEntity.invokeActive(context, new ServerStoreOpMessage.GetAndAppendMessage(1L, createPayload(1L))); + + // this invoke should be rejected due to duplicate message id + EhcacheEntityResponse actual = activeEntity.invokeActive(context, new ServerStoreOpMessage.GetAndAppendMessage(1L, createPayload(2L))); + + assertThat(actual, sameInstance(expected)); + + EhcacheEntityResponse.GetResponse response = (EhcacheEntityResponse.GetResponse) activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.GetMessage(1L)); + assertThat(response.getChain(), hasPayloads(1L)); + } + + @Test @SuppressWarnings("unchecked") + public void testShortIterationIsNotTracked() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + EhcacheStateServiceImpl ehcacheStateService = defaultRegistry.getStoreManagerService(); + ehcacheStateService.createStore(defaultStoreName, defaultStoreConfiguration, false); //hack to enable message tracking on active + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(2L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(2L, createPayload(3L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(2L, createPayload(4L))); + + EhcacheEntityResponse.IteratorBatch iteratorBatch = (EhcacheEntityResponse.IteratorBatch) activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.IteratorOpenMessage(Integer.MAX_VALUE)); + + assertThat(iteratorBatch.isLast(), is(true)); + assertThat(iteratorBatch.getChains(), containsInAnyOrder( + entry(is(1L), hasPayloads(1L, 2L)), + entry(is(2L), hasPayloads(3L, 4L)) + )); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.IteratorAdvanceMessage(iteratorBatch.getIdentity(), Integer.MAX_VALUE)), failsWith(instanceOf(InvalidOperationException.class))); + } + + @Test + public void testLongIteration() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + EhcacheStateServiceImpl ehcacheStateService = defaultRegistry.getStoreManagerService(); + ehcacheStateService.createStore(defaultStoreName, defaultStoreConfiguration, false); //hack to enable message tracking on active + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(2L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(2L, createPayload(3L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(2L, createPayload(4L))); + + EhcacheEntityResponse.IteratorBatch batchOne = (EhcacheEntityResponse.IteratorBatch) activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.IteratorOpenMessage(1)); + + Matcher> chainOne = entry(is(1L), hasPayloads(1L, 2L)); + Matcher> chainTwo = entry(is(2L), hasPayloads(3L, 4L)); + + assertThat(batchOne.isLast(), is(false)); + assertThat(batchOne.getChains(), either(contains(chainOne)).or(contains(chainTwo))); + + EhcacheEntityResponse.IteratorBatch batchTwo = (EhcacheEntityResponse.IteratorBatch) activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.IteratorAdvanceMessage(batchOne.getIdentity(), Integer.MAX_VALUE)); + assertThat(batchTwo.isLast(), is(true)); + if (contains(chainOne).matches(batchOne.getChains())) { + assertThat(batchTwo.getChains(), contains(chainTwo)); + } else { + assertThat(batchTwo.getChains(), contains(chainOne)); + } + + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.IteratorAdvanceMessage(batchOne.getIdentity(), Integer.MAX_VALUE)), failsWith(instanceOf(InvalidOperationException.class))); + } + + @Test + public void testExplicitIteratorClose() throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + EhcacheStateServiceImpl ehcacheStateService = defaultRegistry.getStoreManagerService(); + ehcacheStateService.createStore(defaultStoreName, defaultStoreConfiguration, false); //hack to enable message tracking on active + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(1L, createPayload(2L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(2L, createPayload(3L))); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(2L, createPayload(4L))); + + EhcacheEntityResponse.IteratorBatch batchOne = (EhcacheEntityResponse.IteratorBatch) activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.IteratorOpenMessage(1)); + + Matcher> chainOne = entry(is(1L), hasPayloads(1L, 2L)); + Matcher> chainTwo = entry(is(2L), hasPayloads(3L, 4L)); + + assertThat(batchOne.isLast(), is(false)); + assertThat(batchOne.getChains(), either(contains(chainOne)).or(contains(chainTwo))); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.IteratorCloseMessage(batchOne.getIdentity())), succeeds()); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.IteratorAdvanceMessage(batchOne.getIdentity(), Integer.MAX_VALUE)), failsWith(instanceOf(InvalidOperationException.class))); + } + + private void prepareAndRunActiveEntityForPassiveSync(BiConsumer testConsumer) throws Exception { + ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER, SYNC_GETS_EXECUTOR); + activeEntity.createNew(); + + TestClientDescriptor client = TestClientDescriptor.newClient(); + activeEntity.connected(client); + + assertThat(activeEntity.invokeActive(client.invokeContext(), new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)), succeeds()); + + ByteBuffer payload = ByteBuffer.allocate(512); + // Put keys that maps to the same concurrency key + ServerStoreOpMessage.AppendMessage testMessage = new ServerStoreOpMessage.AppendMessage(1L, payload); + activeEntity.invokeActive(client.invokeContext(), testMessage); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(-2L, payload)); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(17L, payload)); + activeEntity.invokeActive(client.invokeContext(), new ServerStoreOpMessage.AppendMessage(33L, payload)); + + ConcurrencyStrategies.DefaultConcurrencyStrategy concurrencyStrategy = new ConcurrencyStrategies.DefaultConcurrencyStrategy(DEFAULT_MAPPER); + int concurrencyKey = concurrencyStrategy.concurrencyKey(testMessage); + testConsumer.accept(activeEntity, concurrencyKey); + } + + private Matcher succeeds() { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(EhcacheEntityResponse item) { + return EhcacheResponseType.SUCCESS.equals(item.getResponseType()); + } + + @Override + public void describeTo(Description description) { + description.appendText(" a success response"); + } + }; + } + + private Matcher failsWith(Matcher failure) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(EhcacheEntityResponse item) { + if (EhcacheResponseType.FAILURE.equals(item.getResponseType())) { + return failure.matches(((EhcacheEntityResponse.Failure) item).getCause()); + } else { + return false; + } + } + + @Override + public void describeTo(Description description) { + description.appendText(" failure caused by ").appendDescriptionOf(failure); + } + }; + } + + private Matcher withMessage(Matcher message) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(Throwable item) { + return message.matches(item.getMessage()); + } + + @Override + public void describeTo(Description description) { + description.appendText(" throwable with message ").appendDescriptionOf(message); + } + }; + } + + @SuppressWarnings("unchecked") + ServiceRegistry getCustomMockedServiceRegistry(EhcacheStateService stateService, ClientCommunicator clientCommunicator, + IEntityMessenger entityMessenger, EntityMonitoringService entityMonitoringService, + EntityManagementRegistry entityManagementRegistry) { + return new ServiceRegistry() { + @Override + public T getService(final ServiceConfiguration configuration) { + Class serviceType = configuration.getServiceType(); + if (serviceType.isAssignableFrom(ClientCommunicator.class)) { + return (T) clientCommunicator; + } else if (serviceType.isAssignableFrom(IEntityMessenger.class)) { + return (T) entityMessenger; + } else if (serviceType.isAssignableFrom(EhcacheStateService.class)) { + return (T) stateService; + } else if (serviceType.isAssignableFrom(EntityMonitoringService.class)) { + return (T) entityMonitoringService; + } else if (serviceType.isAssignableFrom(EntityManagementRegistry.class)) { + return (T) entityManagementRegistry; + } else if (serviceType.isAssignableFrom(OOOMessageHandler.class)) { + return (T) new OOOMessageHandlerImpl<>(message -> true, () -> {}); + } + throw new AssertionError("Unknown service configuration of type: " + serviceType); + } + + @Override + public Collection getServices(ServiceConfiguration configuration) { + return Collections.singleton(getService(configuration)); + } + }; + } + + /** + * Builder for {@link ServerStoreConfiguration} instances. + */ + private static final class ServerStoreConfigBuilder { + private PoolAllocation poolAllocation; + private String storedKeyType = "java.lang.Long"; + private String storedValueType = "java.lang.String"; + private String keySerializerType; + private String valueSerializerType; + private Consistency consistency = Consistency.EVENTUAL; + + + ServerStoreConfigBuilder consistency(Consistency consistency) { + this.consistency = consistency; + return this; + } + + ServerStoreConfigBuilder dedicated(String resourceName, int size, MemoryUnit unit) { + this.poolAllocation = new Dedicated(resourceName, unit.toBytes(size)); + return this; + } + + ServerStoreConfigBuilder shared(String resourcePoolName) { + this.poolAllocation = new Shared(resourcePoolName); + return this; + } + + ServerStoreConfigBuilder unknown() { + this.poolAllocation = new PoolAllocation.Unknown(); + return this; + } + + ServerStoreConfigBuilder setStoredKeyType(Class storedKeyType) { + this.storedKeyType = storedKeyType.getName(); + return this; + } + + ServerStoreConfigBuilder setStoredValueType(Class storedValueType) { + this.storedValueType = storedValueType.getName(); + return this; + } + + ServerStoreConfigBuilder setKeySerializerType(Class keySerializerType) { + this.keySerializerType = keySerializerType.getName(); + return this; + } + + ServerStoreConfigBuilder setValueSerializerType(Class valueSerializerType) { + this.valueSerializerType = valueSerializerType.getName(); + return this; + } + + ServerStoreConfiguration build() { + return new ServerStoreConfiguration(poolAllocation, storedKeyType, storedValueType, + keySerializerType, valueSerializerType, consistency, false, false); + } + } + + /** + * Provides a {@link ServiceRegistry} for off-heap resources. This is a "server-side" object. + */ + private static final class OffHeapIdentifierRegistry implements ServiceRegistry { + + private final long offHeapSize; + private final String defaultResource; + + private EhcacheStateServiceImpl storeManagerService; + + private IEntityMessenger entityMessenger; + + private ClientCommunicator clientCommunicator; + + private final Map pools = + new HashMap<>(); + + private final Map sharedPools = new HashMap<>(); + + /** + * Instantiate an "open" {@code ServiceRegistry}. Using this constructor creates a + * registry that creates {@code OffHeapResourceIdentifier} entries as they are + * referenced. + */ + private OffHeapIdentifierRegistry(String defaultResource) { + this.defaultResource = defaultResource; + this.offHeapSize = 0; + } + + /** + * Instantiate a "closed" {@code ServiceRegistry}. Using this constructor creates a + * registry that only returns {@code OffHeapResourceIdentifier} entries supplied + * through the {@link #addResource} method. + */ + private OffHeapIdentifierRegistry() { + this(null); + } + + private void addSharedPool(String name, long size, String resourceName) { + sharedPools.put(name, new ServerSideConfiguration.Pool(size, resourceName)); + } + + /** + * Adds an off-heap resource of the given name to this registry. + * + * @param name the name of the resource + * @param offHeapSize the off-heap size + * @param unit the size unit type + * @return {@code this} {@code OffHeapIdentifierRegistry} + */ + private OffHeapIdentifierRegistry addResource(String name, int offHeapSize, MemoryUnit unit) { + this.pools.put(OffHeapResourceIdentifier.identifier(name), new TestOffHeapResource(unit.toBytes(offHeapSize))); + return this; + } + + private TestOffHeapResource getResource(String resourceName) { + return this.pools.get(OffHeapResourceIdentifier.identifier(resourceName)); + } + + private EhcacheStateServiceImpl getStoreManagerService() { + return this.storeManagerService; + } + + + private IEntityMessenger getEntityMessenger() { + return entityMessenger; + } + + @SuppressWarnings("unchecked") + @Override + public T getService(ServiceConfiguration serviceConfiguration) { + if (serviceConfiguration.getServiceType().equals(ClientCommunicator.class)) { + if (this.clientCommunicator == null) { + this.clientCommunicator = mock(ClientCommunicator.class); + } + return (T) this.clientCommunicator; + } else if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { + if (storeManagerService == null) { + this.storeManagerService = new EhcacheStateServiceImpl(new OffHeapResources() { + @Override + public Set getAllIdentifiers() { + return pools.keySet(); + } + + @Override + public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { + return pools.get(identifier); + } + + @Override + public boolean addOffHeapResource(OffHeapResourceIdentifier identifier, long capacity) { + return false; + } + }, new ServerSideConfiguration(sharedPools), DEFAULT_MAPPER, service -> {}); + try { + this.storeManagerService.configure(); + } catch (ConfigurationException e) { + throw new AssertionError("Test setup failed!"); + } + } + return (T) (this.storeManagerService); + } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { + if (this.entityMessenger == null) { + this.entityMessenger = mock(IEntityMessenger.class); + } + return (T) this.entityMessenger; + } else if(serviceConfiguration instanceof EntityManagementRegistryConfiguration) { + return null; + } else if(serviceConfiguration instanceof OOOMessageHandlerConfiguration) { + OOOMessageHandlerConfiguration oooMessageHandlerConfiguration = (OOOMessageHandlerConfiguration) serviceConfiguration; + return (T) new OOOMessageHandlerImpl<>(oooMessageHandlerConfiguration.getTrackerPolicy(), () -> {}); + } + + throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); + } + + @Override + public Collection getServices(ServiceConfiguration configuration) { + return Collections.singleton(getService(configuration)); + } + } + + /** + * Testing implementation of {@link OffHeapResource}. This is a "server-side" object. + */ + private static final class TestOffHeapResource implements OffHeapResource { + + private long capacity; + private long used; + + private TestOffHeapResource(long capacity) { + this.capacity = capacity; + } + + @Override + public boolean reserve(long size) throws IllegalArgumentException { + if (size < 0) { + throw new IllegalArgumentException(); + } + if (size > available()) { + return false; + } else { + this.used += size; + return true; + } + } + + @Override + public void release(long size) throws IllegalArgumentException { + if (size < 0) { + throw new IllegalArgumentException(); + } + this.used -= size; + } + + @Override + public long available() { + return this.capacity - this.used; + } + + @Override + public long capacity() { + return capacity; + } + + @Override + public boolean setCapacity(long size) throws IllegalArgumentException { + throw new UnsupportedOperationException("Not supported"); + } + + @Override + public void addUsageListener(UUID listenerUUID, float threshold, Consumer consumer) { + + } + + @Override + public void removeUsageListener(UUID listenerUUID) throws IllegalArgumentException { + + } + + private long getUsed() { + return used; + } + } + + @SuppressWarnings("unchecked") + public static T mock(Class clazz) { + return Mockito.mock((Class) clazz); + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntityTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntityTest.java similarity index 79% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntityTest.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntityTest.java index 78d685c733..ad6604a27f 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntityTest.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/ClusterTierPassiveEntityTest.java @@ -20,12 +20,12 @@ import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ClusterTierEntityConfiguration; -import org.ehcache.clustered.common.internal.store.Util; import org.ehcache.clustered.server.EhcacheStateServiceImpl; import org.ehcache.clustered.server.KeySegmentMapper; -import org.ehcache.clustered.server.TestInvokeContext; +import org.ehcache.clustered.server.TestClientDescriptor; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.junit.Before; @@ -34,6 +34,8 @@ import org.terracotta.client.message.tracker.OOOMessageHandlerImpl; import org.terracotta.entity.BasicServiceConfiguration; import org.terracotta.entity.ConfigurationException; +import org.terracotta.entity.EntityMessage; +import org.terracotta.entity.EntityResponse; import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; @@ -42,6 +44,7 @@ import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; +import org.terracotta.offheapresource.OffHeapUsageEvent; import org.terracotta.offheapstore.util.MemoryUnit; import java.util.Collection; @@ -50,13 +53,16 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.UUID; +import java.util.function.Consumer; -import static org.ehcache.clustered.common.internal.store.Util.createPayload; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.ChainUtils.sequencedChainOf; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; @@ -127,9 +133,9 @@ public void testDestroyServerStore() throws Exception { @Test public void testInvalidMessageThrowsError() throws Exception { ClusterTierPassiveEntity passiveEntity = new ClusterTierPassiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - TestInvokeContext context = new TestInvokeContext(); + TestClientDescriptor client = TestClientDescriptor.newClient(); try { - passiveEntity.invokePassive(context, new InvalidMessage()); + passiveEntity.invokePassive(client.invokeContext(), new InvalidMessage()); fail("Invalid message should result in AssertionError"); } catch (AssertionError e) { assertThat(e.getMessage(), containsString("Unsupported")); @@ -141,31 +147,64 @@ public void testPassiveTracksMessageDuplication() throws Exception { ClusterTierPassiveEntity passiveEntity = new ClusterTierPassiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); passiveEntity.createNew(); - Chain chain = Util.getChain(true, createPayload(1L)); - TestInvokeContext context = new TestInvokeContext(); + Chain chain = sequencedChainOf(createPayload(1L)); + TestClientDescriptor client = TestClientDescriptor.newClient(); long clientId = 3; PassiveReplicationMessage message1 = new PassiveReplicationMessage.ChainReplicationMessage(2, chain, 2L, 1L, clientId); - passiveEntity.invokePassive(context, message1); + passiveEntity.invokePassive(client.invokeContext(), message1); // Should be added assertThat(passiveEntity.getStateService().getStore(passiveEntity.getStoreIdentifier()).get(2).isEmpty(), is(false)); - Chain emptyChain = Util.getChain(true); + Chain emptyChain = sequencedChainOf(); PassiveReplicationMessage message2 = new PassiveReplicationMessage.ChainReplicationMessage(2, emptyChain, 2L, 1L, clientId); - passiveEntity.invokePassive(context, message2); + passiveEntity.invokePassive(client.invokeContext(), message2); // Should not be cleared, message is a duplicate assertThat(passiveEntity.getStateService().getStore(passiveEntity.getStoreIdentifier()).get(2).isEmpty(), is(false)); PassiveReplicationMessage message3 = new PassiveReplicationMessage.ChainReplicationMessage(2, chain, 3L, 1L, clientId); - passiveEntity.invokePassive(context, message3); + passiveEntity.invokePassive(client.invokeContext(), message3); // Should be added as well, different message id assertThat(passiveEntity.getStateService().getStore(passiveEntity.getStoreIdentifier()).get(2).isEmpty(), is(false)); } + @Test + public void testOversizeReplaceAtHeadMessage() throws Exception { + ClusterTierPassiveEntity passiveEntity = new ClusterTierPassiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); + passiveEntity.createNew(); + TestClientDescriptor client = TestClientDescriptor.newClient(); + + int key = 2; + + Chain chain = sequencedChainOf(createPayload(1L)); + PassiveReplicationMessage message = new PassiveReplicationMessage.ChainReplicationMessage(key, chain, 2L, 1L, 3L); + passiveEntity.invokePassive(client.invokeContext(), message); + + Chain oversizeChain = sequencedChainOf(createPayload(2L, 1024 * 1024)); + ServerStoreOpMessage.ReplaceAtHeadMessage oversizeMsg = new ServerStoreOpMessage.ReplaceAtHeadMessage(key, chain, oversizeChain); + passiveEntity.invokePassive(client.invokeContext(), oversizeMsg); + // Should be evicted, the value is oversize. + assertThat(passiveEntity.getStateService().getStore(passiveEntity.getStoreIdentifier()).get(key).isEmpty(), is(true)); + } + + @Test + public void testOversizeChainReplicationMessage() throws Exception { + ClusterTierPassiveEntity passiveEntity = new ClusterTierPassiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); + passiveEntity.createNew(); + TestClientDescriptor client = TestClientDescriptor.newClient(); + + long key = 2L; + Chain oversizeChain = sequencedChainOf(createPayload(key, 1024 * 1024)); + PassiveReplicationMessage oversizeMsg = new PassiveReplicationMessage.ChainReplicationMessage(key, oversizeChain, 2L, 1L, (long) 3); + passiveEntity.invokePassive(client.invokeContext(), oversizeMsg); + // Should be cleared, the value is oversize. + assertThat(passiveEntity.getStateService().getStore(passiveEntity.getStoreIdentifier()).get(key).isEmpty(), is(true)); + } + /** * Builder for {@link ServerStoreConfiguration} instances. */ @@ -220,7 +259,7 @@ ServerStoreConfigBuilder setValueSerializerType(Class valueSerializerType) { ServerStoreConfiguration build() { return new ServerStoreConfiguration(poolAllocation, storedKeyType, storedValueType, - keySerializerType, valueSerializerType, consistency); + keySerializerType, valueSerializerType, consistency, false, false); } } @@ -305,6 +344,11 @@ public Set getAllIdentifiers() { public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { return pools.get(identifier); } + + @Override + public boolean addOffHeapResource(OffHeapResourceIdentifier identifier, long capacity) { + return false; + } }, new ServerSideConfiguration(sharedPools), DEFAULT_MAPPER, service -> {}); try { this.storeManagerService.configure(); @@ -320,9 +364,8 @@ public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) } else if(serviceConfiguration instanceof BasicServiceConfiguration && serviceConfiguration.getServiceType() == IMonitoringProducer.class) { return null; } else if(serviceConfiguration instanceof OOOMessageHandlerConfiguration) { - OOOMessageHandlerConfiguration oooMessageHandlerConfiguration = (OOOMessageHandlerConfiguration) serviceConfiguration; - return (T) new OOOMessageHandlerImpl(oooMessageHandlerConfiguration.getTrackerPolicy(), - oooMessageHandlerConfiguration.getSegments(), oooMessageHandlerConfiguration.getSegmentationStrategy()); + OOOMessageHandlerConfiguration oooMessageHandlerConfiguration = (OOOMessageHandlerConfiguration) serviceConfiguration; + return (T) new OOOMessageHandlerImpl<>(oooMessageHandlerConfiguration.getTrackerPolicy(), () -> {}); } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); @@ -377,6 +420,21 @@ public long capacity() { return capacity; } + @Override + public boolean setCapacity(long size) throws IllegalArgumentException { + throw new UnsupportedOperationException("Not supported"); + } + + @Override + public void addUsageListener(UUID listenerUUID, float threshold, Consumer consumer) { + + } + + @Override + public void removeUsageListener(UUID listenerUUID) throws IllegalArgumentException { + + } + private long getUsed() { return used; } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/InvalidMessage.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/InvalidMessage.java similarity index 97% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/InvalidMessage.java rename to clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/InvalidMessage.java index 5752ab2eb3..c7e96a02a5 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/store/InvalidMessage.java +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/InvalidMessage.java @@ -17,7 +17,5 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; -import java.util.UUID; - public class InvalidMessage extends EhcacheEntityMessage { } diff --git a/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/LockManagerImplTest.java b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/LockManagerImplTest.java new file mode 100644 index 0000000000..ea406743e2 --- /dev/null +++ b/clustered/server/ehcache-entity/src/test/java/org/ehcache/clustered/server/store/LockManagerImplTest.java @@ -0,0 +1,115 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.store; + +import org.ehcache.clustered.server.TestClientDescriptor; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.terracotta.entity.ClientDescriptor; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Consumer; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +public class LockManagerImplTest { + + @Test + public void testLock() { + LockManagerImpl lockManager = new LockManagerImpl(); + ClientDescriptor clientDescriptor = TestClientDescriptor.newClient(); + assertThat(lockManager.lock(1L, clientDescriptor), is(true)); + assertThat(lockManager.lock(1L, clientDescriptor), is(false)); + assertThat(lockManager.lock(2L, clientDescriptor), is(true)); + } + + @Test + public void testUnlock() { + LockManagerImpl lockManager = new LockManagerImpl(); + ClientDescriptor clientDescriptor = TestClientDescriptor.newClient(); + assertThat(lockManager.lock(1L, clientDescriptor), is(true)); + lockManager.unlock(1L); + assertThat(lockManager.lock(1L, clientDescriptor), is(true)); + } + + @Test + @SuppressWarnings("unchecked") + public void testSweepLocksForClient() { + LockManagerImpl lockManager = new LockManagerImpl(); + ClientDescriptor clientDescriptor1 = TestClientDescriptor.newClient(); + ClientDescriptor clientDescriptor2 = TestClientDescriptor.newClient(); + + assertThat(lockManager.lock(1L, clientDescriptor1), is(true)); + assertThat(lockManager.lock(2L, clientDescriptor1), is(true)); + assertThat(lockManager.lock(3L, clientDescriptor1), is(true)); + assertThat(lockManager.lock(4L, clientDescriptor1), is(true)); + assertThat(lockManager.lock(5L, clientDescriptor2), is(true)); + assertThat(lockManager.lock(6L, clientDescriptor2), is(true)); + + AtomicInteger counter = new AtomicInteger(); + + Consumer> consumer = mock(Consumer.class); + + ArgumentCaptor> argumentCaptor = ArgumentCaptor.forClass(List.class); + + doAnswer(invocation -> counter.incrementAndGet()).when(consumer).accept(argumentCaptor.capture()); + + lockManager.sweepLocksForClient(clientDescriptor2, consumer); + + assertThat(counter.get(), is(1)); + + assertThat(argumentCaptor.getValue().size(), is(2)); + assertThat(argumentCaptor.getValue(), containsInAnyOrder(5L, 6L)); + + assertThat(lockManager.lock(5L, clientDescriptor2), is(true)); + assertThat(lockManager.lock(6L, clientDescriptor2), is(true)); + assertThat(lockManager.lock(1L, clientDescriptor1), is(false)); + assertThat(lockManager.lock(2L, clientDescriptor1), is(false)); + assertThat(lockManager.lock(3L, clientDescriptor1), is(false)); + assertThat(lockManager.lock(4L, clientDescriptor1), is(false)); + + } + + @Test + public void testCreateLockStateAfterFailover() { + LockManagerImpl lockManager = new LockManagerImpl(); + + ClientDescriptor clientDescriptor1 = TestClientDescriptor.newClient(); + + Set locks = new HashSet<>(); + locks.add(1L); + locks.add(100L); + locks.add(1000L); + + lockManager.createLockStateAfterFailover(clientDescriptor1, locks); + + ClientDescriptor clientDescriptor2 = TestClientDescriptor.newClient(); + + + assertThat(lockManager.lock(100L, clientDescriptor2), is(false)); + assertThat(lockManager.lock(1000L, clientDescriptor2), is(false)); + assertThat(lockManager.lock(1L, clientDescriptor2), is(false)); + + } + +} diff --git a/clustered/server/ehcache-service-api/build.gradle b/clustered/server/ehcache-service-api/build.gradle new file mode 100644 index 0000000000..ada3ceba19 --- /dev/null +++ b/clustered/server/ehcache-service-api/build.gradle @@ -0,0 +1,30 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.clustered-server-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Clustering Server Storage API module' + description = 'The Server Storage API module of Ehcache 3' + } +} + +dependencies { + api project(':clustered:ehcache-common-api') +} diff --git a/clustered/server/ehcache-service-api/config/checkstyle-suppressions.xml b/clustered/server/ehcache-service-api/config/checkstyle-suppressions.xml new file mode 100644 index 0000000000..cb41d0baf7 --- /dev/null +++ b/clustered/server/ehcache-service-api/config/checkstyle-suppressions.xml @@ -0,0 +1,9 @@ + + + + + + + diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java similarity index 93% rename from clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java index 138360f263..202c92c71d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java @@ -16,9 +16,6 @@ package org.ehcache.clustered.server; -import com.tc.classloader.CommonComponent; - -@CommonComponent public class KeySegmentMapper { private final int segments; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java similarity index 79% rename from clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java index 09424093df..92400209d2 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java @@ -18,17 +18,15 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ServerStore; -import org.terracotta.offheapstore.MapInternals; - -import com.tc.classloader.CommonComponent; import java.util.List; import java.util.Set; -@CommonComponent -public interface ServerSideServerStore extends ServerStore, MapInternals { - void setEvictionListener(ServerStoreEvictionListener listener); +public interface ServerSideServerStore extends ServerStore { + void setEventListener(ServerStoreEventListener listener); + void enableEvents(boolean enable); ServerStoreConfiguration getStoreConfiguration(); List> getSegmentKeySets(); void put(long key, Chain chain); + void remove(long key); } diff --git a/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/ServerStoreEventListener.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/ServerStoreEventListener.java new file mode 100644 index 0000000000..e161731366 --- /dev/null +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/ServerStoreEventListener.java @@ -0,0 +1,46 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.server.offheap.InternalChain; + +import java.nio.ByteBuffer; + +/** + * ServerStore event listener interface + */ +public interface ServerStoreEventListener { + + /** + * Called when the ServerStore evicts a mapping. + *

+ * Always fired, even when events are not enabled, see: {@link ServerSideServerStore#enableEvents(boolean)}. + * @param key the key of the evicted mapping + * @param evictedChain the evicted chain. + */ + void onEviction(long key, InternalChain evictedChain); + + /** + * Called when the ServerStore appends to a mapping + *

+ * Not always fired, only when events are enabled, see: {@link ServerSideServerStore#enableEvents(boolean)}. + * @param beforeAppend the chain as it was before the append + * @param appended the appended operation + */ + void onAppend(Chain beforeAppend, ByteBuffer appended); + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateRepoSyncMessage.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateRepoSyncMessage.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateRepoSyncMessage.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateRepoSyncMessage.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java similarity index 90% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java index ba23e4b23a..6882a9574a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java @@ -18,11 +18,6 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; -import com.tc.classloader.CommonComponent; - -import java.util.UUID; - -@CommonComponent public abstract class EhcacheSyncMessage extends EhcacheEntityMessage { public abstract SyncMessageType getMessageType(); diff --git a/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java new file mode 100644 index 0000000000..ca0b75108e --- /dev/null +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java @@ -0,0 +1,26 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.internal.messages; + +/** + * SyncMessageType + */ +public enum SyncMessageType { + STATE_REPO, + DATA, + MESSAGE_TRACKER; +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/InternalChain.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/offheap/InternalChain.java similarity index 94% rename from clustered/server/src/main/java/org/ehcache/clustered/server/offheap/InternalChain.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/offheap/InternalChain.java index 0e7e209986..18af113114 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/InternalChain.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/offheap/InternalChain.java @@ -20,7 +20,7 @@ import org.ehcache.clustered.common.internal.store.Chain; -interface InternalChain extends Closeable { +public interface InternalChain extends Closeable { Chain detach(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java similarity index 99% rename from clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java index e6bc05876e..9942c16e5d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java @@ -33,7 +33,7 @@ class ServerStateRepository { private final ConcurrentMap> concurrentMapRepo = new ConcurrentHashMap<>(); - EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterException { + EhcacheEntityResponse invoke(StateRepositoryOpMessage message) { String mapId = message.getMapId(); ConcurrentMap map = getStateMap(mapId); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/StateRepositoryManager.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/repo/StateRepositoryManager.java similarity index 93% rename from clustered/server/src/main/java/org/ehcache/clustered/server/repo/StateRepositoryManager.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/repo/StateRepositoryManager.java index 2f0255f0c4..67ff72e91b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/StateRepositoryManager.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/repo/StateRepositoryManager.java @@ -21,24 +21,21 @@ import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.server.internal.messages.EhcacheStateRepoSyncMessage; -import com.tc.classloader.CommonComponent; - import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static java.util.Collections.emptyList; -@CommonComponent public class StateRepositoryManager { private final ConcurrentMap mapRepositoryMap = new ConcurrentHashMap<>(); - public void destroyStateRepository(String cacheId) throws ClusterException { + public void destroyStateRepository(String cacheId) { mapRepositoryMap.remove(cacheId); } - public EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterException { + public EhcacheEntityResponse invoke(StateRepositoryOpMessage message) { String cacheId = message.getCacheId(); ServerStateRepository currentRepo = getServerStateRepository(cacheId); return currentRepo.invoke(message); diff --git a/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/EhcacheStateContext.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/EhcacheStateContext.java new file mode 100644 index 0000000000..4f895c685e --- /dev/null +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/EhcacheStateContext.java @@ -0,0 +1,24 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.state; + +/** + * Marker interface to pass context between begin and end message processing. + */ +@FunctionalInterface +public interface EhcacheStateContext extends AutoCloseable { + void close(); +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java similarity index 88% rename from clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 3ae9833793..2f3e851947 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -19,28 +19,25 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.server.ServerSideServerStore; import org.ehcache.clustered.server.repo.StateRepositoryManager; import org.terracotta.entity.ConfigurationException; -import com.tc.classloader.CommonComponent; - import java.util.Map; import java.util.Set; -@CommonComponent public interface EhcacheStateService { String getDefaultServerResource(); Map getSharedResourcePools(); - ResourcePageSource getSharedResourcePageSource(String name); + Object getSharedResourcePageSource(String name); ServerSideConfiguration.Pool getDedicatedResourcePool(String name); - ResourcePageSource getDedicatedResourcePageSource(String name); + Object getDedicatedResourcePageSource(String name); ServerSideServerStore getStore(String name); @@ -68,4 +65,5 @@ public interface EhcacheStateService { void loadExisting(ServerSideConfiguration configuration); + EhcacheStateContext beginProcessing(EhcacheOperationMessage message, String name); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java similarity index 89% rename from clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java index 30c5e75e4e..fe599f20a9 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java @@ -16,12 +16,8 @@ package org.ehcache.clustered.server.state; -import com.tc.classloader.CommonComponent; - import java.util.Set; -import java.util.concurrent.ConcurrentMap; -@CommonComponent public interface InvalidationTracker { boolean isClearInProgress(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java similarity index 96% rename from clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java index ab5c5a1b4a..17912bd1a3 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java @@ -22,9 +22,6 @@ import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; -import com.tc.classloader.CommonComponent; - -@CommonComponent public class EhcacheStateServiceConfig implements ServiceConfiguration { private final ClusterTierManagerConfiguration config; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStoreStateServiceConfig.java b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStoreStateServiceConfig.java similarity index 95% rename from clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStoreStateServiceConfig.java rename to clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStoreStateServiceConfig.java index 3235c18d5b..e736370dd4 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStoreStateServiceConfig.java +++ b/clustered/server/ehcache-service-api/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStoreStateServiceConfig.java @@ -20,9 +20,6 @@ import org.ehcache.clustered.server.state.EhcacheStateService; import org.terracotta.entity.ServiceConfiguration; -import com.tc.classloader.CommonComponent; - -@CommonComponent public class EhcacheStoreStateServiceConfig implements ServiceConfiguration { private final String managerIdentifier; diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java b/clustered/server/ehcache-service-api/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java similarity index 98% rename from clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java rename to clustered/server/ehcache-service-api/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java index 806b7d6e51..5e5e275bfd 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java +++ b/clustered/server/ehcache-service-api/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java @@ -24,12 +24,11 @@ import java.util.AbstractMap; import java.util.Map; import java.util.Set; -import java.util.UUID; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.*; public class ServerStateRepositoryTest { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java b/clustered/server/ehcache-service-api/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java similarity index 96% rename from clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java rename to clustered/server/ehcache-service-api/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java index b5decac5e6..9efda648a1 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java +++ b/clustered/server/ehcache-service-api/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java @@ -20,9 +20,9 @@ import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; public class StateRepositoryManagerTest { diff --git a/clustered/server/ehcache-service/build.gradle b/clustered/server/ehcache-service/build.gradle new file mode 100644 index 0000000000..c26735f52f --- /dev/null +++ b/clustered/server/ehcache-service/build.gradle @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.clustered-server-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Clustering Server Storage Implementation module' + description = 'The Server Storage Implementation module of Ehcache 3' + } +} + +dependencies { + service project(':clustered:server:ehcache-service-api') + service "org.terracotta:offheap-resource:$terracottaPlatformVersion" + service "org.terracotta:statistics:$statisticVersion" + + implementation project(':clustered:ehcache-common') + implementation "org.terracotta:offheap-store:$offheapVersion" + + testImplementation project(':clustered:test-utils') + testImplementation "org.terracotta.management:monitoring-service-api:$terracottaPlatformVersion" + testImplementation "org.terracotta:passthrough-server:$terracottaPassthroughTestingVersion" +} diff --git a/clustered/server/ehcache-service/config/checkstyle-suppressions.xml b/clustered/server/ehcache-service/config/checkstyle-suppressions.xml new file mode 100644 index 0000000000..cb41d0baf7 --- /dev/null +++ b/clustered/server/ehcache-service/config/checkstyle-suppressions.xml @@ -0,0 +1,9 @@ + + + + + + + diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java similarity index 87% rename from clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java rename to clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index ae0a02a052..c8f58d50a6 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -25,7 +25,9 @@ import org.ehcache.clustered.common.internal.exceptions.InvalidServerSideConfigurationException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.LifecycleException; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.server.repo.StateRepositoryManager; +import org.ehcache.clustered.server.state.EhcacheStateContext; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.EhcacheStateServiceProvider; import org.ehcache.clustered.server.state.InvalidationTracker; @@ -39,21 +41,23 @@ import org.terracotta.offheapresource.OffHeapResources; import org.terracotta.offheapstore.paging.PageSource; import org.terracotta.statistics.StatisticsManager; +import org.terracotta.statistics.ValueStatistic; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; -import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.Function; import static java.util.stream.Collectors.toMap; import static org.terracotta.offheapresource.OffHeapResourceIdentifier.identifier; +import static org.terracotta.statistics.StatisticsManager.tags; +import static org.terracotta.statistics.ValueStatistics.supply; +import static org.terracotta.statistics.StatisticType.COUNTER; +import static org.terracotta.statistics.StatisticType.GAUGE; public class EhcacheStateServiceImpl implements EhcacheStateService { @@ -65,23 +69,23 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { private static final String PROPERTY_STORE_KEY = "storeName"; private static final String PROPERTY_POOL_KEY = "poolName"; - private static final Map> STAT_STORE_METHOD_REFERENCES = new HashMap<>(); - private static final Map> STAT_POOL_METHOD_REFERENCES = new HashMap<>(); + private static final Map>> STAT_STORE_METHOD_REFERENCES = new HashMap<>(11); + private static final Map>> STAT_POOL_METHOD_REFERENCES = new HashMap<>(1); static { - STAT_STORE_METHOD_REFERENCES.put("allocatedMemory", ServerStoreImpl::getAllocatedMemory); - STAT_STORE_METHOD_REFERENCES.put("dataAllocatedMemory", ServerStoreImpl::getDataAllocatedMemory); - STAT_STORE_METHOD_REFERENCES.put("occupiedMemory", ServerStoreImpl::getOccupiedMemory); - STAT_STORE_METHOD_REFERENCES.put("dataOccupiedMemory", ServerStoreImpl::getDataOccupiedMemory); - STAT_STORE_METHOD_REFERENCES.put("entries", ServerStoreImpl::getSize); - STAT_STORE_METHOD_REFERENCES.put("usedSlotCount", ServerStoreImpl::getUsedSlotCount); - STAT_STORE_METHOD_REFERENCES.put("dataVitalMemory", ServerStoreImpl::getDataVitalMemory); - STAT_STORE_METHOD_REFERENCES.put("vitalMemory", ServerStoreImpl::getVitalMemory); - STAT_STORE_METHOD_REFERENCES.put("removedSlotCount", ServerStoreImpl::getRemovedSlotCount); - STAT_STORE_METHOD_REFERENCES.put("dataSize", ServerStoreImpl::getDataSize); - STAT_STORE_METHOD_REFERENCES.put("tableCapacity", ServerStoreImpl::getTableCapacity); + STAT_STORE_METHOD_REFERENCES.put("allocatedMemory", store -> supply(GAUGE, store::getAllocatedMemory)); + STAT_STORE_METHOD_REFERENCES.put("dataAllocatedMemory", store -> supply(GAUGE, store::getDataAllocatedMemory)); + STAT_STORE_METHOD_REFERENCES.put("occupiedMemory", store -> supply(GAUGE, store::getOccupiedMemory)); + STAT_STORE_METHOD_REFERENCES.put("dataOccupiedMemory", store -> supply(GAUGE, store::getDataOccupiedMemory)); + STAT_STORE_METHOD_REFERENCES.put("entries", store -> supply(COUNTER, store::getSize)); + STAT_STORE_METHOD_REFERENCES.put("usedSlotCount", store -> supply(COUNTER, store::getUsedSlotCount)); + STAT_STORE_METHOD_REFERENCES.put("dataVitalMemory", store -> supply(GAUGE, store::getDataVitalMemory)); + STAT_STORE_METHOD_REFERENCES.put("vitalMemory", store -> supply(GAUGE, store::getVitalMemory)); + STAT_STORE_METHOD_REFERENCES.put("removedSlotCount", store -> supply(COUNTER, store::getRemovedSlotCount)); + STAT_STORE_METHOD_REFERENCES.put("dataSize", store -> supply(GAUGE, store::getDataSize)); + STAT_STORE_METHOD_REFERENCES.put("tableCapacity", store -> supply(GAUGE, store::getTableCapacity)); - STAT_POOL_METHOD_REFERENCES.put("allocatedSize", ResourcePageSource::getAllocatedSize); + STAT_POOL_METHOD_REFERENCES.put("allocatedSize", pool -> supply(GAUGE, pool::getAllocatedSize)); } private final OffHeapResources offHeapResources; @@ -207,20 +211,6 @@ private void checkConfigurationCompatibility(ServerSideConfiguration incomingCon + "Client: " + incomingConfig.getResourcePools().keySet() + " " + "Server: " + sharedResourcePools.keySet().toString()); } - - try { - for (Map.Entry pool : resolveResourcePools(incomingConfig).entrySet()) { - ServerSideConfiguration.Pool serverPool = this.sharedResourcePools.get(pool.getKey()).getPool(); - - if (!serverPool.equals(pool.getValue())) { - throw new InvalidServerSideConfigurationException("Pool '" + pool.getKey() + "' not equal. " - + "Client: " + pool.getValue() + " " - + "Server: " + serverPool); - } - } - } catch (ConfigurationException e) { - throw new InvalidServerSideConfigurationException(e.getMessage()); - } } private static Map resolveResourcePools(ServerSideConfiguration configuration) throws ConfigurationException { @@ -303,14 +293,13 @@ private ResourcePageSource createPageSource(String poolName, ServerSideConfigura } private void registerStoreStatistics(ServerStoreImpl store, String storeName) { - STAT_STORE_METHOD_REFERENCES.entrySet().stream().forEach((entry)-> - registerStatistic(store, storeName, entry.getKey(), STATISTICS_STORE_TAG, PROPERTY_STORE_KEY, () -> entry.getValue().apply(store) )); + STAT_STORE_METHOD_REFERENCES.forEach((key, value) -> + registerStatistic(store, storeName, key, STATISTICS_STORE_TAG, PROPERTY_STORE_KEY, value.apply(store))); } private void registerPoolStatistics(String poolName, ResourcePageSource pageSource) { - STAT_POOL_METHOD_REFERENCES.entrySet().stream().forEach((entry)-> - registerStatistic(pageSource, poolName, entry.getKey(), STATISTICS_POOL_TAG, PROPERTY_POOL_KEY, () -> entry.getValue().apply(pageSource)) - ); + STAT_POOL_METHOD_REFERENCES.forEach((key, value) -> + registerStatistic(pageSource, poolName, key, STATISTICS_POOL_TAG, PROPERTY_POOL_KEY, value.apply(pageSource))); } private void unRegisterStoreStatistics(ServerStoreImpl store) { @@ -329,13 +318,12 @@ private void unRegisterPoolStatistics(ResourcePageSource pageSource) { } } - private void registerStatistic(Object context, String name, String observerName, String tag, String propertyKey, Callable callable) { - Set tags = new HashSet<>(Arrays.asList(tag, "tier")); + private void registerStatistic(Object context, String name, String observerName, String tag, String propertyKey, ValueStatistic source) { Map properties = new HashMap<>(); properties.put("discriminator", tag); properties.put(propertyKey, name); - StatisticsManager.createPassThroughStatistic(context, observerName, tags, properties, callable); + StatisticsManager.createPassThroughStatistic(context, observerName, tags(tag, "tier"), properties, source); } private void releaseDedicatedPool(String name, PageSource pageSource) { @@ -407,7 +395,7 @@ public ServerStoreImpl createStore(String name, ServerStoreConfiguration serverS ServerStoreImpl serverStore; ResourcePageSource resourcePageSource = getPageSource(name, serverStoreConfiguration.getPoolAllocation()); try { - serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper); + serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper, serverStoreConfiguration.isWriteBehindConfigured()); } catch (RuntimeException rte) { releaseDedicatedPool(name, resourcePageSource); throw new ConfigurationException("Failed to create ServerStore.", rte); @@ -490,6 +478,11 @@ public void loadExisting(ServerSideConfiguration configuration) { } } + @Override + public EhcacheStateContext beginProcessing(EhcacheOperationMessage message, String name) { + return () -> {}; + } + public boolean isConfigured() { return configured; } diff --git a/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java new file mode 100644 index 0000000000..f96b1928ef --- /dev/null +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java @@ -0,0 +1,212 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server; + +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.server.offheap.OffHeapChainMap; +import org.ehcache.clustered.server.offheap.OffHeapServerStore; +import org.ehcache.clustered.server.state.ResourcePageSource; +import org.terracotta.offheapstore.MapInternals; +import org.terracotta.offheapstore.exceptions.OversizeMappingException; +import org.terracotta.offheapstore.paging.PageSource; + +import java.nio.ByteBuffer; +import java.util.AbstractList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class ServerStoreImpl implements ServerSideServerStore, MapInternals { + + private final ServerStoreConfiguration storeConfiguration; + private final ResourcePageSource pageSource; + private final OffHeapServerStore store; + + public ServerStoreImpl(ServerStoreConfiguration configuration, ResourcePageSource source, KeySegmentMapper mapper, + List> recoveredMaps) { + this.storeConfiguration = configuration; + this.pageSource = source; + this.store = new OffHeapServerStore(recoveredMaps, mapper); + } + + public ServerStoreImpl(ServerStoreConfiguration storeConfiguration, ResourcePageSource pageSource, KeySegmentMapper mapper, boolean writeBehindConfigured) { + this.storeConfiguration = storeConfiguration; + this.pageSource = pageSource; + this.store = new OffHeapServerStore(pageSource, mapper, writeBehindConfigured); + } + + @Override + public void setEventListener(ServerStoreEventListener listener) { + store.setEventListener(listener); + } + + @Override + public void enableEvents(boolean enable) { + store.enableEvents(enable); + } + + /** + * Gets the {@link PageSource} providing storage for this {@code ServerStore}. + * + * @return the {@code PageSource} used by this {@code ServerStore} + */ + public PageSource getPageSource() { + return pageSource; + } + + public ServerStoreConfiguration getStoreConfiguration() { + return storeConfiguration; + } + + @Override + public Chain get(long key) { + return store.get(key); + } + + @Override + public void append(long key, ByteBuffer payLoad) { + checkPayLoadSize(payLoad); + store.append(key, payLoad); + } + + @Override + public Chain getAndAppend(long key, ByteBuffer payLoad) { + checkPayLoadSize(payLoad); + return store.getAndAppend(key, payLoad); + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + store.replaceAtHead(key, expect, update); + } + + public void put(long key, Chain chain) { + store.put(key, chain); + } + + @Override + public void remove(long key) { + store.remove(key); + } + + @Override + public void clear() { + store.clear(); + } + + public void close() { + store.close(); + } + + @Override + public List> getSegmentKeySets() { + + return new AbstractList>() { + @Override + public Set get(int index) { + return store.getSegments().get(index).keySet(); + } + @Override + public int size() { + return store.getSegments().size(); + } + }; + } + + // stats + + + @Override + public long getSize() { + return store.getSize(); + } + + @Override + public long getTableCapacity() { + return store.getTableCapacity(); + } + + @Override + public long getUsedSlotCount() { + return store.getUsedSlotCount(); + } + + @Override + public long getRemovedSlotCount() { + return store.getRemovedSlotCount(); + } + + @Override + public long getAllocatedMemory() { + return store.getAllocatedMemory(); + } + + @Override + public long getOccupiedMemory() { + return store.getOccupiedMemory(); + } + + @Override + public long getVitalMemory() { + return store.getVitalMemory(); + } + + @Override + public long getDataAllocatedMemory() { + return store.getDataAllocatedMemory(); + } + + @Override + public long getDataOccupiedMemory() { + return store.getDataOccupiedMemory(); + } + + @Override + public long getDataVitalMemory() { + return store.getDataVitalMemory(); + } + + @Override + public long getDataSize() { + return store.getDataSize(); + } + + @Override + public int getReprobeLength() { + //TODO + //MapInternals Interface may need to change to implement this function correctly. + //Currently MapInternals Interface contains function: int getReprobeLength(); + //however OffHeapServerStore.reprobeLength() returns a long + //Thus there could be data loss + + throw new UnsupportedOperationException("Not supported yet."); + } + + private void checkPayLoadSize(ByteBuffer payLoad) { + if (payLoad.remaining() > pageSource.getPool().getSize()) { + throw new OversizeMappingException("Payload (" + payLoad.remaining() + + ") bigger than pool size (" + pageSource.getPool().getSize() + ")"); + } + } + + @Override + public Iterator> iterator() { + return store.iterator(); + } +} diff --git a/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/ChainStorageEngine.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/ChainStorageEngine.java new file mode 100644 index 0000000000..42d0d041bd --- /dev/null +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/ChainStorageEngine.java @@ -0,0 +1,26 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.offheap; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.terracotta.offheapstore.storage.StorageEngine; + +import java.nio.ByteBuffer; + +public interface ChainStorageEngine extends StorageEngine { + InternalChain newChain(ByteBuffer element); + InternalChain newChain(Chain chain); +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/LongPortability.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/LongPortability.java similarity index 89% rename from clustered/server/src/main/java/org/ehcache/clustered/server/offheap/LongPortability.java rename to clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/LongPortability.java index 54a172414f..f2db3299bb 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/LongPortability.java +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/LongPortability.java @@ -18,9 +18,9 @@ import java.nio.ByteBuffer; import org.terracotta.offheapstore.storage.portability.Portability; -class LongPortability implements Portability { +public class LongPortability implements Portability { - static final Portability INSTANCE = new LongPortability(); + public static final Portability INSTANCE = new LongPortability(); private LongPortability() {} diff --git a/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java new file mode 100644 index 0000000000..4bed35acdb --- /dev/null +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java @@ -0,0 +1,403 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.offheap; + +import java.nio.ByteBuffer; +import java.nio.IntBuffer; +import java.util.AbstractMap; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.locks.Lock; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.terracotta.offheapstore.MapInternals; + +import org.terracotta.offheapstore.eviction.EvictionListener; +import org.terracotta.offheapstore.eviction.EvictionListeningReadWriteLockedOffHeapClockCache; +import org.terracotta.offheapstore.exceptions.OversizeMappingException; +import org.terracotta.offheapstore.paging.PageSource; +import org.terracotta.offheapstore.storage.portability.Portability; +import org.terracotta.offheapstore.util.Factory; + +import static org.ehcache.clustered.common.internal.util.ChainBuilder.chainFromList; + +public class OffHeapChainMap implements MapInternals, Iterable> { + + interface ChainMapEvictionListener { + void onEviction(K key, InternalChain evictedChain); + } + + protected final HeadMap heads; + private final ChainStorageEngine chainStorage; + private volatile ChainMapEvictionListener evictionListener; + + private OffHeapChainMap(PageSource source, ChainStorageEngine storageEngine) { + this.chainStorage = storageEngine; + EvictionListener listener = callable -> { + try { + Map.Entry entry = callable.call(); + try { + if (evictionListener != null) { + evictionListener.onEviction(entry.getKey(), entry.getValue()); + } + } finally { + entry.getValue().close(); + } + } catch (Exception e) { + throw new AssertionError(e); + } + }; + + this.heads = new HeadMap<>(listener, source, chainStorage); + } + + public OffHeapChainMap(PageSource source, Factory> storageEngineFactory) { + this(source, storageEngineFactory.newInstance()); + } + + public OffHeapChainMap(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean shareByThieving) { + this(source, new OffHeapChainStorageEngine<>(source, keyPortability, minPageSize, maxPageSize, shareByThieving, shareByThieving)); + } + + //For tests + OffHeapChainMap(HeadMap heads, OffHeapChainStorageEngine chainStorage) { + this.chainStorage = chainStorage; + this.heads = heads; + } + + void setEvictionListener(ChainMapEvictionListener listener) { + evictionListener = listener; + } + + public ChainStorageEngine getStorageEngine() { + return chainStorage; + } + + public Chain get(K key) { + final Lock lock = heads.readLock(); + lock.lock(); + try { + InternalChain chain = heads.get(key); + if (chain == null) { + return EMPTY_CHAIN; + } else { + try { + return chain.detach(); + } finally { + chain.close(); + } + } + } finally { + lock.unlock(); + } + } + + public Chain getAndAppend(K key, ByteBuffer element) { + final Lock lock = heads.writeLock(); + lock.lock(); + try { + while (true) { + InternalChain chain = heads.get(key); + if (chain == null) { + heads.put(key, chainStorage.newChain(element)); + return EMPTY_CHAIN; + } else { + try { + Chain current = chain.detach(); + if (chain.append(element)) { + return current; + } else { + evict(); + } + } finally { + chain.close(); + } + } + } + } finally { + lock.unlock(); + } + } + + public void append(K key, ByteBuffer element) { + final Lock lock = heads.writeLock(); + lock.lock(); + try { + while (true) { + InternalChain chain = heads.get(key); + if (chain == null) { + heads.put(key, chainStorage.newChain(element)); + return; + } else { + try { + if (chain.append(element)) { + return; + } else { + evict(); + } + } finally { + chain.close(); + } + } + } + } finally { + lock.unlock(); + } + + } + + public void replaceAtHead(K key, Chain expected, Chain replacement) { + final Lock lock = heads.writeLock(); + lock.lock(); + try { + while (true) { + InternalChain chain = heads.get(key); + if (chain == null) { + if (expected.isEmpty()) { + throw new IllegalArgumentException("Empty expected sequence"); + } else { + return; + } + } else { + try { + if (chain.replace(expected, replacement)) { + return; + } else { + evict(); + } + } finally { + chain.close(); + } + } + } + } finally { + lock.unlock(); + } + } + + public void put(K key, Chain chain) { + final Lock lock = heads.writeLock(); + lock.lock(); + try { + InternalChain current = heads.get(key); + if (current != null) { + try { + replaceAtHead(key, current.detach(), chain); + } finally { + current.close(); + } + } else { + if (!chain.isEmpty()) { + heads.put(key, chainStorage.newChain(chain)); + } + } + } finally { + lock.unlock(); + } + } + + void remove(K key) { + Lock lock = heads.writeLock(); + lock.lock(); + try { + heads.removeNoReturn(key); + } finally { + lock.unlock(); + } + } + + public void clear() { + heads.writeLock().lock(); + try { + this.heads.clear(); + } finally { + heads.writeLock().unlock(); + } + } + + public Set keySet() { + heads.writeLock().lock(); + try { + return heads.keySet(); + } finally { + heads.writeLock().unlock(); + } + } + + @Override + public Iterator> iterator() { + Iterator> headsIterator = heads.detachedEntryIterator(); + + return new Iterator>() { + @Override + public boolean hasNext() { + return headsIterator.hasNext(); + } + + @Override + public Map.Entry next() { + final Lock lock = heads.readLock(); + lock.lock(); + try { + Map.Entry entry = headsIterator.next(); + InternalChain chain = entry.getValue(); + if (chain == null) { + return new AbstractMap.SimpleImmutableEntry<>(entry.getKey(), EMPTY_CHAIN); + } else { + try { + return new AbstractMap.SimpleImmutableEntry<>(entry.getKey(), chain.detach()); + } finally { + chain.close(); + } + } + } finally { + lock.unlock(); + } + } + }; + } + + private void evict() { + int evictionIndex = heads.getEvictionIndex(); + if (evictionIndex < 0) { + throw new OversizeMappingException("Storage Engine and Eviction Failed - Everything Pinned (" + getSize() + " mappings) \n" + "Storage Engine : " + chainStorage); + } else { + heads.evict(evictionIndex, false); + } + } + + private static final Chain EMPTY_CHAIN = chainFromList(Collections.emptyList()); + + @Override + public long getSize() { + return heads.getSize(); + } + + @Override + public long getTableCapacity() { + return heads.getTableCapacity(); + } + + @Override + public long getUsedSlotCount() { + return heads.getUsedSlotCount(); + } + + @Override + public long getRemovedSlotCount() { + return heads.getRemovedSlotCount(); + } + + @Override + public int getReprobeLength() { + return heads.getReprobeLength(); + } + + @Override + public long getAllocatedMemory() { + return heads.getAllocatedMemory(); + } + + @Override + public long getOccupiedMemory() { + return heads.getOccupiedMemory(); + } + + @Override + public long getVitalMemory() { + return heads.getVitalMemory(); + } + + @Override + public long getDataAllocatedMemory() { + return heads.getDataAllocatedMemory(); + } + + @Override + public long getDataOccupiedMemory() { + return heads.getDataOccupiedMemory(); + } + + @Override + public long getDataVitalMemory() { + return heads.getDataVitalMemory(); + } + + @Override + public long getDataSize() { + return heads.getDataSize(); + } + + public boolean shrink() { + return heads.shrink(); + } + + public Lock writeLock() { + return heads.writeLock(); + } + + protected void storageEngineFailure(Object failure) { + } + + static class HeadMap extends EvictionListeningReadWriteLockedOffHeapClockCache { + + public HeadMap(EvictionListener listener, PageSource source, ChainStorageEngine chainStorage) { + super(listener, source, chainStorage); + } + + public Iterator> detachedEntryIterator() { + Lock lock = readLock(); + lock.lock(); + try { + return new LockedEntryIterator() { + @Override + protected Entry create(IntBuffer entry) { + Entry attachedEntry = super.create(entry); + + try (InternalChain chain = attachedEntry.getValue()) { + Chain detachedChain = chain.detach(); + return new SimpleImmutableEntry<>(attachedEntry.getKey(), new InternalChain() { + @Override + public Chain detach() { + return detachedChain; + } + + @Override + public boolean append(ByteBuffer element) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean replace(Chain expected, Chain replacement) { + throw new UnsupportedOperationException(); + } + + @Override + public void close() { + // + } + }); + } + } + }; + } finally { + lock.unlock(); + } + } + } +} diff --git a/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java new file mode 100644 index 0000000000..a41e80afdf --- /dev/null +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java @@ -0,0 +1,900 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.offheap; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.Lock; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.SequencedElement; +import org.terracotta.offheapstore.paging.OffHeapStorageArea; +import org.terracotta.offheapstore.paging.PageSource; +import org.terracotta.offheapstore.storage.BinaryStorageEngine; +import org.terracotta.offheapstore.storage.PointerSize; +import org.terracotta.offheapstore.storage.StorageEngine; +import org.terracotta.offheapstore.storage.portability.Portability; +import org.terracotta.offheapstore.storage.portability.WriteContext; +import org.terracotta.offheapstore.util.Factory; + +import static java.util.Collections.emptyList; +import static org.ehcache.clustered.common.internal.util.ChainBuilder.chainFromList; + +public class OffHeapChainStorageEngine implements ChainStorageEngine, BinaryStorageEngine { + + /* + * ELEMENT + * 0 7 + * 0 | sequence number | + * 8 | length | next | + * 16 | next | [[----- + * --- contents ---]] (length bytes) + * + * `next` is the address of the next element in the chain. + * `next` on the last element in the chain points to the chain head. + * (byte 0 in the heads chain structure, not byte 0 in the element) + */ + private static final int ELEMENT_HEADER_SEQUENCE_OFFSET = 0; + private static final int ELEMENT_HEADER_LENGTH_OFFSET = 8; + private static final int ELEMENT_HEADER_NEXT_OFFSET = 12; + private static final int ELEMENT_HEADER_SIZE = 20; + + /* + * CHAIN + * 0 7 + * 0 |k-length| k-hash | + * 8 | tail | + * [[--- ELEMENT ---]] + * [[ key-contents ]] (k-length bytes) + * + * `tail` is the address of the last element in the chain + */ + private static final int CHAIN_HEADER_KEY_LENGTH_OFFSET = 0; + private static final int CHAIN_HEADER_KEY_HASH_OFFSET = 4; + private static final int CHAIN_HEADER_TAIL_OFFSET = 8; + private static final int CHAIN_HEADER_SIZE = 16; + + private static final int DETACHED_CONTIGUOUS_CHAIN_ADDRESS_OFFSET = 0; + private static final int DETACHED_CONTIGUOUS_CHAIN_HEADER_SIZE = 8; + + private final OffHeapStorageArea storage; + private final Portability keyPortability; + private final Set activeChains = Collections.newSetFromMap(new ConcurrentHashMap()); + private final int extendedChainHeaderSize; + private final ByteBuffer emptyExtendedChainHeader; + private final int totalChainHeaderSize; + + protected StorageEngine.Owner owner; + private long nextSequenceNumber = 0; + private volatile boolean hasContiguousChains = false; + + public static Factory> + createFactory(final PageSource source, + final Portability keyPortability, + final int minPageSize, final int maxPageSize, + final boolean thief, final boolean victim) { + return (Factory>)() -> new OffHeapChainStorageEngine<>(source, keyPortability, + minPageSize, maxPageSize, thief, victim); + } + + OffHeapChainStorageEngine(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean thief, boolean victim) { + this(source, keyPortability, minPageSize, maxPageSize, thief, victim, ByteBuffer.allocate(0)); + } + + protected OffHeapChainStorageEngine(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean thief, boolean victim, + final ByteBuffer emptyExtendedChainHeader) { + this.storage = new OffHeapStorageArea(PointerSize.LONG, new StorageOwner(), source, minPageSize, maxPageSize, thief, victim); + this.keyPortability = keyPortability; + this.extendedChainHeaderSize = emptyExtendedChainHeader.remaining(); + this.emptyExtendedChainHeader = emptyExtendedChainHeader; + this.totalChainHeaderSize = CHAIN_HEADER_SIZE + this.extendedChainHeaderSize; + } + + //For tests + Set getActiveChains() { + return this.activeChains; + } + + @Override + public InternalChain newChain(ByteBuffer element) { + return new GenesisLink(element); + } + + @Override + public InternalChain newChain(Chain chain) { + return new GenesisLinks(chain); + } + + @Override + public Long writeMapping(K key, InternalChain value, int hash, int metadata) { + if (value instanceof GenesisChain) { + return createAttachedChain(key, hash, (GenesisChain) value); + } else { + throw new AssertionError("only detached internal chains should be initially written"); + } + } + + @Override + public void attachedMapping(long encoding, int hash, int metadata) { + chainAttached(encoding); + } + + @Override + public void freeMapping(long encoding, int hash, boolean removal) { + try (AttachedInternalChain chain = new AttachedInternalChain(encoding)) { + chain.free(); + } + } + + @Override + public InternalChain readValue(long encoding) { + return new AttachedInternalChain(encoding); + } + + @Override + public boolean equalsValue(Object value, long encoding) { + try (AttachedInternalChain chain = new AttachedInternalChain(encoding)) { + return chain.equals(value); + } + } + + @SuppressWarnings("unchecked") + @Override + public K readKey(long encoding, int hashCode) { + return (K) keyPortability.decode(readKeyBuffer(encoding)); + } + + @Override + public boolean equalsKey(Object key, long encoding) { + return keyPortability.equals(key, readKeyBuffer(encoding)); + } + + private ByteBuffer readKeyBuffer(long encoding) { + int keyLength = readKeySize(encoding); + int elemLength = readElementLength(encoding + this.totalChainHeaderSize); + return storage.readBuffer(encoding + this.totalChainHeaderSize + ELEMENT_HEADER_SIZE + elemLength, keyLength); + } + + @Override + public int readKeyHash(long encoding) { + return storage.readInt(encoding + CHAIN_HEADER_KEY_HASH_OFFSET); + } + + private int readElementLength(long element) { + // The most significant bit (MSB) of element length is used to signify whether an element is explicitly allocated + // (msb clear) or part of a contiguous chain (msb set). Clear the msb when returning length. + return Integer.MAX_VALUE & storage.readInt(element + ELEMENT_HEADER_LENGTH_OFFSET); + } + + @Override + public ByteBuffer readBinaryKey(long encoding) { + return readKeyBuffer(encoding); + } + + @Override + public ByteBuffer readBinaryValue(long chain) { + // first get total element size and allocate buffer + long element = chain + this.totalChainHeaderSize; + int totalLength = DETACHED_CONTIGUOUS_CHAIN_HEADER_SIZE; + do { + totalLength += ELEMENT_HEADER_SIZE + readElementLength(element); + element = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); + } while (element != chain); + + final ByteBuffer detachedContiguousBuffer = ByteBuffer.allocate(totalLength); + // one way for layers above to extract encoding is to put the encoding of the chain address in the value + detachedContiguousBuffer.putLong(chain); + + // now add the elements to the buffer + element = chain + this.totalChainHeaderSize; + do { + final int startPosition = detachedContiguousBuffer.position(); + detachedContiguousBuffer.put(storage.readBuffer(element, ELEMENT_HEADER_SIZE + readElementLength(element))); + detachedContiguousBuffer.mark(); + detachedContiguousBuffer.putLong(startPosition + ELEMENT_HEADER_NEXT_OFFSET, -1L); + detachedContiguousBuffer.reset(); + element = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); + } while (element != chain); + return (ByteBuffer)detachedContiguousBuffer.flip(); + } + + @Override + public boolean equalsBinaryKey(ByteBuffer binaryKey, long chain) { + return binaryKey.equals(readKeyBuffer(chain)); + } + + @Override + public Long writeBinaryMapping(ByteBuffer binaryKey, ByteBuffer binaryValue, int hash, int metadata) { + final int totalSize = binaryKey.remaining() + + (binaryValue.remaining() - DETACHED_CONTIGUOUS_CHAIN_HEADER_SIZE) + + this.totalChainHeaderSize; + long chain = storage.allocate(totalSize); + if (chain < 0) { + return null; + } + if (binaryValue.remaining() < DETACHED_CONTIGUOUS_CHAIN_HEADER_SIZE + ELEMENT_HEADER_SIZE) { + // a chain must have at least one element. Something is wrong + throw new AssertionError("Invalid chain data detected. Empty links"); + } + binaryValue.mark(); + binaryKey.mark(); + try { + // extract first element + binaryValue.position(DETACHED_CONTIGUOUS_CHAIN_HEADER_SIZE); + final ByteBuffer firstElementWithHeader = binaryValue.slice(); + final int firstElementWithHeaderSize = ELEMENT_HEADER_SIZE + + (Integer.MAX_VALUE & firstElementWithHeader.getInt(ELEMENT_HEADER_LENGTH_OFFSET)); + firstElementWithHeader.limit(firstElementWithHeaderSize); + binaryValue.position(binaryValue.position() + firstElementWithHeaderSize); + + // mark relevant locations + final int keySize = binaryKey.remaining(); + final long firstElementLocation = chain + this.totalChainHeaderSize; + final long keyLocation = firstElementLocation + firstElementWithHeaderSize; + final long restOfElementsLocation = keyLocation + keySize; + + // build element length list + final ByteBuffer restOfElementsBuffer = binaryValue.slice(); + final List restOfElementLengthsWithHeader = new ArrayList<>(); + while (restOfElementsBuffer.hasRemaining()) { + final int skipLength = ELEMENT_HEADER_SIZE + (Integer.MAX_VALUE & restOfElementsBuffer.getInt( + restOfElementsBuffer.position() + ELEMENT_HEADER_LENGTH_OFFSET)); + restOfElementLengthsWithHeader.add(skipLength); + restOfElementsBuffer.position(restOfElementsBuffer.position() + skipLength); + } + restOfElementsBuffer.rewind(); + + // now write all the data + storage.writeInt(chain + CHAIN_HEADER_KEY_HASH_OFFSET, hash); + storage.writeInt(chain + CHAIN_HEADER_KEY_LENGTH_OFFSET, Integer.MIN_VALUE | keySize); + storage.writeBuffer(keyLocation, binaryKey); + storage.writeBuffer(firstElementLocation, firstElementWithHeader); + storage.writeBuffer(chain + CHAIN_HEADER_SIZE, emptyExtendedChainHeader.duplicate()); + if (restOfElementsBuffer.hasRemaining()) { + storage.writeBuffer(restOfElementsLocation, restOfElementsBuffer); + } + + // now adjust offsets + if (restOfElementLengthsWithHeader.size() <= 0) { + // we have only one element + storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, firstElementLocation); + storage.writeLong(firstElementLocation + ELEMENT_HEADER_NEXT_OFFSET, chain); + } else { + // recovering the buffer into a contiguous chain..denote this.. + this.hasContiguousChains = true; + storage.writeLong(firstElementLocation + ELEMENT_HEADER_NEXT_OFFSET, restOfElementsLocation); + long currentLocation = restOfElementsLocation; + int i = 0; + for (; i < restOfElementLengthsWithHeader.size() - 1; i++) { + final int elemLength = restOfElementLengthsWithHeader.get(i) - ELEMENT_HEADER_SIZE; + final int adjustedLength = Integer.MIN_VALUE | elemLength; + long nextLocation = currentLocation + elemLength + ELEMENT_HEADER_SIZE; + storage.writeLong(currentLocation + ELEMENT_HEADER_NEXT_OFFSET, nextLocation); + // denote that this is not an allocated chunk + storage.writeInt(currentLocation + ELEMENT_HEADER_LENGTH_OFFSET, adjustedLength); + currentLocation = nextLocation; + } + final int adjustedLength = Integer.MIN_VALUE | (restOfElementLengthsWithHeader.get(i) - ELEMENT_HEADER_SIZE); + storage.writeLong(currentLocation + ELEMENT_HEADER_NEXT_OFFSET, chain); + storage.writeInt(currentLocation + ELEMENT_HEADER_LENGTH_OFFSET, adjustedLength); + storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, currentLocation); + } + return chain; + } finally { + binaryKey.reset(); + binaryValue.reset(); + } + } + + public static long extractChainAddressFromValue(ByteBuffer valueBuffer) { + return valueBuffer.getLong(DETACHED_CONTIGUOUS_CHAIN_ADDRESS_OFFSET); + } + + @Override + public Long writeBinaryMapping(ByteBuffer[] byteBuffers, ByteBuffer[] byteBuffers1, int i, int i1) { + throw new AssertionError("Operation Not supported"); + } + + private int readKeySize(long encoding) { + return Integer.MAX_VALUE & storage.readInt(encoding + CHAIN_HEADER_KEY_LENGTH_OFFSET); + } + + @Override + public void clear() { + storage.clear(); + } + + @Override + public long getAllocatedMemory() { + return storage.getAllocatedMemory(); + } + + @Override + public long getOccupiedMemory() { + return storage.getOccupiedMemory(); + } + + @Override + public long getVitalMemory() { + return getOccupiedMemory(); + } + + @Override + public long getDataSize() { + return storage.getAllocatedMemory(); + } + + @Override + public void invalidateCache() { + //no-op - for now + } + + @Override + public void bind(StorageEngine.Owner owner) { + this.owner = owner; + } + + @Override + public void destroy() { + storage.destroy(); + } + + @Override + public boolean shrink() { + return storage.shrink(); + } + + protected ByteBuffer getExtensionHeader(long chainAddress) { + checkExtensionHeaderExists(); + return storage.readBuffer(toExtensionAddress(chainAddress), extendedChainHeaderSize); + } + + protected WriteContext getExtensionWriteContext(long chainAddress) { + checkExtensionHeaderExists(); + return new WriteContext() { + + @Override + public void setLong(int offset, long value) { + if (offset < 0 || offset >= extendedChainHeaderSize) { + throw new IllegalArgumentException("Offset not within bounds 0 >= " + offset + " < " + extendedChainHeaderSize); + } else { + storage.writeLong(toExtensionAddress(chainAddress) + offset, value); + } + } + + @Override + public void flush() { + //no-op + } + }; + } + + protected void chainAttached(long chainAddress) { + } + + protected void chainFreed(long chainAddress) { + } + + protected void chainModified(long chainAddress) { + } + + protected void chainMoved(long fromChainAddress, long toChainAddress) { + } + + private void checkExtensionHeaderExists() { + if (extendedChainHeaderSize <= 0) { + throw new AssertionError("No extended header support for this storage engine"); + } + } + + private long toExtensionAddress(long chainAddress) { + return chainAddress + CHAIN_HEADER_SIZE; + } + + /** + * Represents the initial form of a chain before the storage engine writes the chain mapping + * to the underlying map against the key. + */ + private static abstract class GenesisChain implements InternalChain { + @Override + public Chain detach() { + throw new AssertionError("Chain not in storage yet. Cannot be detached"); + } + + @Override + public boolean append(ByteBuffer element) { + throw new AssertionError("Chain not in storage yet. Cannot be appended"); + } + + @Override + public boolean replace(Chain expected, Chain replacement) { + throw new AssertionError("Chain not in storage yet. Cannot be mutated"); + } + + @Override + public void close() { + //no-op + } + + protected abstract Iterator iterator(); + } + + /** + * Represents a simple {@link GenesisChain} that contains a single link. + */ + private static class GenesisLink extends GenesisChain { + private final Element element; + + public GenesisLink(ByteBuffer buffer) { + element = buffer::asReadOnlyBuffer; + } + + @Override + protected Iterator iterator() { + return Collections.singleton(element).iterator(); + } + } + + /** + * Represents a more complex {@link GenesisChain} that contains multiple links represented itself + * as a {@link Chain}. + */ + private static class GenesisLinks extends GenesisChain { + private final Chain chain; + + public GenesisLinks(Chain chain) { + this.chain = chain; + } + + @Override + protected Iterator iterator() { + return chain.iterator(); + } + } + + private final class AttachedInternalChain implements InternalChain { + + /** + * Location of the chain structure, not of the first element. + */ + private long chain; + /** + * track if this chain is modified so that we can signal on close + */ + private boolean chainModified = false; + + AttachedInternalChain(long address) { + this.chain = address; + OffHeapChainStorageEngine.this.activeChains.add(this); + } + + @Override + public Chain detach() { + List buffers = new ArrayList<>(); + + long element = chain + OffHeapChainStorageEngine.this.totalChainHeaderSize; + do { + buffers.add(element(readElementBuffer(element), readElementSequenceNumber(element))); + element = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); + } while (element != chain); + + return chainFromList(buffers); + } + + @Override + public boolean append(ByteBuffer element) { + long newTail = createElement(element); + if (newTail < 0) { + return false; + } else { + this.chainModified = true; + long oldTail = storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET); + storage.writeLong(newTail + ELEMENT_HEADER_NEXT_OFFSET, chain); + storage.writeLong(oldTail + ELEMENT_HEADER_NEXT_OFFSET, newTail); + storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, newTail); + return true; + } + } + + @Override + public boolean replace(Chain expected, Chain replacement) { + if (expected.isEmpty()) { + throw new IllegalArgumentException("Empty expected sequence"); + } else if (replacement.isEmpty()) { + return removeHeader(expected); + } else { + return replaceHeader(expected, replacement); + } + } + + + /** + * @return false if storage can't be allocated for new header when whole chain is not removed, true otherwise + */ + public boolean removeHeader(Chain expected) { + long suffixHead = chain + OffHeapChainStorageEngine.this.totalChainHeaderSize; + + Iterator expectedIt = expected.iterator(); + do { + if (!compare(expectedIt.next(), suffixHead)) { + return true; + } + suffixHead = storage.readLong(suffixHead + ELEMENT_HEADER_NEXT_OFFSET); + } while (expectedIt.hasNext() && suffixHead != chain); + + if (expectedIt.hasNext()) { + return true; + } else if (suffixHead == chain) { + //whole chain removed + int slot = owner.getSlotForHashAndEncoding(readKeyHash(chain), chain, ~0); + if (!owner.evict(slot, true)) { + throw new AssertionError("Unexpected failure to evict slot " + slot); + } + return true; + } else { + int hash = readKeyHash(chain); + int elemSize = readElementLength(suffixHead); + ByteBuffer elemBuffer = storage.readBuffer(suffixHead + ELEMENT_HEADER_SIZE, elemSize); + Long newChainAddress = createAttachedChain(readKeyBuffer(chain), hash, elemBuffer); + if (newChainAddress == null) { + return false; + } else { + try (AttachedInternalChain newChain = new AttachedInternalChain(newChainAddress)) { + newChain.chainModified = true; + //copy remaining elements from old chain (by reference) + long next = storage.readLong(suffixHead + ELEMENT_HEADER_NEXT_OFFSET); + long tail = storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET); + if (next != chain) { + newChain.append(next, tail); + } + + if (owner.updateEncoding(hash, chain, newChainAddress, ~0)) { + // NOTE: we leave the original suffix head attached to the prefix so that it gets freed along with the + // prefix since we took a copy of it for the new chain. + storage.writeLong(suffixHead + ELEMENT_HEADER_NEXT_OFFSET, chain); + storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, suffixHead); + chainMoved(chain, newChainAddress); + free(); + return true; + } else { + newChain.free(); + throw new AssertionError("Encoding update failure - impossible!"); + } + } + } + } + } + + /** + * @return false if storage can't be allocated for new header when head of the current chain matches expected + * chain, true otherwise + */ + public boolean replaceHeader(Chain expected, Chain replacement) { + long suffixHead = chain + OffHeapChainStorageEngine.this.totalChainHeaderSize; + long prefixTail; + + Iterator expectedIt = expected.iterator(); + do { + if (!compare(expectedIt.next(), suffixHead)) { + return true; + } + prefixTail = suffixHead; + suffixHead = storage.readLong(suffixHead + ELEMENT_HEADER_NEXT_OFFSET); + } while (expectedIt.hasNext() && suffixHead != chain); + + if (expectedIt.hasNext()) { + return true; + } + + int hash = readKeyHash(chain); + Long newChainAddress = createAttachedChain(readKeyBuffer(chain), hash, replacement.iterator()); + if (newChainAddress == null) { + return false; + } else { + try (AttachedInternalChain newChain = new AttachedInternalChain(newChainAddress)) { + newChain.chainModified = true; + //copy remaining elements from old chain (by reference) + if (suffixHead != chain) { + newChain.append(suffixHead, storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET)); + } + + if (owner.updateEncoding(hash, chain, newChainAddress, ~0)) { + storage.writeLong(prefixTail + ELEMENT_HEADER_NEXT_OFFSET, chain); + chainMoved(chain, newChainAddress); + free(); + return true; + } else { + newChain.free(); + throw new AssertionError("Encoding update failure - impossible!"); + } + } + } + } + + private void free() { + // signal dependent engines to act on this free before freeing the storage + chainFreed(chain); + chainModified = false; + + long element = storage.readLong(chain + OffHeapChainStorageEngine.this.totalChainHeaderSize + ELEMENT_HEADER_NEXT_OFFSET); + while (element != chain) { + long next = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); + if (storage.readInt(element + ELEMENT_HEADER_LENGTH_OFFSET) >= 0) { + // do not free blocks contiguous to chain + storage.free(element); + } + element = next; + } + + storage.free(chain); + } + + private long createElement(ByteBuffer element) { + long newElement = storage.allocate(element.remaining() + ELEMENT_HEADER_SIZE); + if (newElement < 0) { + return newElement; + } else { + writeElement(newElement, element); + return newElement; + } + } + + private boolean compare(Element element, long address) { + if (element instanceof SequencedElement) { + return readElementSequenceNumber(address) == ((SequencedElement) element).getSequenceNumber(); + } else { + return readElementBuffer(address).equals(element.getPayload()); + } + } + + private void append(long head, long tail) { + long oldTail = storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET); + + storage.writeLong(oldTail + ELEMENT_HEADER_NEXT_OFFSET, head); + storage.writeLong(tail + ELEMENT_HEADER_NEXT_OFFSET, chain); + storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, tail); + + if (OffHeapChainStorageEngine.this.hasContiguousChains) { + // we will have to move out any contiguous elements in the old chain as it is going to be freed soon + long current = head; + long prev = oldTail; + while (current != chain) { + final long next = storage.readLong(current + ELEMENT_HEADER_NEXT_OFFSET); + final int elemLength = storage.readInt(current + ELEMENT_HEADER_LENGTH_OFFSET); + if (elemLength < 0) { + final int elemLengthWithHeader = (Integer.MAX_VALUE & elemLength) + ELEMENT_HEADER_SIZE; + final long element = storage.allocate(elemLengthWithHeader); + storage.writeBuffer(element, storage.readBuffer(current, elemLengthWithHeader)); + storage.writeInt(element + ELEMENT_HEADER_LENGTH_OFFSET, elemLengthWithHeader - ELEMENT_HEADER_SIZE); + storage.writeLong(prev + ELEMENT_HEADER_NEXT_OFFSET, element); + prev = element; + } else { + prev = current; + } + current = next; + } + storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, prev); + } + } + + private Element element(ByteBuffer attachedBuffer, final long sequence) { + final ByteBuffer detachedBuffer = (ByteBuffer) ByteBuffer.allocate(attachedBuffer.remaining()).put(attachedBuffer).flip(); + + return new SequencedElement() { + + @Override + public ByteBuffer getPayload() { + return detachedBuffer.asReadOnlyBuffer(); + } + + @Override + public long getSequenceNumber() { + return sequence; + } + }; + } + + private ByteBuffer readElementBuffer(long address) { + int elemLength = readElementLength(address); + return storage.readBuffer(address + ELEMENT_HEADER_SIZE, elemLength); + } + + private long readElementSequenceNumber(long address) { + return storage.readLong(address + ELEMENT_HEADER_SEQUENCE_OFFSET); + } + + public void moved(long from, long to) { + if (from == chain) { + chain = to; + if (from != to) { + chainMoved(from, to); + } + } + } + + @Override + public void close() { + try { + if (this.chainModified) { + this.chainModified = false; + chainModified(chain); + } + } finally { + // must remove even if chain modified threw an unexpected exception + OffHeapChainStorageEngine.this.activeChains.remove(this); + } + } + } + + private long writeElement(long address, ByteBuffer element) { + storage.writeLong(address + ELEMENT_HEADER_SEQUENCE_OFFSET, nextSequenceNumber++); + storage.writeInt(address + ELEMENT_HEADER_LENGTH_OFFSET, element.remaining()); + storage.writeBuffer(address + ELEMENT_HEADER_SIZE, element.duplicate()); + return address; + } + + private Long createAttachedChain(K key, int hash, GenesisChain value) { + ByteBuffer keyBuffer = keyPortability.encode(key); + return createAttachedChain(keyBuffer, hash, value.iterator()); + } + + private Long createAttachedChain(ByteBuffer keyBuffer, int hash, ByteBuffer elemBuffer) { + long chain = storage.allocate(keyBuffer.remaining() + elemBuffer.remaining() + this.totalChainHeaderSize + ELEMENT_HEADER_SIZE); + if (chain < 0) { + return null; + } + int keySize = keyBuffer.remaining(); + storage.writeInt(chain + CHAIN_HEADER_KEY_HASH_OFFSET, hash); + storage.writeInt(chain + CHAIN_HEADER_KEY_LENGTH_OFFSET, Integer.MIN_VALUE | keySize); + storage.writeBuffer(chain + this.totalChainHeaderSize + ELEMENT_HEADER_SIZE + elemBuffer.remaining(), keyBuffer); + if (extendedChainHeaderSize > 0) { + storage.writeBuffer(chain + CHAIN_HEADER_SIZE, emptyExtendedChainHeader.duplicate()); + } + long element = chain + this.totalChainHeaderSize; + writeElement(element, elemBuffer); + storage.writeLong(element + ELEMENT_HEADER_NEXT_OFFSET, chain); + storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, element); + return chain; + } + + private Long createAttachedChain(ByteBuffer readKeyBuffer, int hash, Iterator iterator) { + Long address = createAttachedChain(readKeyBuffer, hash, iterator.next().getPayload()); + if (address == null) { + return null; + } + + if (iterator.hasNext()) { + try (AttachedInternalChain chain = new AttachedInternalChain(address)) { + do { + if (!chain.append(iterator.next().getPayload())) { + chain.free(); + return null; + } + } while (iterator.hasNext()); + } + } + return address; + } + + private long findHead(long address) { + while (!isHead(address)) { + address = storage.readLong(address + ELEMENT_HEADER_NEXT_OFFSET); + } + return address; + } + + private boolean isHead(long address) { + return storage.readInt(address + CHAIN_HEADER_KEY_LENGTH_OFFSET) < 0; + } + + class StorageOwner implements OffHeapStorageArea.Owner { + + @Override + public Collection evictAtAddress(long address, boolean shrink) { + Collection elements = new ArrayList<>(); + long chain = -1L; + long element = address; + do { + elements.add(element); + if (isHead(element)) { + chain = element; + element += OffHeapChainStorageEngine.this.totalChainHeaderSize; + } + element = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); + } while (element != address); + + for (AttachedInternalChain activeChain : activeChains) { + if (activeChain.chain == chain) { + return emptyList(); + } + } + int hash = storage.readInt(chain + CHAIN_HEADER_KEY_HASH_OFFSET); + int slot = owner.getSlotForHashAndEncoding(hash, chain, ~0); + if (owner.evict(slot, shrink)) { + return elements; + } else { + return emptyList(); + } + } + + @Override + public Lock writeLock() { + return owner.writeLock(); + } + + @Override + public boolean isThief() { + return owner.isThiefForTableAllocations(); + } + + @Override + public boolean moved(long from, long to) { + if (isHead(to)) { + int hashCode = storage.readInt(to + CHAIN_HEADER_KEY_HASH_OFFSET); + if (!owner.updateEncoding(hashCode, from, to, ~0)) { + return false; + } else { + long tail = storage.readLong(to + CHAIN_HEADER_TAIL_OFFSET); + if (tail == from + OffHeapChainStorageEngine.this.totalChainHeaderSize) { + tail = to + OffHeapChainStorageEngine.this.totalChainHeaderSize; + storage.writeLong(to + CHAIN_HEADER_TAIL_OFFSET, tail); + } + storage.writeLong(tail + ELEMENT_HEADER_NEXT_OFFSET, to); + for (AttachedInternalChain activeChain : activeChains) { + activeChain.moved(from, to); + } + return true; + } + } else { + long chain = findHead(to); + + long tail = storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET); + if (tail == from) { + storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, to); + } + + long element = chain + OffHeapChainStorageEngine.this.totalChainHeaderSize; + while (element != chain) { + long next = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); + if (next == from) { + storage.writeLong(element + ELEMENT_HEADER_NEXT_OFFSET, to); + return true; + } else { + element = next; + } + } + throw new AssertionError(); + } + } + + @Override + public int sizeOf(long address) { + if (isHead(address)) { + int keySize = readKeySize(address); + return keySize + OffHeapChainStorageEngine.this.totalChainHeaderSize + sizeOf(address + OffHeapChainStorageEngine.this.totalChainHeaderSize); + } else { + int elementSize = readElementLength(address); + return ELEMENT_HEADER_SIZE + elementSize; + } + } + } +} diff --git a/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java new file mode 100644 index 0000000000..f3bed5513e --- /dev/null +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java @@ -0,0 +1,446 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.offheap; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.ServerStore; +import org.ehcache.clustered.server.KeySegmentMapper; +import org.ehcache.clustered.server.ServerStoreEventListener; +import org.ehcache.clustered.server.state.ResourcePageSource; +import org.terracotta.offheapstore.MapInternals; +import org.terracotta.offheapstore.exceptions.OversizeMappingException; +import org.terracotta.offheapstore.paging.PageSource; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.function.LongConsumer; +import java.util.function.LongFunction; + +import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; +import static org.terracotta.offheapstore.util.MemoryUnit.KILOBYTES; +import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; + +public class OffHeapServerStore implements ServerStore, MapInternals { + + private static final long MAX_PAGE_SIZE_IN_KB = KILOBYTES.convert(8, MEGABYTES); + + private final List> segments; + private final KeySegmentMapper mapper; + private volatile ServerStoreEventListener listener; + private volatile boolean fireEvents; + + public OffHeapServerStore(List> segments, KeySegmentMapper mapper) { + this.mapper = mapper; + this.segments = segments; + } + + OffHeapServerStore(PageSource source, KeySegmentMapper mapper, boolean writeBehindConfigured) { + this.mapper = mapper; + segments = new ArrayList<>(mapper.getSegments()); + for (int i = 0; i < mapper.getSegments(); i++) { + if (writeBehindConfigured) { + segments.add(new PinningOffHeapChainMap<>(source, LongPortability.INSTANCE, KILOBYTES.toBytes(4), MEGABYTES.toBytes(8), false)); + } else { + segments.add(new OffHeapChainMap<>(source, LongPortability.INSTANCE, KILOBYTES.toBytes(4), MEGABYTES.toBytes(8), false)); + } + } + } + + public OffHeapServerStore(ResourcePageSource source, KeySegmentMapper mapper, boolean writeBehindConfigured) { + this.mapper = mapper; + segments = new ArrayList<>(mapper.getSegments()); + long maxSize = getMaxSize(source.getPool().getSize()); + for (int i = 0; i < mapper.getSegments(); i++) { + if (writeBehindConfigured) { + segments.add(new PinningOffHeapChainMap<>(source, LongPortability.INSTANCE, KILOBYTES.toBytes(4), (int) KILOBYTES.toBytes(maxSize), false)); + } else { + segments.add(new OffHeapChainMap<>(source, LongPortability.INSTANCE, KILOBYTES.toBytes(4), (int)KILOBYTES.toBytes(maxSize), false)); + } + } + } + + public List> getSegments() { + return segments; + } + + static long getMaxSize(long poolSize) { + long l = Long.highestOneBit(poolSize); + long sizeInKb = KILOBYTES.convert(l, BYTES); + long maxSize = sizeInKb >> 5; + + if (maxSize >= MAX_PAGE_SIZE_IN_KB) { + maxSize = MAX_PAGE_SIZE_IN_KB; + } + return maxSize; + } + + public void setEventListener(ServerStoreEventListener listener) { + if (this.listener != null) { + throw new IllegalStateException("ServerStoreEventListener instance already set"); + } + this.listener = listener; + OffHeapChainMap.ChainMapEvictionListener chainMapEvictionListener = listener::onEviction; + for (OffHeapChainMap segment : segments) { + segment.setEvictionListener(chainMapEvictionListener); + } + } + + @Override + public Chain get(long key) { + return segmentFor(key).get(key); + } + + @Override + public void append(long key, ByteBuffer payLoad) { + LongConsumer lambda; + if (listener != null && fireEvents) { + lambda = (k) -> { + Chain beforeAppend = segmentFor(k).getAndAppend(k, payLoad); + listener.onAppend(beforeAppend, payLoad.duplicate()); + }; + } else { + lambda = (k) -> segmentFor(k).append(k, payLoad); + } + + try { + lambda.accept(key); + } catch (OversizeMappingException e) { + consumeOversizeMappingException(key, lambda); + } + } + + @Override + public Chain getAndAppend(long key, ByteBuffer payLoad) { + LongFunction lambda; + if (listener != null && fireEvents) { + lambda = (k) -> { + Chain beforeAppend = segmentFor(k).getAndAppend(k, payLoad); + listener.onAppend(beforeAppend, payLoad.duplicate()); + return beforeAppend; + }; + } else { + lambda = (k) -> segmentFor(k).getAndAppend(k, payLoad); + } + + try { + return lambda.apply(key); + } catch (OversizeMappingException e) { + return handleOversizeMappingException(key, lambda); + } + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + try { + segmentFor(key).replaceAtHead(key, expect, update); + } catch (OversizeMappingException e) { + consumeOversizeMappingException(key, (long k) -> segmentFor(k).replaceAtHead(k, expect, update)); + } + } + + public void put(long key, Chain chain) { + try { + try {segmentFor(key).put(key, chain); + } catch (OversizeMappingException e) { + consumeOversizeMappingException(key, (long k) -> segmentFor(k).put(k, chain));} + } catch (Throwable t) { + segmentFor(key).remove(key); + throw t; + } + } + + public void remove(long key) { + segmentFor(key).remove(key); + } + + @Override + public void clear() { + for (OffHeapChainMap segment : segments) { + segment.clear(); + } + } + + OffHeapChainMap segmentFor(long key) { + return segments.get(mapper.getSegmentForKey(key)); + } + + private void writeLockAll() { + for (OffHeapChainMap s : segments) { + s.writeLock().lock(); + } + } + + private void writeUnlockAll() { + for (OffHeapChainMap s : segments) { + s.writeLock().unlock(); + } + } + + private void consumeOversizeMappingException(long key, LongConsumer operation) { + handleOversizeMappingException(key, k -> { + operation.accept(k); + return null; + }); + } + + /** + * Force eviction from other segments until {@code operation} succeeds or no further eviction is possible. + * + * @param key the target key + * @param operation the previously failed operation + * @param operation result type + * @return the operation result + * @throws OversizeMappingException if the operation cannot be made to succeed + */ + private R handleOversizeMappingException(long key, LongFunction operation) throws OversizeMappingException { + if (tryShrinkOthers(key)) { + try { + return operation.apply(key); + } catch (OversizeMappingException ex) { + //ignore + } + } + + writeLockAll(); + try { + OversizeMappingException e; + do { + try { + return operation.apply(key); + } catch (OversizeMappingException ex) { + e = ex; + } + } while (tryShrinkOthers(key)); + throw e; + } finally { + writeUnlockAll(); + } + } + + boolean tryShrinkOthers(long key) { + boolean evicted = false; + + OffHeapChainMap target = segmentFor(key); + for (OffHeapChainMap s : segments) { + if (s != target) { + evicted |= s.shrink(); + } + } + + return evicted; + } + + public void close() { + writeLockAll(); + try { + clear(); + } finally { + writeUnlockAll(); + } + segments.clear(); + } + + // stats + + @Override + public long getAllocatedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getAllocatedMemory(); + } + return total; + } + + @Override + public long getOccupiedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getOccupiedMemory(); + } + return total; + } + + @Override + public long getDataAllocatedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataAllocatedMemory(); + } + return total; + } + + @Override + public long getDataOccupiedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataOccupiedMemory(); + } + return total; + } + + @Override + public long getDataSize() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataSize(); + } + return total; + } + + @Override + public long getSize() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getSize(); + } + return total; + } + + @Override + public long getTableCapacity() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getTableCapacity(); + } + return total; + } + + @Override + public long getUsedSlotCount() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getUsedSlotCount(); + } + return total; + } + + @Override + public long getRemovedSlotCount() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getRemovedSlotCount(); + } + return total; + } + + @Override + public int getReprobeLength() { + int total = 0; + for (MapInternals segment : segments) { + total += segment.getReprobeLength(); + } + return total; + } + + @Override + public long getVitalMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getVitalMemory(); + } + return total; + } + + @Override + public long getDataVitalMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataVitalMemory(); + } + return total; + } + + @Override + public Iterator> iterator() { + return new AggregateIterator>() { + @Override + protected Iterator> getNextIterator() { + return listIterator.next().iterator(); + } + }; + } + + public void enableEvents(boolean enable) { + this.fireEvents = enable; + } + + protected abstract class AggregateIterator implements Iterator { + + protected final Iterator> listIterator; + protected Iterator currentIterator; + + protected abstract Iterator getNextIterator(); + + public AggregateIterator() { + listIterator = segments.iterator(); + while (listIterator.hasNext()) { + currentIterator = getNextIterator(); + if (currentIterator.hasNext()) { + return; + } + } + } + + @Override + public boolean hasNext() { + if (currentIterator == null) { + return false; + } + + if (currentIterator.hasNext()) { + return true; + } else { + while (listIterator.hasNext()) { + currentIterator = getNextIterator(); + if (currentIterator.hasNext()) { + return true; + } + } + return false; + } + } + + @Override + public T next() { + if (currentIterator == null) { + throw new NoSuchElementException(); + } + + if (currentIterator.hasNext()) { + return currentIterator.next(); + } else { + while (listIterator.hasNext()) { + currentIterator = getNextIterator(); + + if (currentIterator.hasNext()) { + return currentIterator.next(); + } + } + } + + throw new NoSuchElementException(); + } + + @Override + public void remove() { + currentIterator.remove(); + } + } +} diff --git a/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/PinningOffHeapChainMap.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/PinningOffHeapChainMap.java new file mode 100644 index 0000000000..caa6d1154c --- /dev/null +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/offheap/PinningOffHeapChainMap.java @@ -0,0 +1,93 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.offheap; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.operations.codecs.OperationsCodec; +import org.terracotta.offheapstore.paging.PageSource; +import org.terracotta.offheapstore.storage.portability.Portability; + +import java.nio.ByteBuffer; +import java.util.concurrent.locks.Lock; +import java.util.function.Supplier; + +/** + * This class is used in WriteBehind implementation + */ +public class PinningOffHeapChainMap extends OffHeapChainMap { + + public PinningOffHeapChainMap(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean shareByThieving) { + super(source, keyPortability, minPageSize, maxPageSize, shareByThieving); + } + + @Override + public Chain getAndAppend(K key, ByteBuffer element) { + return execute(key, () -> super.getAndAppend(key, element)); + } + + @Override + public void append(K key, ByteBuffer element) { + execute(key, () -> { + super.append(key, element); + return null; + }); + } + + @Override + public void put(K key, Chain chain) { + execute(key, () -> { + super.put(key, chain); + return null; + }); + } + + @Override + public void replaceAtHead(K key, Chain expected, Chain replacement) { + execute(key, () -> { + heads.setPinning(key, false); + super.replaceAtHead(key, expected, replacement); + return null; + }); + } + + private Chain execute(K key, Supplier supplier) { + final Lock lock = heads.writeLock(); + lock.lock(); + try { + return supplier.get(); + } finally { + pinIfNeeded(key); + lock.unlock(); + } + } + + private void pinIfNeeded(K key) { + InternalChain internalChain = heads.get(key); + if (internalChain != null && shouldBePinned(internalChain.detach())) { + heads.setPinning(key, true); + } + } + + private boolean shouldBePinned(Chain chain) { + for (Element element : chain) { + if (OperationsCodec.getOperationCode(element.getPayload()).shouldBePinned()) { + return true; + } + } + return false; + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceDump.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceDump.java similarity index 100% rename from clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceDump.java rename to clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceDump.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java similarity index 84% rename from clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java rename to clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java index ab1c5ac533..dd2f7fbefb 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java @@ -25,17 +25,18 @@ import org.terracotta.entity.PlatformConfiguration; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceProvider; -import org.terracotta.entity.ServiceProviderCleanupException; import org.terracotta.entity.ServiceProviderConfiguration; import org.terracotta.entity.StateDumpCollector; import org.terracotta.offheapresource.OffHeapResources; +import java.io.Closeable; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.NoSuchElementException; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -43,11 +44,11 @@ * {@link ServiceProvider} for {@link EhcacheStateService} */ @BuiltinService -public class EhcacheStateServiceProvider implements ServiceProvider { +public class EhcacheStateServiceProvider implements ServiceProvider, Closeable { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheStateServiceProvider.class); - private ConcurrentMap serviceMap = new ConcurrentHashMap<>(); + private final ConcurrentMap serviceMap = new ConcurrentHashMap<>(); private OffHeapResources offHeapResourcesProvider; @Override @@ -62,18 +63,20 @@ public void addStateTo(StateDumpCollector dump) { @Override public boolean initialize(ServiceProviderConfiguration configuration, PlatformConfiguration platformConfiguration) { Collection extendedConfiguration = platformConfiguration.getExtendedConfiguration(OffHeapResources.class); + // When a server is activated, there will ALWAYS be one OffHeapResources, that will hold the mapping configured by the user with the "offheap-resources" setting. + // In diagnostic mode, no extended configuration is loaded, so there won't be any OffHeapResources. In that case, we ask this service to be discarded (by returning false). if (extendedConfiguration.size() > 1) { throw new UnsupportedOperationException("There are " + extendedConfiguration.size() + " OffHeapResourcesProvider, this is not supported. " + - "There must be only one!"); + "There must be only one!"); } Iterator iterator = extendedConfiguration.iterator(); if (iterator.hasNext()) { offHeapResourcesProvider = iterator.next(); if (offHeapResourcesProvider.getAllIdentifiers().isEmpty()) { - throw new UnsupportedOperationException("There are no offheap-resource defined, this is not supported. There must be at least one!"); + LOGGER.warn("No offheap-resource defined - this will prevent provider from offering any EhcacheStateService."); } } else { - LOGGER.warn("No offheap-resource defined - this will prevent provider from offering any EhcacheStateService."); + return false; } return true; } @@ -112,10 +115,16 @@ public Collection> getProvidedServiceTypes() { } @Override - public void prepareForSynchronization() throws ServiceProviderCleanupException { + public void prepareForSynchronization() { serviceMap.clear(); } + @Override + public void close() { + //passthrough test cleanup + serviceMap.values().forEach(EhcacheStateService::destroy); + } + public interface DestroyCallback { void destroy(EhcacheStateService service); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTrackerImpl.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/InvalidationTrackerImpl.java similarity index 93% rename from clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTrackerImpl.java rename to clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/InvalidationTrackerImpl.java index 02f118b354..b060aaaba6 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTrackerImpl.java +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/InvalidationTrackerImpl.java @@ -16,11 +16,14 @@ package org.ehcache.clustered.server.state; +import java.util.HashSet; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; +import static java.util.Collections.unmodifiableSet; + public class InvalidationTrackerImpl implements InvalidationTracker { private final ConcurrentMap invalidationMap = new ConcurrentHashMap<>(); @@ -67,7 +70,7 @@ public void untrackHashInvalidation(long chainKey) { @Override public Set getTrackedKeys() { - return getInvalidationMap().keySet(); + return unmodifiableSet(new HashSet<>(getInvalidationMap().keySet())); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java similarity index 89% rename from clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java rename to clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java index 9827e45474..e77b9c9dde 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java +++ b/clustered/server/ehcache-service/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java @@ -15,7 +15,6 @@ */ package org.ehcache.clustered.server.state; -import com.tc.classloader.CommonComponent; import org.ehcache.clustered.common.ServerSideConfiguration; import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; import org.terracotta.offheapstore.paging.OffHeapStorageArea; @@ -30,7 +29,6 @@ * Pairs a {@link ServerSideConfiguration.Pool} and an {@link UpfrontAllocatingPageSource} instance providing storage * for the pool. */ -@CommonComponent public class ResourcePageSource implements PageSource { /** * A description of the resource allocation underlying this {@code PageSource}. @@ -63,10 +61,9 @@ public void free(Page page) { @Override public String toString() { - final StringBuilder sb = new StringBuilder("ResourcePageSource{"); - sb.append("pool=").append(pool); - sb.append(", delegatePageSource=").append(delegatePageSource); - sb.append('}'); - return sb.toString(); + String sb = "ResourcePageSource{" + "pool=" + pool + + ", delegatePageSource=" + delegatePageSource + + '}'; + return sb; } } diff --git a/clustered/server/src/main/resources/META-INF/services/org.terracotta.entity.ServiceProvider b/clustered/server/ehcache-service/src/main/resources/META-INF/services/org.terracotta.entity.ServiceProvider similarity index 100% rename from clustered/server/src/main/resources/META-INF/services/org.terracotta.entity.ServiceProvider rename to clustered/server/ehcache-service/src/main/resources/META-INF/services/org.terracotta.entity.ServiceProvider diff --git a/clustered/server/src/main/resources/offheap-message.properties b/clustered/server/ehcache-service/src/main/resources/offheap-message.properties similarity index 100% rename from clustered/server/src/main/resources/offheap-message.properties rename to clustered/server/ehcache-service/src/main/resources/offheap-message.properties diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheStateServiceImplTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/EhcacheStateServiceImplTest.java similarity index 96% rename from clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheStateServiceImplTest.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/EhcacheStateServiceImplTest.java index 865f060072..b9507ea57a 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheStateServiceImplTest.java +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/EhcacheStateServiceImplTest.java @@ -19,10 +19,10 @@ import org.ehcache.clustered.common.internal.exceptions.DestroyInProgressException; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; public class EhcacheStateServiceImplTest { diff --git a/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/ChainMapExtensionTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/ChainMapExtensionTest.java new file mode 100644 index 0000000000..8a79bd0daf --- /dev/null +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/ChainMapExtensionTest.java @@ -0,0 +1,369 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.offheap; + +import org.ehcache.clustered.common.internal.store.Element; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.junit.Test; +import org.terracotta.offheapstore.ReadWriteLockedOffHeapClockCache; +import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; +import org.terracotta.offheapstore.paging.OffHeapStorageArea; +import org.terracotta.offheapstore.paging.PageSource; +import org.terracotta.offheapstore.paging.UnlimitedPageSource; +import org.terracotta.offheapstore.storage.PointerSize; +import org.terracotta.offheapstore.storage.portability.Portability; +import org.terracotta.offheapstore.storage.portability.StringPortability; +import org.terracotta.offheapstore.storage.portability.WriteContext; +import org.terracotta.offheapstore.util.Factory; + +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.locks.Lock; + +import static org.ehcache.clustered.ChainUtils.chainOf; +import static java.util.Collections.emptyList; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.collection.IsIterableContainingInOrder.contains; + +/** + * Test extensibility of chain map storage engine, including binary engine capabilities. + */ +public class ChainMapExtensionTest { + private static final int ADDRESS_OFFSET = 0; + private static final int HASH_OFFSET = 8; + private static final int EXTENDED_HEADER_LENGTH = 16; + private static final long NULL_ENCODING = Long.MAX_VALUE; + + private static final int STORAGE_KEY_LENGTH_OFFSET = 0; + private static final int STORAGE_VALUE_LENGTH_OFFSET = 4; + private static final int STORAGE_HEADER_OFFSET = 8; + + static final ByteBuffer EMPTY_HEADER_NODE; + static { + ByteBuffer emptyHeader = ByteBuffer.allocateDirect(EXTENDED_HEADER_LENGTH); + emptyHeader.putLong(ADDRESS_OFFSET, NULL_ENCODING); + emptyHeader.putLong(HASH_OFFSET, -1); + EMPTY_HEADER_NODE = emptyHeader.asReadOnlyBuffer(); + } + + @Test + public void testAppend() { + OffHeapChainMap map = getChainMapWithExtendedStorageEngine(); + map.append("foo", buffer(1)); + assertThat(map.get("foo"), contains(element(1))); + ChainStorageEngine se = map.getStorageEngine(); + assertThat(se, is(instanceOf(ExtendedOffHeapChainStorageEngine.class))); + ExtendedOffHeapChainStorageEngine ese = (ExtendedOffHeapChainStorageEngine) se; + map = getNewMap(ese); + assertThat(map.get("foo"), contains(element(1))); + } + + @Test + public void testAppendAndReplace() { + OffHeapChainMap map = getChainMapWithExtendedStorageEngine(); + map.append("foo", buffer(1)); + assertThat(map.get("foo"), contains(element(1))); + map.replaceAtHead("foo", chainOf(buffer(1)), chainOf()); + ChainStorageEngine se = map.getStorageEngine(); + assertThat(se, is(instanceOf(ExtendedOffHeapChainStorageEngine.class))); + @SuppressWarnings("unchecked") + ExtendedOffHeapChainStorageEngine ese = (ExtendedOffHeapChainStorageEngine) se; + map = getNewMap(ese); + assertThat(map.get("foo"), emptyIterable()); + } + + @SuppressWarnings("unchecked") + @Test + public void testMultipleAppendAndReplace() { + OffHeapChainMap map = getChainMapWithExtendedStorageEngine(); + for (int i = 1; i <= 20; i++) { + map.append("foo" + i, buffer(i)); + assertThat(map.get("foo" + i), contains(element(i))); + } + for (int i = 1; i <= 20; i++) { + assertThat(map.getAndAppend("foo" + i, buffer(1)), contains(element(i))); + } + for (int i = 10; i < 15; i++) { + map.replaceAtHead("foo" + i, chainOf(buffer(i), buffer(1)), chainOf()); + } + + ChainStorageEngine se = map.getStorageEngine(); + assertThat(se, is(instanceOf(ExtendedOffHeapChainStorageEngine.class))); + ExtendedOffHeapChainStorageEngine ese = (ExtendedOffHeapChainStorageEngine) se; + map = getNewMap(ese); + for (int i = 1; i <= 20; i++) { + if (i < 10 || i >= 15) { + assertThat(map.get("foo" + i), contains(element(i), element(1))); + } else { + assertThat(map.get("foo" + i), emptyIterable()); + } + } + } + + private OffHeapChainMap getChainMapWithExtendedStorageEngine() { + PageSource chainSource = new UnlimitedPageSource(new OffHeapBufferSource()); + PageSource extendedSource = new UnlimitedPageSource(new OffHeapBufferSource()); + Factory> factory = ExtendedOffHeapChainStorageEngine.createFactory(chainSource, + StringPortability.INSTANCE, 4096, 4096, false, false, extendedSource); + return new OffHeapChainMap<>(chainSource, factory); + } + + private OffHeapChainMap getNewMap(ExtendedOffHeapChainStorageEngine ese) { + PageSource chainSource = new UnlimitedPageSource(new OffHeapBufferSource()); + Factory> factory = OffHeapChainStorageEngine.createFactory(chainSource, + StringPortability.INSTANCE, 4096, 4096, false, false); + OffHeapChainStorageEngine storageEngine = (OffHeapChainStorageEngine) factory.newInstance(); + OffHeapChainMap.HeadMap newMap = new OffHeapChainMap.HeadMap<>(e -> {}, chainSource, storageEngine); + ese.replayIntoMap(newMap); + return new OffHeapChainMap<>(newMap, storageEngine); + } + + private static ByteBuffer buffer(int i) { + ByteBuffer buffer = ByteBuffer.allocate(i); + while (buffer.hasRemaining()) { + buffer.put((byte) i); + } + return (ByteBuffer) buffer.flip(); + } + + private static Matcher element(int i) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(Element item) { + return item.getPayload().equals(buffer(i)); + } + + @Override + public void describeTo(Description description) { + description.appendText("element containing buffer[" + i +"]"); + } + }; + } + + private static final class ExtendedHeaderForTest { + private final ByteBuffer data; + private final WriteContext writeContext; + + ExtendedHeaderForTest(ByteBuffer buffer, WriteContext writeContext) { + this.data = buffer; + this.writeContext = writeContext; + } + + long getAddress() { + return getLong(ADDRESS_OFFSET); + } + + void setAddress(long val) { + writeContext.setLong(ADDRESS_OFFSET, val); + } + + int getHash() { + long hashAndSize = getLong(HASH_OFFSET) >> 32; + return (int) hashAndSize; + } + + int getSize() { + long hashAndSize = getLong(HASH_OFFSET); + return (int) hashAndSize; + } + + void setHashAndSize(int hash, int size) { + long val = ((long) hash << 32) | size; + writeContext.setLong(HASH_OFFSET, val); + } + + private long getLong(int address) { + return data.getLong(address); + } + } + + public static class ExtendedOffHeapChainStorageEngine extends OffHeapChainStorageEngine { + private final OffHeapStorageArea extendedArea; + private final Set chainAddresses; + private volatile boolean bypassEngineCommands = false; + + public static Factory> + createFactory(PageSource source, + Portability keyPortability, + int minPageSize, int maxPageSize, + boolean thief, boolean victim, PageSource cachePageSource) { + return (Factory>)() -> + new ExtendedOffHeapChainStorageEngine<>(source, keyPortability, + minPageSize, maxPageSize, thief, victim, cachePageSource); + } + + private ExtendedOffHeapChainStorageEngine(PageSource source, Portability keyPortability, int minPageSize, + int maxPageSize, boolean thief, boolean victim, + PageSource cachePageSource) { + super(source, keyPortability, minPageSize, maxPageSize, thief, victim, EMPTY_HEADER_NODE); + this.extendedArea = new OffHeapStorageArea(PointerSize.LONG, new ExtendedEngineOwner(), cachePageSource, minPageSize, maxPageSize, thief, victim); + this.chainAddresses = new HashSet<>(); + } + + @Override + public Long writeMapping(K key, InternalChain value, int hash, int metadata) { + bypassEngineCommands = true; + try { + return super.writeMapping(key, value, hash, metadata); + } finally { + bypassEngineCommands = false; + } + } + + @Override + public void freeMapping(long encoding, int hash, boolean removal) { + if (removal) { + // free the chain here if we are removing..otherwise chainFreed will be invoked from within + chainFreed(encoding); + } + super.freeMapping(encoding, hash, removal); + } + + @Override + public void chainAttached(long chainAddress) { + localPut(chainAddress); + } + + @Override + public void chainFreed(long chainAddress) { + if (bypassEngineCommands) { + // do not do anything when in write mapping + return; + } + localRemove(chainAddress); + } + + @Override + public void chainModified(long chainAddress) { + if (bypassEngineCommands) { + return; + } + localPut(chainAddress); + } + + @Override + public void chainMoved(long fromChainAddress, long toChainAddress) { + if (bypassEngineCommands) { + return; + } + localMove(fromChainAddress, toChainAddress); + } + + private ExtendedHeaderForTest createAtExtensionAddress(long chainAddress) { + return new ExtendedHeaderForTest(getExtensionHeader(chainAddress), + getExtensionWriteContext(chainAddress)); + } + + void replayIntoMap(ReadWriteLockedOffHeapClockCache newMap) { + Lock l = newMap.writeLock(); + l.lock(); + try { + chainAddresses.forEach((a) -> { + ExtendedHeaderForTest hdr = createAtExtensionAddress(a); + long address = hdr.getAddress(); + int keyLength = extendedArea.readInt(address + STORAGE_KEY_LENGTH_OFFSET); + int valueLength = extendedArea.readInt(address + STORAGE_VALUE_LENGTH_OFFSET); + ByteBuffer keyBuffer = extendedArea.readBuffer(address + STORAGE_HEADER_OFFSET, keyLength); + ByteBuffer valueBuffer = extendedArea.readBuffer(address + STORAGE_HEADER_OFFSET + keyLength, valueLength); + newMap.installMappingForHashAndEncoding(hdr.getHash(), keyBuffer, valueBuffer, 0); + }); + } finally { + l.unlock(); + } + } + + private void localPut(long chainAddress) { + ByteBuffer keyBuffer = super.readBinaryKey(chainAddress); + int hash = super.readKeyHash(chainAddress); + ByteBuffer valueBuffer = super.readBinaryValue(chainAddress); + writeToExtendedArea(chainAddress, hash, keyBuffer, valueBuffer); + } + + private void writeToExtendedArea(long chainAddress, int hash, ByteBuffer keyBuffer, ByteBuffer valueBuffer) { + ExtendedHeaderForTest hdr = createAtExtensionAddress(chainAddress); + long address = hdr.getAddress(); + if (address != NULL_ENCODING) { + // free previous + extendedArea.free(address); + } else { + chainAddresses.add(chainAddress); + } + int size = (2 * Integer.BYTES) + keyBuffer.remaining() + valueBuffer.remaining(); + address = extendedArea.allocate(size); + hdr.setAddress(address); + hdr.setHashAndSize(hash, size); + extendedArea.writeInt(address + STORAGE_KEY_LENGTH_OFFSET, keyBuffer.remaining()); + extendedArea.writeInt(address + STORAGE_VALUE_LENGTH_OFFSET, valueBuffer.remaining()); + extendedArea.writeBuffer(address + STORAGE_HEADER_OFFSET, keyBuffer.duplicate()); + extendedArea.writeBuffer(address + STORAGE_HEADER_OFFSET + keyBuffer.remaining(), valueBuffer.duplicate()); + } + + private void localRemove(long chainAddress) { + ExtendedHeaderForTest node = createAtExtensionAddress(chainAddress); + long address = node.getAddress(); + if (address != NULL_ENCODING) { + extendedArea.free(node.getAddress()); + chainAddresses.remove(chainAddress); + } + node.setAddress(NULL_ENCODING); + } + + private void localMove(long fromChainAddress, long toChainAddress) { + ExtendedHeaderForTest fromHeader = createAtExtensionAddress(fromChainAddress); + ExtendedHeaderForTest toHeader = createAtExtensionAddress(toChainAddress); + chainAddresses.remove(fromChainAddress); + chainAddresses.add(toChainAddress); + toHeader.setAddress(fromHeader.getAddress()); + toHeader.setHashAndSize(fromHeader.getHash(), fromHeader.getSize()); + } + + private class ExtendedEngineOwner implements OffHeapStorageArea.Owner { + @Override + public Collection evictAtAddress(long address, boolean shrink) { + return emptyList(); + } + + @Override + public Lock writeLock() { + return owner.writeLock(); + } + + @Override + public boolean isThief() { + return owner.isThiefForTableAllocations(); + } + + @Override + public boolean moved(long from, long to) { + // for now not supported + return false; + } + + @Override + public int sizeOf(long address) { + return extendedArea.readInt(address + STORAGE_KEY_LENGTH_OFFSET) + + extendedArea.readInt(address + STORAGE_VALUE_LENGTH_OFFSET) + STORAGE_HEADER_OFFSET; + } + } + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java similarity index 79% rename from clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java index 533ec3b007..d3f95b0770 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java @@ -23,6 +23,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; +import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.Element; import org.hamcrest.Description; @@ -43,13 +44,13 @@ import org.terracotta.offheapstore.storage.portability.StringPortability; import static java.util.Arrays.asList; -import static org.ehcache.clustered.server.offheap.OffHeapChainMap.chain; +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.collection.IsEmptyIterable.emptyIterable; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.terracotta.offheapstore.util.MemoryUnit.KILOBYTES; @@ -78,6 +79,8 @@ public void testInitiallyEmptyChain() { OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); assertThat(map.get("foo"), emptyIterable()); + + emptyAndValidate(map); } @Test @@ -86,6 +89,8 @@ public void testAppendToEmptyChain() { map.append("foo", buffer(1)); assertThat(map.get("foo"), contains(element(1))); + + emptyAndValidate(map); } @Test @@ -94,6 +99,8 @@ public void testGetAndAppendToEmptyChain() { assertThat(map.getAndAppend("foo", buffer(1)), emptyIterable()); assertThat(map.get("foo"), contains(element(1))); + + emptyAndValidate(map); } @Test @@ -103,6 +110,8 @@ public void testAppendToSingletonChain() { map.append("foo", buffer(2)); assertThat(map.get("foo"), contains(element(1), element(2))); + + emptyAndValidate(map); } @Test @@ -112,6 +121,8 @@ public void testGetAndAppendToSingletonChain() { assertThat(map.getAndAppend("foo", buffer(2)), contains(element(1))); assertThat(map.get("foo"), contains(element(1), element(2))); + + emptyAndValidate(map); } @Test @@ -122,6 +133,8 @@ public void testAppendToDoubleChain() { map.append("foo", buffer(3)); assertThat(map.get("foo"), contains(element(1), element(2), element(3))); + + emptyAndValidate(map); } @Test @@ -132,6 +145,8 @@ public void testGetAndAppendToDoubleChain() { assertThat(map.getAndAppend("foo", buffer(3)), contains(element(1), element(2))); assertThat(map.get("foo"), contains(element(1), element(2), element(3))); + + emptyAndValidate(map); } @Test @@ -143,6 +158,8 @@ public void testAppendToTripleChain() { map.append("foo", buffer(4)); assertThat(map.get("foo"), contains(element(1), element(2), element(3), element(4))); + + emptyAndValidate(map); } @Test @@ -154,6 +171,8 @@ public void testGetAndAppendToTripleChain() { assertThat(map.getAndAppend("foo", buffer(4)), contains(element(1), element(2), element(3))); assertThat(map.get("foo"), contains(element(1), element(2), element(3), element(4))); + + emptyAndValidate(map); } @Test @@ -161,11 +180,13 @@ public void testReplaceEmptyChainAtHeadOnEmptyChainFails() { OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); try { - map.replaceAtHead("foo", chain(), chain(buffer(1))); + map.replaceAtHead("foo", chainOf(), chainOf(buffer(1))); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected } + + emptyAndValidate(map); } @Test @@ -174,11 +195,13 @@ public void testReplaceEmptyChainAtHeadOnNonEmptyChain() { map.append("foo", buffer(1)); try { - map.replaceAtHead("foo", chain(), chain(buffer(2))); + map.replaceAtHead("foo", chainOf(), chainOf(buffer(2))); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected } + + emptyAndValidate(map); } @Test @@ -186,8 +209,10 @@ public void testMismatchingReplaceSingletonChainAtHeadOnSingletonChain() { OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); map.append("foo", buffer(1)); - map.replaceAtHead("foo", chain(buffer(2)), chain(buffer(42))); + map.replaceAtHead("foo", chainOf(buffer(2)), chainOf(buffer(42))); assertThat(map.get("foo"), contains(element(1))); + + emptyAndValidate(map); } @Test @@ -195,8 +220,10 @@ public void testReplaceSingletonChainAtHeadOnSingletonChain() { OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); map.append("foo", buffer(1)); - map.replaceAtHead("foo", chain(buffer(1)), chain(buffer(42))); + map.replaceAtHead("foo", chainOf(buffer(1)), chainOf(buffer(42))); assertThat(map.get("foo"), contains(element(42))); + + emptyAndValidate(map); } @Test @@ -205,8 +232,10 @@ public void testReplaceSingletonChainAtHeadOnDoubleChain() { map.append("foo", buffer(1)); map.append("foo", buffer(2)); - map.replaceAtHead("foo", chain(buffer(1)), chain(buffer(42))); + map.replaceAtHead("foo", chainOf(buffer(1)), chainOf(buffer(42))); assertThat(map.get("foo"), contains(element(42), element(2))); + + emptyAndValidate(map); } @Test @@ -216,8 +245,10 @@ public void testReplaceSingletonChainAtHeadOnTripleChain() { map.append("foo", buffer(2)); map.append("foo", buffer(3)); - map.replaceAtHead("foo", chain(buffer(1)), chain(buffer(42))); + map.replaceAtHead("foo", chainOf(buffer(1)), chainOf(buffer(42))); assertThat(map.get("foo"), contains(element(42), element(2), element(3))); + + emptyAndValidate(map); } @Test @@ -226,8 +257,10 @@ public void testMismatchingReplacePluralChainAtHead() { map.append("foo", buffer(1)); map.append("foo", buffer(2)); - map.replaceAtHead("foo", chain(buffer(1), buffer(3)), chain(buffer(42))); + map.replaceAtHead("foo", chainOf(buffer(1), buffer(3)), chainOf(buffer(42))); assertThat(map.get("foo"), contains(element(1), element(2))); + + emptyAndValidate(map); } @Test @@ -236,8 +269,10 @@ public void testReplacePluralChainAtHeadOnDoubleChain() { map.append("foo", buffer(1)); map.append("foo", buffer(2)); - map.replaceAtHead("foo", chain(buffer(1), buffer(2)), chain(buffer(42))); + map.replaceAtHead("foo", chainOf(buffer(1), buffer(2)), chainOf(buffer(42))); assertThat(map.get("foo"), contains(element(42))); + + emptyAndValidate(map); } @Test @@ -247,8 +282,10 @@ public void testReplacePluralChainAtHeadOnTripleChain() { map.append("foo", buffer(2)); map.append("foo", buffer(3)); - map.replaceAtHead("foo", chain(buffer(1), buffer(2)), chain(buffer(42))); + map.replaceAtHead("foo", chainOf(buffer(1), buffer(2)), chainOf(buffer(42))); assertThat(map.get("foo"), contains(element(42), element(3))); + + emptyAndValidate(map); } @Test @@ -259,9 +296,11 @@ public void testReplacePluralChainAtHeadWithEmpty() { map.append("foo", buffer(3)); long before = map.getDataOccupiedMemory(); - map.replaceAtHead("foo", chain(buffer(1), buffer(2)), chain()); + map.replaceAtHead("foo", chainOf(buffer(1), buffer(2)), chainOf()); assertThat(map.getDataOccupiedMemory(), lessThan(before)); assertThat(map.get("foo"), contains(element(3))); + + emptyAndValidate(map); } @Test @@ -271,8 +310,10 @@ public void testSequenceBasedChainComparison() { map.append("foo", buffer(2)); map.append("foo", buffer(3)); - map.replaceAtHead("foo", map.get("foo"), chain()); + map.replaceAtHead("foo", map.get("foo"), chainOf()); assertThat(map.get("foo"), emptyIterable()); + + emptyAndValidate(map); } @Test @@ -283,9 +324,11 @@ public void testReplaceFullPluralChainAtHeadWithEmpty() { map.append("foo", buffer(3)); assertThat(map.getDataOccupiedMemory(), greaterThan(0L)); - map.replaceAtHead("foo", chain(buffer(1), buffer(2), buffer(3)), chain()); + map.replaceAtHead("foo", chainOf(buffer(1), buffer(2), buffer(3)), chainOf()); assertThat(map.getDataOccupiedMemory(), is(0L)); assertThat(map.get("foo"), emptyIterable()); + + emptyAndValidate(map); } @Test @@ -311,6 +354,9 @@ public void testContinualAppendCausingEvictionIsStable() { break; } } + + emptyAndValidate(mapA); + emptyAndValidate(mapB); } else { OffHeapChainMap map = new OffHeapChainMap<>(pageSource, StringPortability.INSTANCE, minPageSize, maxPageSize, false); @@ -328,6 +374,8 @@ public void testContinualAppendCausingEvictionIsStable() { break; } } + + emptyAndValidate(map); } } @@ -335,17 +383,21 @@ public void testContinualAppendCausingEvictionIsStable() { public void testPutWhenKeyIsNotNull() { OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); map.append("key", buffer(3)); - map.put("key", chain(buffer(1), buffer(2))); + map.put("key", chainOf(buffer(1), buffer(2))); assertThat(map.get("key"), contains(element(1), element(2))); + + emptyAndValidate(map); } @Test public void testPutWhenKeyIsNull() { OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); - map.put("key", chain(buffer(1), buffer(2))); + map.put("key", chainOf(buffer(1), buffer(2))); assertThat(map.get("key"), contains(element(1), element(2))); + + emptyAndValidate(map); } @Test @@ -353,27 +405,28 @@ public void testActiveChainsThreadSafety() throws ExecutionException, Interrupte UnlimitedPageSource source = new UnlimitedPageSource(new OffHeapBufferSource()); OffHeapChainStorageEngine chainStorage = new OffHeapChainStorageEngine<>(source, StringPortability.INSTANCE, minPageSize, maxPageSize, steal, steal); - ReadWriteLockedOffHeapClockCache heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(callable -> {}, source, chainStorage); + OffHeapChainMap.HeadMap heads = new OffHeapChainMap.HeadMap<>(callable -> {}, source, chainStorage); OffHeapChainMap map = new OffHeapChainMap<>(heads, chainStorage); - map.put("key", chain(buffer(1), buffer(2))); + map.put("key", chainOf(buffer(1), buffer(2))); int nThreads = 10; ExecutorService executorService = Executors.newFixedThreadPool(nThreads); - List futures = new ArrayList<>(); + List> futures = new ArrayList<>(); for (int i = 0; i < nThreads ; i++) { futures.add(executorService.submit(() -> map.get("key"))); } - for (Future f : futures) { + for (Future f : futures) { f.get(); } assertThat(chainStorage.getActiveChains().size(), is(0)); + emptyAndValidate(map); } @Test @@ -381,15 +434,47 @@ public void testPutDoesNotLeakWhenMappingIsNotNull() { UnlimitedPageSource source = new UnlimitedPageSource(new OffHeapBufferSource()); OffHeapChainStorageEngine chainStorage = new OffHeapChainStorageEngine<>(source, StringPortability.INSTANCE, minPageSize, maxPageSize, steal, steal); - ReadWriteLockedOffHeapClockCache heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(callable -> {}, source, chainStorage); + OffHeapChainMap.HeadMap heads = new OffHeapChainMap.HeadMap<>(callable -> {}, source, chainStorage); OffHeapChainMap map = new OffHeapChainMap<>(heads, chainStorage); - map.put("key", chain(buffer(1))); - map.put("key", chain(buffer(2))); + map.put("key", chainOf(buffer(1))); + map.put("key", chainOf(buffer(2))); assertThat(chainStorage.getActiveChains().size(), is(0)); + emptyAndValidate(map); + } + + @Test + public void testRemoveMissingKey() { + OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); + map.remove("foo"); + assertThat(map.get("foo").isEmpty(), is(true)); + } + + @Test + public void testRemoveSingleChain() { + OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); + map.append("foo", buffer(1)); + map.append("bar", buffer(2)); + assertThat(map.get("foo"), contains(element(1))); + assertThat(map.get("bar"), contains(element(2))); + + map.remove("foo"); + assertThat(map.get("foo").isEmpty(), is(true)); + assertThat(map.get("bar"), contains(element(2))); + } + + @Test + public void testRemoveDoubleChain() { + OffHeapChainMap map = new OffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); + map.append("foo", buffer(1)); + map.append("foo", buffer(2)); + assertThat(map.get("foo"), contains(element(1), element(2))); + + map.remove("foo"); + assertThat(map.get("foo").isEmpty(), is(true)); } private static ByteBuffer buffer(int i) { @@ -414,5 +499,11 @@ public void describeTo(Description description) { }; } - + private void emptyAndValidate(OffHeapChainMap map) { + for (String key : map.keySet()) { + map.replaceAtHead(key, map.get(key), chainOf()); + } + assertThat(map.getSize(), is(0L)); + assertThat(map.getDataOccupiedMemory(), is(0L)); + } } diff --git a/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java new file mode 100644 index 0000000000..bc71b29b54 --- /dev/null +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java @@ -0,0 +1,452 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.offheap; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; +import java.util.concurrent.locks.ReentrantLock; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.server.KeySegmentMapper; +import org.ehcache.clustered.server.ServerStoreEventListener; +import org.ehcache.clustered.server.store.ChainBuilder; +import org.ehcache.clustered.server.store.ElementBuilder; +import org.ehcache.clustered.common.internal.store.ServerStore; +import org.ehcache.clustered.server.store.ServerStoreTest; +import org.junit.Test; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; +import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; +import org.terracotta.offheapstore.exceptions.OversizeMappingException; +import org.terracotta.offheapstore.paging.UnlimitedPageSource; +import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; +import static org.terracotta.offheapstore.util.MemoryUnit.KILOBYTES; +import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; + +public class OffHeapServerStoreTest extends ServerStoreTest { + + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + + @SuppressWarnings("unchecked") + private OffHeapChainMap getOffHeapChainMapMock() { + return mock(OffHeapChainMap.class); + } + + @SuppressWarnings("unchecked") + private OffHeapChainMap getOffHeapChainMapLongMock() { + return mock(OffHeapChainMap.class); + } + + @SuppressWarnings("unchecked") + private ChainStorageEngine getChainStorageEngineLongMock() { + return mock(ChainStorageEngine.class); + } + + @Override + public ServerStore newStore() { + return new OffHeapServerStore(new UnlimitedPageSource(new OffHeapBufferSource()), DEFAULT_MAPPER, false); + } + + @Override + public ChainBuilder newChainBuilder() { + return elements -> { + ByteBuffer[] buffers = new ByteBuffer[elements.length]; + for (int i = 0; i < buffers.length; i++) { + buffers[i] = elements[i].getPayload(); + } + return chainOf(buffers); + }; + } + + @Override + public ElementBuilder newElementBuilder() { + return payLoad -> () -> payLoad.asReadOnlyBuffer(); + } + + @Test + public void testGetMaxSize() { + assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(2)), is(64L)); + assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(4)), is(128L)); + assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(16)), is(512L)); + assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(64)), is(2048L)); + assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(128)), is(4096L)); + assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(256)), is(8192L)); + assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(512)), is(8192L)); + + assertThat(OffHeapServerStore.getMaxSize(GIGABYTES.toBytes(2)), is(8192L)); + } + + @Test + public void put_worked_the_first_time_test() throws Exception { + OffHeapChainMap offheapChainMap = getOffHeapChainMapLongMock(); + ChainStorageEngine storageEngine = getChainStorageEngineLongMock(); + when(offheapChainMap.getStorageEngine()).thenReturn(storageEngine); + + doNothing() + .when(offheapChainMap).put(anyLong(), any(Chain.class)); + + OffHeapServerStore offHeapServerStore = new OffHeapServerStore(singletonList(offheapChainMap), mock(KeySegmentMapper.class)); + offHeapServerStore.put(43L, mock(Chain.class)); + } + + + @Test(expected = OversizeMappingException.class) + public void put_should_throw_when_underlying_put_always_throw_test() throws Exception { + OffHeapChainMap offheapChainMap = getOffHeapChainMapLongMock(); + ChainStorageEngine storageEngine = getChainStorageEngineLongMock(); + when(offheapChainMap.getStorageEngine()).thenReturn(storageEngine); + when(offheapChainMap.writeLock()).thenReturn(new ReentrantLock()); + + doThrow(new OversizeMappingException()) + .when(offheapChainMap).put(anyLong(), any(Chain.class)); + + OffHeapServerStore offHeapServerStore = new OffHeapServerStore(singletonList(offheapChainMap), mock(KeySegmentMapper.class)); + offHeapServerStore.put(43L, mock(Chain.class)); + } + + @Test + public void put_should_return_when_underlying_put_does_not_throw_test() throws Exception { + OffHeapChainMap offheapChainMap = getOffHeapChainMapLongMock(); + ChainStorageEngine storageEngine = getChainStorageEngineLongMock(); + when(offheapChainMap.getStorageEngine()).thenReturn(storageEngine); + when(offheapChainMap.writeLock()).thenReturn(new ReentrantLock()); + + // throw once, then ok + doThrow(new OversizeMappingException()) + .doNothing() + .when(offheapChainMap).put(anyLong(), any(Chain.class)); + + OffHeapServerStore offHeapServerStore = new OffHeapServerStore(singletonList(offheapChainMap), mock(KeySegmentMapper.class)); + offHeapServerStore.put(43L, mock(Chain.class)); + } + + @Test + public void put_should_return_when_underlying_put_does_not_throw_with_keymapper_test() throws Exception { + long theKey = 43L; + ChainStorageEngine storageEngine = getChainStorageEngineLongMock(); + OffHeapChainMap offheapChainMap = getOffHeapChainMapLongMock(); + OffHeapChainMap otherOffheapChainMap = getOffHeapChainMapLongMock(); + when(offheapChainMap.shrink()).thenReturn(true); + when(offheapChainMap.getStorageEngine()).thenReturn(storageEngine); + when(offheapChainMap.writeLock()).thenReturn(new ReentrantLock()); + when(otherOffheapChainMap.writeLock()).thenReturn(new ReentrantLock()); + + // throw twice, then OK + doThrow(new OversizeMappingException()) + .doThrow(new OversizeMappingException()) + .doNothing() + .when(otherOffheapChainMap).put(anyLong(), any(Chain.class)); + + KeySegmentMapper keySegmentMapper = mock(KeySegmentMapper.class); + when(keySegmentMapper.getSegmentForKey(theKey)).thenReturn(1); + OffHeapServerStore offHeapServerStore = new OffHeapServerStore(asList(offheapChainMap, otherOffheapChainMap), keySegmentMapper); + offHeapServerStore.put(theKey, mock(Chain.class)); + + //getSegmentForKey was called 4 times : segmentFor, handleOversizeMappingException, segmentFor, segmentFor + verify(keySegmentMapper, times(4)).getSegmentForKey(theKey); + } + + + @Test + public void test_append_doesNotConsumeBuffer_evenWhenOversizeMappingException() throws Exception { + OffHeapServerStore store = (OffHeapServerStore) spy(newStore()); + final OffHeapChainMap offHeapChainMap = getOffHeapChainMapMock(); + doThrow(OversizeMappingException.class).when(offHeapChainMap).append(any(Object.class), any(ByteBuffer.class)); + + when(store.segmentFor(anyLong())).then(new Answer() { + int invocations = 0; + @Override + public Object answer(InvocationOnMock invocation) throws Throwable { + if (invocations++ < 10) { + return offHeapChainMap; + } else { + return invocation.callRealMethod(); + } + } + }); + when(store.tryShrinkOthers(anyLong())).thenReturn(true); + + ByteBuffer payload = createPayload(1L); + + store.append(1L, payload); + assertThat(payload.remaining(), is(8)); + } + + @Test + public void test_getAndAppend_doesNotConsumeBuffer_evenWhenOversizeMappingException() throws Exception { + OffHeapServerStore store = (OffHeapServerStore) spy(newStore()); + final OffHeapChainMap offHeapChainMap = getOffHeapChainMapMock(); + doThrow(OversizeMappingException.class).when(offHeapChainMap).getAndAppend(any(), any(ByteBuffer.class)); + + when(store.segmentFor(anyLong())).then(new Answer() { + int invocations = 0; + @Override + public Object answer(InvocationOnMock invocation) throws Throwable { + if (invocations++ < 10) { + return offHeapChainMap; + } else { + return invocation.callRealMethod(); + } + } + }); + when(store.tryShrinkOthers(anyLong())).thenReturn(true); + + + ByteBuffer payload = createPayload(1L); + + store.getAndAppend(1L, payload); + assertThat(payload.remaining(), is(8)); + + Chain expected = newChainBuilder().build(newElementBuilder().build(payload), newElementBuilder().build(payload)); + Chain update = newChainBuilder().build(newElementBuilder().build(payload)); + store.replaceAtHead(1L, expected, update); + assertThat(payload.remaining(), is(8)); + } + + @Test + public void test_replaceAtHead_doesNotConsumeBuffer_evenWhenOversizeMappingException() throws Exception { + OffHeapServerStore store = (OffHeapServerStore) spy(newStore()); + final OffHeapChainMap offHeapChainMap = getOffHeapChainMapMock(); + doThrow(OversizeMappingException.class).when(offHeapChainMap).replaceAtHead(any(), any(Chain.class), any(Chain.class)); + + when(store.segmentFor(anyLong())).then(new Answer() { + int invocations = 0; + @Override + public Object answer(InvocationOnMock invocation) throws Throwable { + if (invocations++ < 10) { + return offHeapChainMap; + } else { + return invocation.callRealMethod(); + } + } + }); + when(store.tryShrinkOthers(anyLong())).thenReturn(true); + + + ByteBuffer payload = createPayload(1L); + + Chain expected = newChainBuilder().build(newElementBuilder().build(payload), newElementBuilder().build(payload)); + Chain update = newChainBuilder().build(newElementBuilder().build(payload)); + store.replaceAtHead(1L, expected, update); + assertThat(payload.remaining(), is(8)); + } + + @Test + public void testCrossSegmentShrinking() { + long seed = System.nanoTime(); + Random random = new Random(seed); + try { + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), MEGABYTES.toBytes(1L), MEGABYTES.toBytes(1)), DEFAULT_MAPPER, false); + + ByteBuffer smallValue = ByteBuffer.allocate(1024); + for (int i = 0; i < 10000; i++) { + try { + store.getAndAppend(random.nextInt(500), smallValue.duplicate()); + } catch (OversizeMappingException e) { + //ignore + } + } + + ByteBuffer largeValue = ByteBuffer.allocate(100 * 1024); + for (int i = 0; i < 10000; i++) { + try { + store.getAndAppend(random.nextInt(500), largeValue.duplicate()); + } catch (OversizeMappingException e) { + //ignore + } + } + } catch (Throwable t) { + throw (AssertionError) new AssertionError("Failed with seed " + seed).initCause(t); + } + } + + @Test + public void testServerSideUsageStats() { + + long maxBytes = MEGABYTES.toBytes(1); + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), maxBytes, MEGABYTES.toBytes(1)), new KeySegmentMapper(16), false); + + int oneKb = 1024; + long smallLoopCount = 5; + ByteBuffer smallValue = ByteBuffer.allocate(oneKb); + for (long i = 0; i < smallLoopCount; i++) { + store.getAndAppend(i, smallValue.duplicate()); + } + + assertThat(store.getAllocatedMemory(),lessThanOrEqualTo(maxBytes)); + assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(store.getOccupiedMemory())); + + //asserts above already guarantee that occupiedMemory <= maxBytes and that occupiedMemory <= allocatedMemory + assertThat(store.getOccupiedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + + assertThat(store.getSize(), is(smallLoopCount)); + + int multiplier = 100; + long largeLoopCount = 5 + smallLoopCount; + ByteBuffer largeValue = ByteBuffer.allocate(multiplier * oneKb); + for (long i = smallLoopCount; i < largeLoopCount; i++) { + store.getAndAppend(i, largeValue.duplicate()); + } + + assertThat(store.getAllocatedMemory(),lessThanOrEqualTo(maxBytes)); + assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo( (smallLoopCount * oneKb) + ( (largeLoopCount - smallLoopCount) * oneKb * multiplier) )); + assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(store.getOccupiedMemory())); + + //asserts above already guarantee that occupiedMemory <= maxBytes and that occupiedMemory <= allocatedMemory + assertThat(store.getOccupiedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + + assertThat(store.getSize(), is(smallLoopCount + (largeLoopCount - smallLoopCount))); + + } + + @Test + public void testEvictionFiresEventsWithChainWhenEvictionIsEnabled() { + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), (long) MEGABYTES.toBytes(1), MEGABYTES.toBytes(1)), new KeySegmentMapper(16), false); + AuditingServerStoreEventListener audit = new AuditingServerStoreEventListener(); + store.setEventListener(audit); + store.enableEvents(true); + + ByteBuffer buffer = ByteBuffer.allocate(KILOBYTES.toBytes(500)); + + store.append(1L, buffer.duplicate()); + store.append(2L, buffer.duplicate()); + store.append(3L, buffer.duplicate()); + + assertThat(store.getSize(), is(1L)); + assertThat(audit.onEviction.size(), is(2)); + assertThat(audit.onEviction.get(0).key, is(1L)); + assertThat(audit.onEviction.get(0).evictedChain, is(notNullValue())); + assertThat(audit.onEviction.get(1).key, is(2L)); + assertThat(audit.onEviction.get(1).evictedChain, is(notNullValue())); + } + + @Test + public void testNoEventFiredWhenDisabled() { + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), (long) MEGABYTES.toBytes(1), MEGABYTES.toBytes(1)), new KeySegmentMapper(16), false); + AuditingServerStoreEventListener audit = new AuditingServerStoreEventListener(); + store.setEventListener(audit); + + store.append(1L, toBuffer(1)); + store.getAndAppend(1L, toBuffer(2)); + + store.enableEvents(true); + store.append(1L, toBuffer(3)); + store.getAndAppend(1L, toBuffer(4)); + + store.enableEvents(false); + store.append(1L, toBuffer(5)); + store.getAndAppend(1L, toBuffer(6)); + + assertThat(audit.onAppend.size(), is(2)); + assertThat(audit.onAppend.get(0).appended.asIntBuffer().get(), is(3)); + assertThat(audit.onAppend.get(1).appended.asIntBuffer().get(), is(4)); + } + + @Test + public void testAppendFiresEvents() { + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), (long) MEGABYTES.toBytes(1), MEGABYTES.toBytes(1)), new KeySegmentMapper(16), false); + AuditingServerStoreEventListener audit = new AuditingServerStoreEventListener(); + store.setEventListener(audit); + store.enableEvents(true); + + store.append(1L, toBuffer(1)); + store.append(1L, toBuffer(2)); + store.append(1L, toBuffer(3)); + + assertThat(audit.onAppend.size(), is(3)); + assertThat(audit.onAppend.get(0).appended.asIntBuffer().get(), is(1)); + assertThat(audit.onAppend.get(1).appended.asIntBuffer().get(), is(2)); + assertThat(audit.onAppend.get(2).appended.asIntBuffer().get(), is(3)); + } + + @Test + public void testGetAndAppendFiresEvents() { + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), (long) MEGABYTES.toBytes(1), MEGABYTES.toBytes(1)), new KeySegmentMapper(16), false); + AuditingServerStoreEventListener audit = new AuditingServerStoreEventListener(); + store.setEventListener(audit); + store.enableEvents(true); + + store.getAndAppend(1L, toBuffer(1)); + store.getAndAppend(1L, toBuffer(2)); + store.getAndAppend(1L, toBuffer(3)); + + assertThat(audit.onAppend.size(), is(3)); + assertThat(audit.onAppend.get(0).appended.asIntBuffer().get(), is(1)); + assertThat(audit.onAppend.get(1).appended.asIntBuffer().get(), is(2)); + assertThat(audit.onAppend.get(2).appended.asIntBuffer().get(), is(3)); + } + + private static class AuditingServerStoreEventListener implements ServerStoreEventListener { + private final List onAppend = new ArrayList<>(); + private final List onEviction = new ArrayList<>(); + @Override + public void onEviction(long key, InternalChain evictedChain) { + onEviction.add(new OnEvictionArgs(key, evictedChain)); + } + @Override + public void onAppend(Chain beforeAppend, ByteBuffer appended) { + onAppend.add(new OnAppendArgs(appended, beforeAppend)); + } + + static class OnEvictionArgs { + OnEvictionArgs(long key, InternalChain evictedChain) { + this.key = key; + this.evictedChain = evictedChain; + } + long key; + InternalChain evictedChain; + } + + static class OnAppendArgs { + OnAppendArgs(ByteBuffer appended, Chain beforeAppend) { + this.appended = appended; + this.beforeAppend = beforeAppend; + } + ByteBuffer appended; + Chain beforeAppend; + } + } + + private static ByteBuffer toBuffer(int i) { + ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES); + buffer.asIntBuffer().put(i); + return buffer; + } +} diff --git a/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/PinningOffHeapChainMapTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/PinningOffHeapChainMapTest.java new file mode 100644 index 0000000000..39b699acf3 --- /dev/null +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/offheap/PinningOffHeapChainMapTest.java @@ -0,0 +1,139 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.offheap; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.operations.OperationCode; +import org.junit.Test; +import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; +import org.terracotta.offheapstore.paging.UnlimitedPageSource; + +import java.nio.ByteBuffer; + +import static org.ehcache.clustered.ChainUtils.chainOf; +import static org.ehcache.clustered.common.internal.store.operations.OperationCode.PUT; +import static org.ehcache.clustered.common.internal.store.operations.OperationCode.PUT_IF_ABSENT; +import static org.ehcache.clustered.common.internal.store.operations.OperationCode.PUT_WITH_WRITER; +import static org.ehcache.clustered.common.internal.store.operations.OperationCode.REMOVE; +import static org.ehcache.clustered.common.internal.store.operations.OperationCode.REMOVE_CONDITIONAL; +import static org.ehcache.clustered.common.internal.store.operations.OperationCode.REPLACE; +import static org.ehcache.clustered.common.internal.store.operations.OperationCode.REPLACE_CONDITIONAL; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class PinningOffHeapChainMapTest { + @Test + public void testAppendWithPinningOperation() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + pinningOffHeapChainMap.append(1L, buffer(PUT_WITH_WRITER)); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(true)); + } + + @Test + public void testAppendWithNormalOperation() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + pinningOffHeapChainMap.append(1L, buffer(PUT)); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(false)); + } + + @Test + public void testGetAndAppendWithPinningOperation() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + pinningOffHeapChainMap.getAndAppend(1L, buffer(REMOVE_CONDITIONAL)); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(true)); + } + + @Test + public void testGetAndAppendWithNormalOperation() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + pinningOffHeapChainMap.getAndAppend(1L, buffer(PUT)); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(false)); + } + + @Test + public void testPutWithPinningChain() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + pinningOffHeapChainMap.put(1L, chainOf(buffer(PUT), buffer(REMOVE))); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(true)); + } + + @Test + public void testPutWithNormalChain() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + pinningOffHeapChainMap.put(1L, chainOf(buffer(PUT), buffer(PUT))); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(false)); + } + + @Test + public void testReplaceAtHeadWithUnpinningChain() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + ByteBuffer buffer = buffer(PUT_IF_ABSENT); + Chain pinningChain = chainOf(buffer); + Chain unpinningChain = chainOf(buffer(PUT)); + + pinningOffHeapChainMap.append(1L, buffer); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(true)); + + pinningOffHeapChainMap.replaceAtHead(1L, pinningChain, unpinningChain); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(false)); + } + + @Test + public void testReplaceAtHeadWithPinningChain() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + ByteBuffer buffer = buffer(REPLACE); + Chain pinningChain = chainOf(buffer); + Chain unpinningChain = chainOf(buffer(REPLACE_CONDITIONAL)); + + pinningOffHeapChainMap.append(1L, buffer); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(true)); + + pinningOffHeapChainMap.replaceAtHead(1L, pinningChain, unpinningChain); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(true)); + } + + @Test + public void testReplaceAtHeadWithEmptyChain() { + PinningOffHeapChainMap pinningOffHeapChainMap = getPinningOffHeapChainMap(); + + ByteBuffer buffer = buffer(PUT_WITH_WRITER); + Chain pinningChain = chainOf(buffer); + Chain unpinningChain = chainOf(); + + pinningOffHeapChainMap.append(1L, buffer); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(true)); + + pinningOffHeapChainMap.replaceAtHead(1L, pinningChain, unpinningChain); + assertThat(pinningOffHeapChainMap.heads.isPinned(1L), is(false)); + } + + private ByteBuffer buffer(OperationCode first) { + return ByteBuffer.wrap(new byte[] { first.getValue() }); + } + + private PinningOffHeapChainMap getPinningOffHeapChainMap() { + return new PinningOffHeapChainMap<>(new UnlimitedPageSource(new OffHeapBufferSource()), LongPortability.INSTANCE, + 4096, 4096, false); + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java similarity index 92% rename from clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java index 311b6d8319..fc1e2881ad 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java @@ -36,14 +36,15 @@ import java.util.Collections; import static java.util.Collections.emptyMap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.sameInstance; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class EhcacheStateServiceProviderTest { @@ -63,21 +64,8 @@ public void setUp() { configuration.getResource().add(resource); OffHeapResources offheapResources = new OffHeapResourcesProvider(configuration); - platformConfiguration = new PlatformConfiguration() { - @Override - public String getServerName() { - return "Server1"; - } - - @Override - public Collection getExtendedConfiguration(Class type) { - if (OffHeapResources.class.isAssignableFrom(type)) { - return Collections.singletonList(type.cast(offheapResources)); - } - throw new UnsupportedOperationException("TODO Implement me!"); - } - }; - + platformConfiguration = mock(PlatformConfiguration.class); + when(platformConfiguration.getExtendedConfiguration(OffHeapResources.class)).thenReturn(Collections.singletonList(offheapResources)); serviceProviderConfiguration = mock(ServiceProviderConfiguration.class); tierManagerConfiguration = new ClusterTierManagerConfiguration("identifier", new ServerSideConfiguration(emptyMap())); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/InvalidationTrackerImplTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/state/InvalidationTrackerImplTest.java similarity index 100% rename from clustered/server/src/test/java/org/ehcache/clustered/server/state/InvalidationTrackerImplTest.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/state/InvalidationTrackerImplTest.java diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/ChainBuilder.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/ChainBuilder.java similarity index 100% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/ChainBuilder.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/ChainBuilder.java diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/ElementBuilder.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/ElementBuilder.java similarity index 100% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/ElementBuilder.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/ElementBuilder.java diff --git a/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/ServerStoreTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/ServerStoreTest.java new file mode 100644 index 0000000000..7450450670 --- /dev/null +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/ServerStoreTest.java @@ -0,0 +1,280 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.store; + + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.ServerStore; +import org.hamcrest.core.Is; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; +import java.util.concurrent.TimeoutException; + +import static org.ehcache.clustered.ChainUtils.createPayload; +import static org.ehcache.clustered.ChainUtils.readPayload; +import static org.ehcache.clustered.Matchers.hasPayloads; +import static java.util.stream.LongStream.range; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsCollectionContaining.hasItem; +import static org.hamcrest.core.IsCollectionContaining.hasItems; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.fail; + +/** + * Verify Server Store + */ +public abstract class ServerStoreTest { + + public abstract ServerStore newStore(); + + public abstract ChainBuilder newChainBuilder(); + + public abstract ElementBuilder newElementBuilder(); + + private final ChainBuilder chainBuilder = newChainBuilder(); + private final ElementBuilder elementBuilder = newElementBuilder(); + + private static void populateStore(ServerStore store) throws Exception { + for(int i = 1 ; i <= 16; i++) { + store.append(i, createPayload(i)); + } + } + + @Test + public void testGetNoMappingExists() throws Exception { + ServerStore store = newStore(); + Chain chain = store.get(1); + assertThat(chain.isEmpty(), is(true)); + assertThat(chain.iterator().hasNext(), is(false)); + } + + @Test + public void testGetMappingExists() throws Exception { + ServerStore store = newStore(); + populateStore(store); + Chain chain = store.get(1L); + assertThat(chain.isEmpty(), is(false)); + assertThat(chain, hasPayloads(1L)); + } + + @Test + public void testAppendNoMappingExists() throws Exception { + ServerStore store = newStore(); + store.append(1L, createPayload(1L)); + Chain chain = store.get(1L); + assertThat(chain.isEmpty(), is(false)); + assertThat(chain, hasPayloads(1L)); + } + + @Test + public void testAppendMappingExists() throws Exception { + ServerStore store = newStore(); + populateStore(store); + store.append(2L, createPayload(22L)); + Chain chain = store.get(2L); + assertThat(chain.isEmpty(), is(false)); + assertThat(chain, hasPayloads(2L, 22L)); + } + + @Test + public void testGetAndAppendNoMappingExists() throws Exception { + ServerStore store = newStore(); + Chain chain = store.getAndAppend(1, createPayload(1)); + assertThat(chain.isEmpty(), is(true)); + chain = store.get(1); + assertThat(chain, hasPayloads(1L)); + } + + @Test + public void testGetAndAppendMappingExists() throws Exception { + ServerStore store = newStore(); + populateStore(store); + Chain chain = store.getAndAppend(1, createPayload(22)); + for (Element element : chain) { + assertThat(readPayload(element.getPayload()), is(Long.valueOf(1))); + } + chain = store.get(1); + assertThat(chain, hasPayloads(1, 22)); + } + + @Test + public void testReplaceAtHeadSucceedsMappingExistsHeadMatchesStrictly() throws Exception { + ServerStore store = newStore(); + populateStore(store); + Chain existingMapping = store.get(1); + + store.replaceAtHead(1, existingMapping, chainBuilder.build(elementBuilder.build(createPayload(11)))); + Chain chain = store.get(1); + assertThat(chain, hasPayloads(11)); + + store.append(2, createPayload(22)); + store.append(2, createPayload(222)); + + existingMapping = store.get(2); + + store.replaceAtHead(2, existingMapping, chainBuilder.build(elementBuilder.build(createPayload(2222)))); + + chain = store.get(2); + + assertThat(chain, hasPayloads(2222)); + } + + @Test + public void testReplaceAtHeadSucceedsMappingExistsHeadMatches() throws Exception { + ServerStore store = newStore(); + populateStore(store); + + Chain existingMapping = store.get(1); + + store.append(1, createPayload(11)); + + store.replaceAtHead(1, existingMapping, chainBuilder.build(elementBuilder.build(createPayload(111)))); + Chain chain = store.get(1); + + assertThat(chain, hasPayloads(111, 11)); + + store.append(2, createPayload(22)); + existingMapping = store.get(2); + + store.append(2, createPayload(222)); + + store.replaceAtHead(2, existingMapping, chainBuilder.build(elementBuilder.build(createPayload(2222)))); + + chain = store.get(2); + assertThat(chain, hasPayloads(2222, 222)); + } + + @Test + public void testReplaceAtHeadIgnoredMappingExistsHeadMisMatch() throws Exception { + ServerStore store = newStore(); + populateStore(store); + + store.append(1, createPayload(11)); + store.append(1, createPayload(111)); + + Chain mappingReadFirst = store.get(1); + store.replaceAtHead(1, mappingReadFirst, chainBuilder.build(elementBuilder.build(createPayload(111)))); + + Chain current = store.get(1); + assertThat(current, hasPayloads(111)); + + store.append(1, createPayload(1111)); + store.replaceAtHead(1, mappingReadFirst, chainBuilder.build(elementBuilder.build(createPayload(11111)))); + + Chain toVerify = store.get(1); + + assertThat(toVerify, hasPayloads(111, 1111)); + } + + @Test + public void test_append_doesNotConsumeBuffer() throws Exception { + ServerStore store = newStore(); + ByteBuffer payload = createPayload(1L); + + store.append(1L, payload); + assertThat(payload.remaining(), Is.is(8)); + } + + @Test + public void test_getAndAppend_doesNotConsumeBuffer() throws Exception { + ServerStore store = newStore(); + ByteBuffer payload = createPayload(1L); + + store.getAndAppend(1L, payload); + assertThat(payload.remaining(), Is.is(8)); + } + + @Test + public void test_replaceAtHead_doesNotConsumeBuffer() { + ServerStore store = newStore(); + ByteBuffer payload = createPayload(1L); + + Chain expected = newChainBuilder().build(newElementBuilder().build(payload), newElementBuilder().build(payload)); + Chain update = newChainBuilder().build(newElementBuilder().build(payload)); + store.replaceAtHead(1L, expected, update); + assertThat(payload.remaining(), Is.is(8)); + } + + @Test + public void testEmptyIterator() throws TimeoutException { + ServerStore store = newStore(); + + Iterator> chainIterator = store.iterator(); + + assertThat(chainIterator.hasNext(), Is.is(false)); + try { + chainIterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testSingleElementIterator() throws TimeoutException { + ServerStore store = newStore(); + + store.append(1L, createPayload(42L)); + Iterator> chainIterator = store.iterator(); + + assertThat(chainIterator.hasNext(), is(true)); + Map.Entry next = chainIterator.next(); + assertThat(next.getKey(), is(1L)); + assertThat(next.getValue(), hasPayloads(42L)); + assertThat(chainIterator.hasNext(), is(false)); + try { + chainIterator.next(); + fail("Expected NoSuchElementException"); + } catch (NoSuchElementException e) { + //expected + } + } + + @Test + public void testHeavilyPopulatedIterator() throws TimeoutException { + ServerStore store = newStore(); + + range(0, 100).forEach(k -> { + try { + store.append(k, createPayload(k)); + } catch (TimeoutException e) { + throw new AssertionError(); + } + }); + + Iterator> chainIterator = store.iterator(); + + Set longs = new HashSet<>(); + while (chainIterator.hasNext()) { + Map.Entry chain = chainIterator.next(); + for (Element e: chain.getValue()) { + long l = readPayload(e.getPayload()); + assertThat(longs, not(hasItem(l))); + longs.add(l); + } + } + + assertThat(longs, hasItems(range(0, 100).boxed().toArray(Long[]::new))); + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainBuilder.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainBuilder.java similarity index 100% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainBuilder.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainBuilder.java diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainImpl.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainImpl.java similarity index 82% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainImpl.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainImpl.java index 7d44ba5a36..cf4dd0a71a 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainImpl.java +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapChainImpl.java @@ -38,11 +38,6 @@ public HeapChainImpl(Element... elements) { createChain(elements); } - @Override - public Iterator reverseIterator() { - return new ReverseChainIterator(); - } - @Override public boolean isEmpty() { return first == null; @@ -135,37 +130,6 @@ public Element next() { this.current = this.current.nextLink; return temp.element; } - - @Override - public void remove() { - throw new UnsupportedOperationException("Remove operation is not supported"); - } - } - - private class ReverseChainIterator implements Iterator { - - private Node current; - - ReverseChainIterator() { - this.current = last; - } - - @Override - public boolean hasNext() { - return current != null; - } - - @Override - public Element next() { - Node temp = this.current; - this.current = this.current.prevLink; - return temp.element; - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Remove operation is not supported"); - } } private static class Node { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementBuilder.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementBuilder.java similarity index 100% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementBuilder.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementBuilder.java diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementImpl.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementImpl.java similarity index 96% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementImpl.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementImpl.java index c4fef8a0cc..db1d88ce18 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementImpl.java +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/HeapElementImpl.java @@ -38,6 +38,6 @@ public long getSequenceNumber() { @Override public ByteBuffer getPayload() { - return this.data.duplicate(); + return this.data.asReadOnlyBuffer(); } } diff --git a/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreImpl.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreImpl.java new file mode 100644 index 0000000000..8a63f587bd --- /dev/null +++ b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreImpl.java @@ -0,0 +1,105 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.store.impl; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.ServerStore; + +import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Implements {@link ServerStore} + */ +public class ReferenceStoreImpl implements ServerStore { + + private final ConcurrentMap map = new ConcurrentHashMap<>(); + private final AtomicLong sequenceGenerator = new AtomicLong(); + + @Override + public Chain get(long key) { + return map.getOrDefault(key, new HeapChainImpl()); + } + + @Override + public void append(long key, ByteBuffer payLoad) { + getAndAppend(key, payLoad); + } + + @Override + public Chain getAndAppend(long key, ByteBuffer payLoad) { + while (true) { + Chain existing = map.get(key); + if (existing == null) { + if (map.putIfAbsent(key, new HeapChainImpl(new HeapElementImpl(sequenceGenerator.incrementAndGet(), payLoad))) == null) { + return new HeapChainImpl(); + } + } else { + if (map.replace(key, existing, cast(existing).append(new HeapElementImpl(sequenceGenerator.incrementAndGet(), payLoad)))) { + return existing; + } + } + } + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + map.computeIfPresent(key, (k, existing) -> { + Iterator current = existing.iterator(); + Iterator expected = expect.iterator(); + while (expected.hasNext()) { + if (current.hasNext()) { + HeapElementImpl expectedLink = (HeapElementImpl)expected.next(); + if (expectedLink.getSequenceNumber() != ((HeapElementImpl)current.next()).getSequenceNumber()) { + return existing; + } + } else { + return existing; + } + } + + List elements = new LinkedList<>(); + for (Element element : update) { + elements.add(element); + } + while(current.hasNext()) { + elements.add(current.next()); + } + return new HeapChainImpl(elements.toArray(new Element[elements.size()])); + }); + } + + @Override + public void clear() { + map.clear(); + } + + @Override + public Iterator> iterator() { + return map.entrySet().iterator(); + } + + private HeapChainImpl cast(Chain chain) { + return (HeapChainImpl)chain; + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreTest.java b/clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreTest.java similarity index 100% rename from clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreTest.java rename to clustered/server/ehcache-service/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreTest.java diff --git a/clustered/server/gradle.properties b/clustered/server/gradle.properties deleted file mode 100644 index 37b82b87c9..0000000000 --- a/clustered/server/gradle.properties +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 Server Side Clustering module -subPomDesc = The Server Side Clustering module of Ehcache 3 diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreEvictionListener.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreEvictionListener.java deleted file mode 100644 index dd1f162663..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreEvictionListener.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server; - -import com.tc.classloader.CommonComponent; - -/** - * ServerStore eviction listener interface - */ -@CommonComponent -public interface ServerStoreEvictionListener { - /** - * Called when the ServerStore evicts a mapping - * @param key the key of the evicted mapping - */ - void onEviction(long key); -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java deleted file mode 100644 index 724bd30289..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server; - -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.server.offheap.OffHeapServerStore; -import org.ehcache.clustered.server.state.ResourcePageSource; -import org.terracotta.offheapstore.paging.PageSource; - -import com.tc.classloader.CommonComponent; - -import java.nio.ByteBuffer; -import java.util.AbstractList; -import java.util.List; -import java.util.Set; - -@CommonComponent -public class ServerStoreImpl implements ServerSideServerStore { - - private final ServerStoreConfiguration storeConfiguration; - private final ResourcePageSource pageSource; - private final OffHeapServerStore store; - - public ServerStoreImpl(ServerStoreConfiguration storeConfiguration, ResourcePageSource pageSource, KeySegmentMapper mapper) { - this.storeConfiguration = storeConfiguration; - this.pageSource = pageSource; - this.store = new OffHeapServerStore(pageSource, mapper); - } - - public void setEvictionListener(ServerStoreEvictionListener listener) { - store.setEvictionListener(listener); - } - - /** - * Gets the {@link PageSource} providing storage for this {@code ServerStore}. - * - * @return the {@code PageSource} used by this {@code ServerStore} - */ - public PageSource getPageSource() { - return pageSource; - } - - public ServerStoreConfiguration getStoreConfiguration() { - return storeConfiguration; - } - - @Override - public Chain get(long key) { - return store.get(key); - } - - @Override - public void append(long key, ByteBuffer payLoad) { - store.append(key, payLoad); - } - - @Override - public Chain getAndAppend(long key, ByteBuffer payLoad) { - return store.getAndAppend(key, payLoad); - } - - @Override - public void replaceAtHead(long key, Chain expect, Chain update) { - store.replaceAtHead(key, expect, update); - } - - public void put(long key, Chain chain) { - store.put(key, chain); - } - - @Override - public void clear() { - store.clear(); - } - - public void close() { - store.close(); - } - - @Override - public List> getSegmentKeySets() { - - return new AbstractList>() { - @Override - public Set get(int index) { - return store.getSegments().get(index).keySet(); - } - @Override - public int size() { - return store.getSegments().size(); - } - }; - } - - // stats - - - @Override - public long getSize() { - return store.getSize(); - } - - @Override - public long getTableCapacity() { - return store.getTableCapacity(); - } - - @Override - public long getUsedSlotCount() { - return store.getUsedSlotCount(); - } - - @Override - public long getRemovedSlotCount() { - return store.getRemovedSlotCount(); - } - - @Override - public long getAllocatedMemory() { - return store.getAllocatedMemory(); - } - - @Override - public long getOccupiedMemory() { - return store.getOccupiedMemory(); - } - - @Override - public long getVitalMemory() { - return store.getVitalMemory(); - } - - @Override - public long getDataAllocatedMemory() { - return store.getDataAllocatedMemory(); - } - - @Override - public long getDataOccupiedMemory() { - return store.getDataOccupiedMemory(); - } - - @Override - public long getDataVitalMemory() { - return store.getDataVitalMemory(); - } - - @Override - public long getDataSize() { - return store.getDataSize(); - } - - @Override - public int getReprobeLength() { - //TODO - //MapInternals Interface may need to change to implement this function correctly. - //Currently MapInternals Interface contains function: int getReprobeLength(); - //however OffHeapServerStore.reprobeLength() returns a long - //Thus there could be data loss - - throw new UnsupportedOperationException("Not supported yet."); - } -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessage.java deleted file mode 100644 index c515efcca4..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheMessageTrackerMessage.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server.internal.messages; - -import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.terracotta.client.message.tracker.OOOMessageHandler; -import org.terracotta.entity.ClientSourceId; - -import java.util.Map; - -import static java.util.stream.Collectors.toMap; - -/** - * Message sending messages that are tracked for duplication. If a passive becoming active receives - * a duplicate, it needs to discard it. - */ -public class EhcacheMessageTrackerMessage extends EhcacheSyncMessage { - - private final int segmentId; - private final Map> trackedMessages; - - public EhcacheMessageTrackerMessage(int segmentId, Map> trackedMessages) { - this.segmentId = segmentId; - this.trackedMessages = trackedMessages; - } - - public EhcacheMessageTrackerMessage(int segmentId, OOOMessageHandler messageHandler) { - this(segmentId, messageHandler.getTrackedClients() - .collect(toMap(ClientSourceId::toLong, clientSourceId -> messageHandler.getTrackedResponsesForSegment(segmentId, clientSourceId)))); - } - - @Override - public SyncMessageType getMessageType() { - return SyncMessageType.MESSAGE_TRACKER; - } - - public Map> getTrackedMessages() { - return trackedMessages; - } - - public int getSegmentId() { - return segmentId; - } -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java deleted file mode 100644 index 743089b0b9..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server.internal.messages; - -import org.ehcache.clustered.common.internal.messages.EhcacheCodec; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.entity.MessageCodec; -import org.terracotta.entity.MessageCodecException; -import org.terracotta.runnel.decoding.Enm; - -import java.nio.ByteBuffer; - -import static java.nio.ByteBuffer.wrap; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; - -/** - * EhcacheServerCodec - */ -public class EhcacheServerCodec implements MessageCodec { - - private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheServerCodec.class); - - private final EhcacheCodec clientCodec; - private final PassiveReplicationMessageCodec replicationCodec; - - public EhcacheServerCodec(EhcacheCodec clientCodec, PassiveReplicationMessageCodec replicationCodec) { - this.clientCodec = clientCodec; - this.replicationCodec = replicationCodec; - } - - @Override - public byte[] encodeMessage(EhcacheEntityMessage message) throws MessageCodecException { - if (message instanceof PassiveReplicationMessage) { - return replicationCodec.encode((PassiveReplicationMessage) message); - } - return clientCodec.encodeMessage(message); - } - - @Override - public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecException { - ByteBuffer byteBuffer = wrap(payload); - Enm opCodeEnm = EhcacheCodec.OP_CODE_DECODER.decoder(byteBuffer).enm("opCode"); - if (!opCodeEnm.isFound()) { - throw new AssertionError("Got a message without an opCode"); - } - if (!opCodeEnm.isValid()) { - LOGGER.warn("Received message with unknown operation code - more recent version at the other end?"); - return null; - } - - byteBuffer.rewind(); - - EhcacheMessageType messageType = opCodeEnm.get(); - if (isPassiveReplicationMessage(messageType)) { - return replicationCodec.decode(messageType, byteBuffer); - } - return clientCodec.decodeMessage(byteBuffer, messageType); - } - - @Override - public byte[] encodeResponse(EhcacheEntityResponse response) throws MessageCodecException { - return clientCodec.encodeResponse(response); - } - - @Override - public EhcacheEntityResponse decodeResponse(byte[] payload) throws MessageCodecException { - return clientCodec.decodeResponse(payload); - } -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java deleted file mode 100644 index c6a60d6155..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server.internal.messages; - -import org.terracotta.runnel.EnumMapping; - -import com.tc.classloader.CommonComponent; - -import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; - -/** - * SyncMessageType - */ -@CommonComponent -public enum SyncMessageType { - STATE_REPO, - DATA, - MESSAGE_TRACKER; - - public static final String SYNC_MESSAGE_TYPE_FIELD_NAME = "msgType"; - public static final int SYNC_MESSAGE_TYPE_FIELD_INDEX = 10; - public static final EnumMapping SYNC_MESSAGE_TYPE_MAPPING = newEnumMappingBuilder(SyncMessageType.class) - .mapping(STATE_REPO, 1) - .mapping(DATA, 10) - .mapping(MESSAGE_TRACKER, 20) - .build(); -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagement.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagement.java deleted file mode 100644 index b7f7dc945a..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClusterTierManagement.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server.management; - -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.clustered.server.ServerSideServerStore; -import org.ehcache.clustered.server.state.EhcacheStateService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.entity.ConfigurationException; -import org.terracotta.entity.ServiceException; -import org.terracotta.entity.ServiceRegistry; -import org.terracotta.management.service.monitoring.EntityManagementRegistry; -import org.terracotta.management.service.monitoring.EntityManagementRegistryConfiguration; - -import java.io.Closeable; -import java.util.concurrent.CompletableFuture; - -import static org.ehcache.clustered.server.management.Notification.EHCACHE_SERVER_STORE_CREATED; - -public class ClusterTierManagement implements Closeable { - - private static final Logger LOGGER = LoggerFactory.getLogger(ClusterTierManagement.class); - - private final EntityManagementRegistry managementRegistry; - private final EhcacheStateService ehcacheStateService; - private final String storeIdentifier; - - public ClusterTierManagement(ServiceRegistry services, EhcacheStateService ehcacheStateService, boolean active, String storeIdentifier, String clusterTierManagerIdentifier) throws ConfigurationException { - this.ehcacheStateService = ehcacheStateService; - this.storeIdentifier = storeIdentifier; - - // create an entity monitoring service that allows this entity to push some management information into voltron monitoring service - try { - managementRegistry = services.getService(new EntityManagementRegistryConfiguration(services, active)); - } catch (ServiceException e) { - throw new ConfigurationException("Unable to retrieve service: " + e.getMessage()); - } - - if (managementRegistry != null) { - // expose settings about server stores - managementRegistry.addManagementProvider(new ServerStoreSettingsManagementProvider(clusterTierManagerIdentifier)); - // expose settings about pools - managementRegistry.addManagementProvider(new PoolSettingsManagementProvider()); - - // expose stats about server stores - managementRegistry.addManagementProvider(new ServerStoreStatisticsManagementProvider()); - // expose stats about pools - managementRegistry.addManagementProvider(new PoolStatisticsManagementProvider(ehcacheStateService)); - } - } - - @Override - public void close() { - if (managementRegistry != null) { - managementRegistry.close(); - } - } - - public void reload() { - if (managementRegistry != null) { - managementRegistry.entityPromotionCompleted(); - init(); - } - } - - // the goal of the following code is to send the management metadata from the entity into the monitoring tree AFTER the entity creation - public void init() { - if (managementRegistry != null) { - LOGGER.trace("init({})", storeIdentifier); - ServerSideServerStore serverStore = ehcacheStateService.getStore(storeIdentifier); - ServerStoreBinding serverStoreBinding = new ServerStoreBinding(storeIdentifier, serverStore); - CompletableFuture r1 = managementRegistry.register(serverStoreBinding); - ServerSideConfiguration.Pool pool = ehcacheStateService.getDedicatedResourcePool(storeIdentifier); - CompletableFuture allOf; - if (pool != null) { - allOf = CompletableFuture.allOf(r1, managementRegistry.register(new PoolBinding(storeIdentifier, pool, PoolBinding.AllocationType.DEDICATED))); - } else { - allOf = r1; - } - allOf.thenRun(() -> { - managementRegistry.refresh(); - managementRegistry.pushServerEntityNotification(serverStoreBinding, EHCACHE_SERVER_STORE_CREATED.name()); - }); - } - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java deleted file mode 100644 index 88b953730c..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.management; - -import org.ehcache.clustered.common.PoolAllocation; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.Settings; -import org.terracotta.management.model.context.Context; -import org.terracotta.management.registry.Named; -import org.terracotta.management.registry.RequiredContext; -import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; - -@Named("ServerStoreSettings") -@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) -class ServerStoreSettingsManagementProvider extends AliasBindingManagementProvider { - - private final String clusterTierManagerIdentifier; - - ServerStoreSettingsManagementProvider(String clusterTierManagerIdentifier) { - super(ServerStoreBinding.class); - this.clusterTierManagerIdentifier = clusterTierManagerIdentifier; - } - - @Override - public Collection getDescriptors() { - Collection descriptors = new ArrayList<>(super.getDescriptors()); - descriptors.add(new Settings() - .set("type", getCapabilityName()) - .set("clusterTierManager", clusterTierManagerIdentifier) - .set("time", System.currentTimeMillis())); - return descriptors; - } - - @Override - protected ExposedServerStoreBinding internalWrap(Context context, ServerStoreBinding managedObject) { - return new ExposedServerStoreBinding(context, managedObject); - } - - private static class ExposedServerStoreBinding extends ExposedAliasBinding { - - ExposedServerStoreBinding(Context context, ServerStoreBinding binding) { - super(context.with("type", "ServerStore"), binding); - } - - @Override - public Collection getDescriptors() { - return Collections.singleton(getSettings()); - } - - Settings getSettings() { - // names taken from ServerStoreConfiguration.isCompatible() - PoolAllocation poolAllocation = getBinding().getValue().getStoreConfiguration().getPoolAllocation(); - Settings settings = new Settings(getContext()) - .set("resourcePoolType", poolAllocation.getClass().getSimpleName().toLowerCase()) - .set("allocatedMemoryAtTime", getBinding().getValue().getAllocatedMemory()) - .set("tableCapacityAtTime", getBinding().getValue().getTableCapacity()) - .set("vitalMemoryAtTime", getBinding().getValue().getVitalMemory()) - .set("longSizeAtTime", getBinding().getValue().getSize()) - .set("dataAllocatedMemoryAtTime", getBinding().getValue().getDataAllocatedMemory()) - .set("dataOccupiedMemoryAtTime", getBinding().getValue().getDataOccupiedMemory()) - .set("dataSizeAtTime", getBinding().getValue().getDataSize()) - .set("dataVitalMemoryAtTime", getBinding().getValue().getDataVitalMemory()); - if (poolAllocation instanceof PoolAllocation.DedicatedPoolAllocation) { - settings.set("resourcePoolDedicatedResourceName", ((PoolAllocation.DedicatedPoolAllocation) poolAllocation).getResourceName()); - settings.set("resourcePoolDedicatedSize", ((PoolAllocation.DedicatedPoolAllocation) poolAllocation).getSize()); - } else if (poolAllocation instanceof PoolAllocation.SharedPoolAllocation) { - settings.set("resourcePoolSharedPoolName", ((PoolAllocation.SharedPoolAllocation) poolAllocation).getResourcePoolName()); - } - return settings; - } - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java deleted file mode 100644 index 496d37a14c..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.management; - -import org.terracotta.management.model.context.Context; -import org.terracotta.management.registry.Named; -import org.terracotta.management.registry.RequiredContext; -import org.terracotta.management.registry.collect.StatisticProvider; -import org.terracotta.management.registry.collect.StatisticRegistry; -import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; -import org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; - -import java.util.HashSet; -import java.util.Set; - -import static java.util.Arrays.asList; -import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; - -@Named("ServerStoreStatistics") -@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) -@StatisticProvider -class ServerStoreStatisticsManagementProvider extends AbstractStatisticsManagementProvider { - - ServerStoreStatisticsManagementProvider() { - super(ServerStoreBinding.class); - } - - @Override - protected AbstractExposedStatistics internalWrap(Context context, ServerStoreBinding managedObject, StatisticRegistry statisticRegistry) { - return new ServerStoreExposedStatistics(context, managedObject, statisticRegistry); - } - - private static class ServerStoreExposedStatistics extends AbstractExposedStatistics { - - ServerStoreExposedStatistics(Context context, ServerStoreBinding binding, StatisticRegistry statisticRegistry) { - super(context.with("type", "ServerStore"), binding, statisticRegistry); - - getRegistry().registerSize("AllocatedMemory", descriptor("allocatedMemory", tags("tier", "Store"))); - getRegistry().registerSize("DataAllocatedMemory", descriptor("dataAllocatedMemory", tags("tier", "Store"))); - getRegistry().registerSize("OccupiedMemory", descriptor("occupiedMemory", tags("tier", "Store"))); - getRegistry().registerSize("DataOccupiedMemory", descriptor("dataOccupiedMemory", tags("tier", "Store"))); - getRegistry().registerCounter("Entries", descriptor("entries", tags("tier", "Store"))); - getRegistry().registerCounter("UsedSlotCount", descriptor("usedSlotCount", tags("tier", "Store"))); - getRegistry().registerSize("DataVitalMemory", descriptor("dataVitalMemory", tags("tier", "Store"))); - getRegistry().registerSize("VitalMemory", descriptor("vitalMemory", tags("tier", "Store"))); - getRegistry().registerCounter("RemovedSlotCount", descriptor("removedSlotCount", tags("tier", "Store"))); - getRegistry().registerSize("DataSize", descriptor("dataSize", tags("tier", "Store"))); - getRegistry().registerSize("TableCapacity", descriptor("tableCapacity", tags("tier", "Store"))); - } - - } - - private static Set tags(String... tags) {return new HashSet<>(asList(tags));} - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java deleted file mode 100644 index b11880f35e..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java +++ /dev/null @@ -1,363 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.offheap; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.locks.Lock; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.common.internal.store.Util; -import org.terracotta.offheapstore.MapInternals; - -import org.terracotta.offheapstore.ReadWriteLockedOffHeapClockCache; -import org.terracotta.offheapstore.eviction.EvictionListener; -import org.terracotta.offheapstore.eviction.EvictionListeningReadWriteLockedOffHeapClockCache; -import org.terracotta.offheapstore.exceptions.OversizeMappingException; -import org.terracotta.offheapstore.paging.PageSource; -import org.terracotta.offheapstore.storage.portability.Portability; - -public class OffHeapChainMap implements MapInternals { - - interface ChainMapEvictionListener { - void onEviction(K key); - } - - private final ReadWriteLockedOffHeapClockCache heads; - private final OffHeapChainStorageEngine chainStorage; - private volatile ChainMapEvictionListener evictionListener; - - public OffHeapChainMap(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean shareByThieving) { - this.chainStorage = new OffHeapChainStorageEngine<>(source, keyPortability, minPageSize, maxPageSize, shareByThieving, shareByThieving); - EvictionListener listener = callable -> { - try { - Map.Entry entry = callable.call(); - try { - if (evictionListener != null) { - evictionListener.onEviction(entry.getKey()); - } - } finally { - entry.getValue().close(); - } - } catch (Exception e) { - throw new AssertionError(e); - } - }; - - //TODO: EvictionListeningReadWriteLockedOffHeapClockCache lacks ctor that takes shareByThieving - // this.heads = new ReadWriteLockedOffHeapClockCache(source, shareByThieving, chainStorage); - this.heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(listener, source, chainStorage); - } - - //For tests - OffHeapChainMap(ReadWriteLockedOffHeapClockCache heads, OffHeapChainStorageEngine chainStorage) { - this.chainStorage = chainStorage; - this.heads = heads; - } - - void setEvictionListener(ChainMapEvictionListener listener) { - evictionListener = listener; - } - - public Chain get(K key) { - final Lock lock = heads.readLock(); - lock.lock(); - try { - InternalChain chain = heads.get(key); - if (chain == null) { - return EMPTY_CHAIN; - } else { - try { - return chain.detach(); - } finally { - chain.close(); - } - } - } finally { - lock.unlock(); - } - } - - public Chain getAndAppend(K key, ByteBuffer element) { - final Lock lock = heads.writeLock(); - lock.lock(); - try { - while (true) { - InternalChain chain = heads.get(key); - if (chain == null) { - heads.put(key, chainStorage.newChain(element)); - return EMPTY_CHAIN; - } else { - try { - Chain current = chain.detach(); - if (chain.append(element)) { - return current; - } else { - evict(); - } - } finally { - chain.close(); - } - } - } - } finally { - lock.unlock(); - } - } - - public void append(K key, ByteBuffer element) { - final Lock lock = heads.writeLock(); - lock.lock(); - try { - while (true) { - InternalChain chain = heads.get(key); - if (chain == null) { - heads.put(key, chainStorage.newChain(element)); - return; - } else { - try { - if (chain.append(element)) { - return; - } else { - evict(); - } - } finally { - chain.close(); - } - } - } - } finally { - lock.unlock(); - } - - } - - public void replaceAtHead(K key, Chain expected, Chain replacement) { - final Lock lock = heads.writeLock(); - lock.lock(); - try { - while (true) { - InternalChain chain = heads.get(key); - if (chain == null) { - if (expected.isEmpty()) { - throw new IllegalArgumentException("Empty expected sequence"); - } else { - return; - } - } else { - try { - if (chain.replace(expected, replacement)) { - return; - } else { - evict(); - } - } finally { - chain.close(); - } - } - } - } finally { - lock.unlock(); - } - } - - public void put(K key, Chain chain) { - final Lock lock = heads.writeLock(); - lock.lock(); - try { - InternalChain current = heads.get(key); - if (current != null) { - try { - replaceAtHead(key, current.detach(), chain); - } finally { - current.close(); - } - } else { - for (Element x : chain) { - append(key, x.getPayload()); - } - } - } finally { - lock.unlock(); - } - } - - public void clear() { - heads.writeLock().lock(); - try { - this.heads.clear(); - } finally { - heads.writeLock().unlock(); - } - } - - public Set keySet() { - heads.writeLock().lock(); - try { - return heads.keySet(); - } finally { - heads.writeLock().unlock(); - } - } - - private void evict() { - int evictionIndex = heads.getEvictionIndex(); - if (evictionIndex < 0) { - StringBuilder sb = new StringBuilder("Storage Engine and Eviction Failed - Everything Pinned ("); - sb.append(getSize()).append(" mappings) \n").append("Storage Engine : ").append(chainStorage); - throw new OversizeMappingException(sb.toString()); - } else { - heads.evict(evictionIndex, false); - } - } - - private static final Chain EMPTY_CHAIN = new Chain() { - @Override - public Iterator reverseIterator() { - return Collections.emptyList().iterator(); - } - - @Override - public boolean isEmpty() { - return true; - } - - @Override - public int length() { - return 0; - } - - @Override - public Iterator iterator() { - return Collections.emptyList().iterator(); - } - }; - - public static Chain chain(ByteBuffer... buffers) { - final List list = new ArrayList<>(); - for (ByteBuffer b : buffers) { - list.add(element(b)); - } - - return new Chain() { - - final List elements = Collections.unmodifiableList(list); - - @Override - public Iterator iterator() { - return elements.iterator(); - } - - @Override - public Iterator reverseIterator() { - return Util.reverseIterator(elements); - } - - @Override - public boolean isEmpty() { - return elements.isEmpty(); - } - - @Override - public int length() { - return elements.size(); - } - }; - } - - private static Element element(final ByteBuffer b) { - return b::asReadOnlyBuffer; - } - - @Override - public long getSize() { - return heads.getSize(); - } - - @Override - public long getTableCapacity() { - return heads.getTableCapacity(); - } - - @Override - public long getUsedSlotCount() { - return heads.getUsedSlotCount(); - } - - @Override - public long getRemovedSlotCount() { - return heads.getRemovedSlotCount(); - } - - @Override - public int getReprobeLength() { - return heads.getReprobeLength(); - } - - @Override - public long getAllocatedMemory() { - return heads.getAllocatedMemory(); - } - - @Override - public long getOccupiedMemory() { - return heads.getOccupiedMemory(); - } - - @Override - public long getVitalMemory() { - return heads.getVitalMemory(); - } - - @Override - public long getDataAllocatedMemory() { - return heads.getDataAllocatedMemory(); - } - - @Override - public long getDataOccupiedMemory() { - return heads.getDataOccupiedMemory(); - } - - @Override - public long getDataVitalMemory() { - return heads.getDataVitalMemory(); - } - - @Override - public long getDataSize() { - return heads.getDataSize(); - } - - boolean shrink() { - return heads.shrink(); - } - - Lock writeLock() { - return heads.writeLock(); - } - - protected void storageEngineFailure(Object failure) { - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java deleted file mode 100644 index 3688ae746a..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java +++ /dev/null @@ -1,582 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.offheap; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.locks.Lock; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.common.internal.store.SequencedElement; -import org.ehcache.clustered.common.internal.store.Util; -import org.terracotta.offheapstore.paging.OffHeapStorageArea; -import org.terracotta.offheapstore.paging.PageSource; -import org.terracotta.offheapstore.storage.PointerSize; -import org.terracotta.offheapstore.storage.StorageEngine; -import org.terracotta.offheapstore.storage.portability.Portability; - -import static java.util.Collections.unmodifiableList; - -class OffHeapChainStorageEngine implements StorageEngine { - - private static final int ELEMENT_HEADER_SEQUENCE_OFFSET = 0; - private static final int ELEMENT_HEADER_LENGTH_OFFSET = 8; - private static final int ELEMENT_HEADER_NEXT_OFFSET = 12; - private static final int ELEMENT_HEADER_SIZE = 20; - - private static final int CHAIN_HEADER_KEY_LENGTH_OFFSET = 0; - private static final int CHAIN_HEADER_KEY_HASH_OFFSET = 4; - private static final int CHAIN_HEADER_TAIL_OFFSET = 8; - private static final int CHAIN_HEADER_SIZE = 16; - - private final OffHeapStorageArea storage; - private final Portability keyPortability; - private final Set activeChains = Collections.newSetFromMap(new ConcurrentHashMap()); - - private StorageEngine.Owner owner; - private long nextSequenceNumber = 0; - - public OffHeapChainStorageEngine(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean thief, boolean victim) { - this.storage = new OffHeapStorageArea(PointerSize.LONG, new StorageOwner(), source, minPageSize, maxPageSize, thief, victim); - this.keyPortability = keyPortability; - } - - //For tests - Set getActiveChains() { - return this.activeChains; - } - - InternalChain newChain(ByteBuffer element) { - return new PrimordialChain(element); - } - - @Override - public Long writeMapping(K key, InternalChain value, int hash, int metadata) { - if (value instanceof PrimordialChain) { - return createAttachedChain(key, hash, (PrimordialChain) value); - } else { - throw new AssertionError("only detached internal chains should be initially written"); - } - } - - @Override - public void attachedMapping(long encoding, int hash, int metadata) { - //nothing - } - - @Override - public void freeMapping(long encoding, int hash, boolean removal) { - try (AttachedInternalChain chain = new AttachedInternalChain(encoding)) { - chain.free(); - } - } - - @Override - public InternalChain readValue(long encoding) { - return new AttachedInternalChain(encoding); - } - - @Override - public boolean equalsValue(Object value, long encoding) { - try (AttachedInternalChain chain = new AttachedInternalChain(encoding)) { - return chain.equals(value); - } - } - - @SuppressWarnings("unchecked") - @Override - public K readKey(long encoding, int hashCode) { - return (K) keyPortability.decode(readKeyBuffer(encoding)); - } - - @Override - public boolean equalsKey(Object key, long encoding) { - return keyPortability.equals(key, readKeyBuffer(encoding)); - } - - private ByteBuffer readKeyBuffer(long encoding) { - int keyLength = readKeySize(encoding); - int elemLength = storage.readInt(encoding + CHAIN_HEADER_SIZE + ELEMENT_HEADER_LENGTH_OFFSET); - return storage.readBuffer(encoding + CHAIN_HEADER_SIZE + ELEMENT_HEADER_SIZE + elemLength, keyLength); - } - - private int readKeyHash(long encoding) { - return storage.readInt(encoding + CHAIN_HEADER_KEY_HASH_OFFSET); - } - - private int readKeySize(long encoding) { - return Integer.MAX_VALUE & storage.readInt(encoding + CHAIN_HEADER_KEY_LENGTH_OFFSET); - } - - @Override - public void clear() { - storage.clear(); - } - - @Override - public long getAllocatedMemory() { - return storage.getAllocatedMemory(); - } - - @Override - public long getOccupiedMemory() { - return storage.getOccupiedMemory(); - } - - @Override - public long getVitalMemory() { - return getOccupiedMemory(); - } - - @Override - public long getDataSize() { - return storage.getAllocatedMemory(); - } - - @Override - public void invalidateCache() { - //no-op - for now - } - - @Override - public void bind(StorageEngine.Owner owner) { - this.owner = owner; - } - - @Override - public void destroy() { - storage.destroy(); - } - - @Override - public boolean shrink() { - return storage.shrink(); - } - - private static class DetachedChain implements Chain { - - private final List elements; - - private DetachedChain(List buffers) { - this.elements = unmodifiableList(buffers); - } - - @Override - public Iterator reverseIterator() { - return Util.reverseIterator(elements); - } - - @Override - public boolean isEmpty() { - return elements.isEmpty(); - } - - @Override - public int length() { - return elements.size(); - } - - @Override - public Iterator iterator() { - return elements.iterator(); - } - - } - - private static class PrimordialChain implements InternalChain { - - private final ByteBuffer element; - - public PrimordialChain(ByteBuffer element) { - this.element = element; - } - - @Override - public Chain detach() { - throw new AssertionError("primordial chains cannot be detached"); - } - - @Override - public boolean append(ByteBuffer element) { - throw new AssertionError("primordial chains cannot be appended"); - } - - @Override - public boolean replace(Chain expected, Chain replacement) { - throw new AssertionError("primordial chains cannot be mutated"); - } - - @Override - public void close() { - //no-op - } - } - - private final class AttachedInternalChain implements InternalChain { - - /** - * Location of the chain structure, not of the first element. - */ - private long chain; - - AttachedInternalChain(long address) { - this.chain = address; - OffHeapChainStorageEngine.this.activeChains.add(this); - } - - @Override - public Chain detach() { - List buffers = new ArrayList<>(); - - long element = chain + CHAIN_HEADER_SIZE; - do { - buffers.add(element(readElementBuffer(element), readElementSequenceNumber(element))); - element = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); - } while (element != chain); - - return new DetachedChain(buffers); - } - - @Override - public boolean append(ByteBuffer element) { - long newTail = createElement(element); - if (newTail < 0) { - return false; - } else { - long oldTail = storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET); - storage.writeLong(newTail + ELEMENT_HEADER_NEXT_OFFSET, chain); - try { - storage.writeLong(oldTail + ELEMENT_HEADER_NEXT_OFFSET, newTail); - } catch (NullPointerException e) { - throw e; - } - storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, newTail); - return true; - } - } - - @Override - public boolean replace(Chain expected, Chain replacement) { - if (expected.isEmpty()) { - throw new IllegalArgumentException("Empty expected sequence"); - } else if (replacement.isEmpty()) { - return removeHeader(expected); - } else { - return replaceHeader(expected, replacement); - } - } - - public boolean removeHeader(Chain header) { - long suffixHead = chain + CHAIN_HEADER_SIZE; - long prefixTail; - - Iterator iterator = header.iterator(); - do { - if (!compare(iterator.next(), suffixHead)) { - return true; - } - prefixTail = suffixHead; - suffixHead = storage.readLong(suffixHead + ELEMENT_HEADER_NEXT_OFFSET); - } while (iterator.hasNext()); - - if (suffixHead == chain) { - //whole chain removed - int slot = owner.getSlotForHashAndEncoding(readKeyHash(chain), chain, ~0); - if (!owner.evict(slot, true)) { - throw new AssertionError("Unexpected failure to evict slot " + slot); - } - return true; - } else { - int hash = readKeyHash(chain); - int elemSize = storage.readInt(suffixHead + ELEMENT_HEADER_LENGTH_OFFSET); - ByteBuffer elemBuffer = storage.readBuffer(suffixHead + ELEMENT_HEADER_SIZE, elemSize); - Long newChainAddress = createAttachedChain(readKeyBuffer(chain), hash, elemBuffer); - if (newChainAddress == null) { - return false; - } else { - try (AttachedInternalChain newChain = new AttachedInternalChain(newChainAddress)) { - //copy remaining elements from old chain (by reference) - long next = storage.readLong(suffixHead + ELEMENT_HEADER_NEXT_OFFSET); - long tail = storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET); - if (next != chain) { - newChain.append(next, tail); - } - - if (owner.updateEncoding(hash, chain, newChainAddress, ~0)) { - storage.writeLong(prefixTail + ELEMENT_HEADER_NEXT_OFFSET, chain); - free(); - return true; - } else { - newChain.free(); - throw new AssertionError("Encoding update failure - impossible!"); - } - } - } - } - } - - public boolean replaceHeader(Chain expected, Chain replacement) { - long suffixHead = chain + CHAIN_HEADER_SIZE; - long prefixTail; - - Iterator expectedIt = expected.iterator(); - do { - if (!compare(expectedIt.next(), suffixHead)) { - return true; - } - prefixTail = suffixHead; - suffixHead = storage.readLong(suffixHead + ELEMENT_HEADER_NEXT_OFFSET); - } while (expectedIt.hasNext()); - - int hash = readKeyHash(chain); - Long newChainAddress = createAttachedChain(readKeyBuffer(chain), hash, replacement); - if (newChainAddress == null) { - return false; - } else { - try (AttachedInternalChain newChain = new AttachedInternalChain(newChainAddress)) { - //copy remaining elements from old chain (by reference) - if (suffixHead != chain) { - newChain.append(suffixHead, storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET)); - } - - if (owner.updateEncoding(hash, chain, newChainAddress, ~0)) { - storage.writeLong(prefixTail + ELEMENT_HEADER_NEXT_OFFSET, chain); - free(); - return true; - } else { - newChain.free(); - throw new AssertionError("Encoding update failure - impossible!"); - } - } - } - } - - private void free() { - long element = storage.readLong(chain + CHAIN_HEADER_SIZE + ELEMENT_HEADER_NEXT_OFFSET); - storage.free(chain); - - while (element != chain) { - long next = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); - storage.free(element); - element = next; - } - } - - private long createElement(ByteBuffer element) { - long newElement = storage.allocate(element.remaining() + ELEMENT_HEADER_SIZE); - if (newElement < 0) { - return newElement; - } else { - writeElement(newElement, element); - return newElement; - } - } - - private boolean compare(Element element, long address) { - if (element instanceof SequencedElement) { - return readElementSequenceNumber(address) == ((SequencedElement) element).getSequenceNumber(); - } else { - return readElementBuffer(address).equals(element.getPayload()); - } - } - - private void append(long head, long tail) { - long oldTail = storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET); - - storage.writeLong(oldTail + ELEMENT_HEADER_NEXT_OFFSET, head); - storage.writeLong(tail + ELEMENT_HEADER_NEXT_OFFSET, chain); - storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, tail); - } - - private Element element(ByteBuffer attachedBuffer, final long sequence) { - final ByteBuffer detachedBuffer = (ByteBuffer) ByteBuffer.allocate(attachedBuffer.remaining()).put(attachedBuffer).flip(); - - return new SequencedElement() { - - @Override - public ByteBuffer getPayload() { - return detachedBuffer.asReadOnlyBuffer(); - } - - @Override - public long getSequenceNumber() { - return sequence; - } - }; - } - - private ByteBuffer readElementBuffer(long address) { - int elemLength = storage.readInt(address + ELEMENT_HEADER_LENGTH_OFFSET); - return storage.readBuffer(address + ELEMENT_HEADER_SIZE, elemLength); - } - - private long readElementSequenceNumber(long address) { - return storage.readLong(address + ELEMENT_HEADER_SEQUENCE_OFFSET); - } - - public void moved(long from, long to) { - if (from == chain) { - chain = to; - } - } - - @Override - public void close() { - OffHeapChainStorageEngine.this.activeChains.remove(this); - } - } - - private long writeElement(long address, ByteBuffer element) { - storage.writeLong(address + ELEMENT_HEADER_SEQUENCE_OFFSET, nextSequenceNumber++); - storage.writeInt(address + ELEMENT_HEADER_LENGTH_OFFSET, element.remaining()); - storage.writeBuffer(address + ELEMENT_HEADER_SIZE, element.duplicate()); - return address; - } - - private Long createAttachedChain(K key, int hash, PrimordialChain value) { - ByteBuffer keyBuffer = keyPortability.encode(key); - ByteBuffer elemBuffer = value.element; - return createAttachedChain(keyBuffer, hash, elemBuffer); - } - - private Long createAttachedChain(ByteBuffer keyBuffer, int hash, ByteBuffer elemBuffer) { - long chain = storage.allocate(keyBuffer.remaining() + elemBuffer.remaining() + CHAIN_HEADER_SIZE + ELEMENT_HEADER_SIZE); - if (chain < 0) { - return null; - } - int keySize = keyBuffer.remaining(); - storage.writeInt(chain + CHAIN_HEADER_KEY_HASH_OFFSET, hash); - storage.writeInt(chain + CHAIN_HEADER_KEY_LENGTH_OFFSET, Integer.MIN_VALUE | keySize); - storage.writeBuffer(chain + CHAIN_HEADER_SIZE + ELEMENT_HEADER_SIZE + elemBuffer.remaining(), keyBuffer); - long element = chain + CHAIN_HEADER_SIZE; - writeElement(element, elemBuffer); - storage.writeLong(element + ELEMENT_HEADER_NEXT_OFFSET, chain); - storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, element); - return chain; - } - - private Long createAttachedChain(ByteBuffer readKeyBuffer, int hash, Chain from) { - Iterator iterator = from.iterator(); - Long address = createAttachedChain(readKeyBuffer, hash, iterator.next().getPayload()); - if (address == null) { - return null; - } - - try (AttachedInternalChain chain = new AttachedInternalChain(address)) { - while (iterator.hasNext()) { - if (!chain.append(iterator.next().getPayload())) { - chain.free(); - return null; - } - } - } - return address; - } - - private long findHead(long address) { - while (!isHead(address)) { - address = storage.readLong(address + ELEMENT_HEADER_NEXT_OFFSET); - } - return address; - } - - private boolean isHead(long address) { - return storage.readInt(address + CHAIN_HEADER_KEY_LENGTH_OFFSET) < 0; - } - - class StorageOwner implements OffHeapStorageArea.Owner { - - @Override - public boolean evictAtAddress(long address, boolean shrink) { - long chain = findHead(address); - for (AttachedInternalChain activeChain : activeChains) { - if (activeChain.chain == chain) { - return false; - } - } - int hash = storage.readInt(chain + CHAIN_HEADER_KEY_HASH_OFFSET); - int slot = owner.getSlotForHashAndEncoding(hash, chain, ~0); - return owner.evict(slot, shrink); - } - - @Override - public Lock writeLock() { - return owner.writeLock(); - } - - @Override - public boolean isThief() { - return owner.isThiefForTableAllocations(); - } - - @Override - public boolean moved(long from, long to) { - if (isHead(to)) { - int hashCode = storage.readInt(to + CHAIN_HEADER_KEY_HASH_OFFSET); - if (!owner.updateEncoding(hashCode, from, to, ~0)) { - return false; - } else { - long tail = storage.readLong(to + CHAIN_HEADER_TAIL_OFFSET); - if (tail == from + CHAIN_HEADER_SIZE) { - tail = to + CHAIN_HEADER_SIZE; - storage.writeLong(to + CHAIN_HEADER_TAIL_OFFSET, tail); - } - storage.writeLong(tail + ELEMENT_HEADER_NEXT_OFFSET, to); - for (AttachedInternalChain activeChain : activeChains) { - activeChain.moved(from, to); - } - return true; - } - } else { - long chain = findHead(to); - - long tail = storage.readLong(chain + CHAIN_HEADER_TAIL_OFFSET); - if (tail == from) { - storage.writeLong(chain + CHAIN_HEADER_TAIL_OFFSET, to); - } - - long element = chain + CHAIN_HEADER_SIZE; - while (element != chain) { - long next = storage.readLong(element + ELEMENT_HEADER_NEXT_OFFSET); - if (next == from) { - storage.writeLong(element + ELEMENT_HEADER_NEXT_OFFSET, to); - return true; - } else { - element = next; - } - } - throw new AssertionError(); - } - } - - @Override - public int sizeOf(long address) { - if (isHead(address)) { - int keySize = readKeySize(address); - return CHAIN_HEADER_SIZE + keySize + sizeOf(address + CHAIN_HEADER_SIZE); - } else { - int elementSize = storage.readInt(address + ELEMENT_HEADER_LENGTH_OFFSET); - return ELEMENT_HEADER_SIZE + elementSize; - } - } - } -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java deleted file mode 100644 index b1e7d84709..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java +++ /dev/null @@ -1,362 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.offheap; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.ServerStore; -import org.ehcache.clustered.server.KeySegmentMapper; -import org.ehcache.clustered.server.ServerStoreEvictionListener; -import org.ehcache.clustered.server.state.ResourcePageSource; -import org.terracotta.offheapstore.MapInternals; -import org.terracotta.offheapstore.exceptions.OversizeMappingException; -import org.terracotta.offheapstore.paging.PageSource; - -import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; -import static org.terracotta.offheapstore.util.MemoryUnit.KILOBYTES; -import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; - -public class OffHeapServerStore implements ServerStore, MapInternals { - - private static final long MAX_PAGE_SIZE_IN_KB = KILOBYTES.convert(8, MEGABYTES); - - private final List> segments; - private final KeySegmentMapper mapper; - - OffHeapServerStore(PageSource source, KeySegmentMapper mapper) { - this.mapper = mapper; - segments = new ArrayList<>(mapper.getSegments()); - for (int i = 0; i < mapper.getSegments(); i++) { - segments.add(new OffHeapChainMap<>(source, LongPortability.INSTANCE, KILOBYTES.toBytes(4), MEGABYTES.toBytes(8), false)); - } - } - - public OffHeapServerStore(ResourcePageSource source, KeySegmentMapper mapper) { - this.mapper = mapper; - segments = new ArrayList<>(mapper.getSegments()); - long maxSize = getMaxSize(source.getPool().getSize()); - for (int i = 0; i < mapper.getSegments(); i++) { - segments.add(new OffHeapChainMap<>(source, LongPortability.INSTANCE, KILOBYTES.toBytes(4), (int) KILOBYTES.toBytes(maxSize), false)); - } - } - - public List> getSegments() { - return segments; - } - - static long getMaxSize(long poolSize) { - long l = Long.highestOneBit(poolSize); - long sizeInKb = KILOBYTES.convert(l, BYTES); - long maxSize = sizeInKb >> 5; - - if (maxSize >= MAX_PAGE_SIZE_IN_KB) { - maxSize = MAX_PAGE_SIZE_IN_KB; - } - return maxSize; - } - - public void setEvictionListener(final ServerStoreEvictionListener listener) { - OffHeapChainMap.ChainMapEvictionListener chainMapEvictionListener = listener::onEviction; - for (OffHeapChainMap segment : segments) { - segment.setEvictionListener(chainMapEvictionListener); - } - } - - @Override - public Chain get(long key) { - return segmentFor(key).get(key); - } - - @Override - public void append(long key, ByteBuffer payLoad) { - try { - segmentFor(key).append(key, payLoad); - } catch (OversizeMappingException e) { - if (handleOversizeMappingException(key)) { - try { - segmentFor(key).append(key, payLoad); - return; - } catch (OversizeMappingException ex) { - //ignore - } - } - - writeLockAll(); - try { - do { - try { - segmentFor(key).append(key, payLoad); - return; - } catch (OversizeMappingException ex) { - e = ex; - } - } while (handleOversizeMappingException(key)); - throw e; - } finally { - writeUnlockAll(); - } - } - } - - @Override - public Chain getAndAppend(long key, ByteBuffer payLoad) { - try { - return segmentFor(key).getAndAppend(key, payLoad); - } catch (OversizeMappingException e) { - if (handleOversizeMappingException(key)) { - try { - return segmentFor(key).getAndAppend(key, payLoad); - } catch (OversizeMappingException ex) { - //ignore - } - } - - writeLockAll(); - try { - do { - try { - return segmentFor(key).getAndAppend(key, payLoad); - } catch (OversizeMappingException ex) { - e = ex; - } - } while (handleOversizeMappingException(key)); - throw e; - } finally { - writeUnlockAll(); - } - } - } - - @Override - public void replaceAtHead(long key, Chain expect, Chain update) { - try { - segmentFor(key).replaceAtHead(key, expect, update); - } catch (OversizeMappingException e) { - if (handleOversizeMappingException(key)) { - try { - segmentFor(key).replaceAtHead(key, expect, update); - return; - } catch (OversizeMappingException ex) { - //ignore - } - } - - writeLockAll(); - try { - do { - try { - segmentFor(key).replaceAtHead(key, expect, update); - return; - } catch (OversizeMappingException ex) { - e = ex; - } - } while (handleOversizeMappingException(key)); - throw e; - } finally { - writeUnlockAll(); - } - } - } - - public void put(long key, Chain chain) { - try { - segmentFor(key).put(key, chain); - } catch (OversizeMappingException e) { - if (handleOversizeMappingException(key)) { - try { - segmentFor(key).put(key, chain); - } catch (OversizeMappingException ex) { - //ignore - } - } - - writeLockAll(); - try { - do { - try { - segmentFor(key).put(key, chain); - } catch (OversizeMappingException ex) { - e = ex; - } - } while (handleOversizeMappingException(key)); - throw e; - } finally { - writeUnlockAll(); - } - } - } - - - @Override - public void clear() { - for (OffHeapChainMap segment : segments) { - segment.clear(); - } - } - - OffHeapChainMap segmentFor(long key) { - return segments.get(mapper.getSegmentForKey(key)); - } - - private void writeLockAll() { - for (OffHeapChainMap s : segments) { - s.writeLock().lock(); - } - } - - private void writeUnlockAll() { - for (OffHeapChainMap s : segments) { - s.writeLock().unlock(); - } - } - - boolean handleOversizeMappingException(long hash) { - boolean evicted = false; - - OffHeapChainMap target = segmentFor(hash); - for (OffHeapChainMap s : segments) { - if (s != target) { - evicted |= s.shrink(); - } - } - - return evicted; - } - - public void close() { - writeLockAll(); - try { - clear(); - } finally { - writeUnlockAll(); - } - segments.clear(); - } - - // stats - - @Override - public long getAllocatedMemory() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getAllocatedMemory(); - } - return total; - } - - @Override - public long getOccupiedMemory() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getOccupiedMemory(); - } - return total; - } - - @Override - public long getDataAllocatedMemory() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getDataAllocatedMemory(); - } - return total; - } - - @Override - public long getDataOccupiedMemory() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getDataOccupiedMemory(); - } - return total; - } - - @Override - public long getDataSize() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getDataSize(); - } - return total; - } - - @Override - public long getSize() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getSize(); - } - return total; - } - - @Override - public long getTableCapacity() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getTableCapacity(); - } - return total; - } - - @Override - public long getUsedSlotCount() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getUsedSlotCount(); - } - return total; - } - - @Override - public long getRemovedSlotCount() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getRemovedSlotCount(); - } - return total; - } - - @Override - public int getReprobeLength() { - int total = 0; - for (MapInternals segment : segments) { - total += segment.getReprobeLength(); - } - return total; - } - - @Override - public long getVitalMemory() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getVitalMemory(); - } - return total; - } - - @Override - public long getDataVitalMemory() { - long total = 0L; - for (MapInternals segment : segments) { - total += segment.getDataVitalMemory(); - } - return total; - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java deleted file mode 100644 index bec386781f..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server.state; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.ConcurrentSkipListSet; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; - - -/** - * Message Tracker keeps track of messages seen so far in efficient way by keeping track of contiguous and non-contiguous message ids. - * - * Assumption: message ids are generated in contiguous fashion in the increment on 1, starting from 0. - */ -public class MessageTracker { - - private static final Logger LOGGER = LoggerFactory.getLogger(MessageTracker.class); - - // keeping track of highest contiguous message id seen - private volatile long highestContiguousMsgId; - - // Keeping track of non contiguous message Ids higher than highestContiguousMsgId. - private final ConcurrentSkipListSet nonContiguousMsgIds; - - // Lock used for reconciliation. - private final Lock reconciliationLock; - - // Status that the sync is completed. - private volatile boolean isSyncCompleted; - - public MessageTracker(boolean isSyncCompleted) { - this.highestContiguousMsgId = -1L; - this.nonContiguousMsgIds = new ConcurrentSkipListSet<>(); - this.reconciliationLock = new ReentrantLock(); - this.isSyncCompleted = isSyncCompleted; - } - - /** - * Track the given message Id. - * - * @param msgId Message Id to be checked. - */ - public void track(long msgId) { - nonContiguousMsgIds.add(msgId); - tryReconcile(); - } - - /** - * Check wheather the given message id is already seen by track call. - * - * @param msgId Message Identifier to be checked. - * @return true if the given msgId is already tracked otherwise false. - */ - public boolean seen(long msgId) { - boolean seen = nonContiguousMsgIds.contains(msgId) || msgId <= highestContiguousMsgId; - tryReconcile(); - return seen; - } - - /** - * Checks weather non-contiguous message ids set is empty. - * - * @return true if the there is no non contiguous message ids otherwise false - */ - public boolean isEmpty() { - return nonContiguousMsgIds.isEmpty(); - } - - - /** - * Notify Message tracker that the sync is completed. - */ - public void notifySyncCompleted() { - this.isSyncCompleted = true; - } - - /** - * Remove the contiguous seen msgIds from the nonContiguousMsgIds and update highestContiguousMsgId - */ - private void reconcile() { - - // If nonContiguousMsgIds is empty then nothing to reconcile. - if (nonContiguousMsgIds.isEmpty()) { - return; - } - - // This happens when a passive is started after Active has moved on and - // passive starts to see msgIDs starting from a number > 0. - // Once the sync is completed, fast forward highestContiguousMsgId. - // Post sync completion assuming platform will send all msgIds beyond highestContiguousMsgId. - if (highestContiguousMsgId == -1L && isSyncCompleted) { - Long min = nonContiguousMsgIds.last(); - LOGGER.info("Setting highestContiguousMsgId to {} from -1", min); - highestContiguousMsgId = min; - nonContiguousMsgIds.removeIf(msgId -> msgId <= min); - } - - for (long msgId : nonContiguousMsgIds) { - if (msgId <= highestContiguousMsgId) { - nonContiguousMsgIds.remove(msgId); - } else if (msgId > highestContiguousMsgId + 1) { - break; - } else { - // the order is important.. - highestContiguousMsgId = msgId; - nonContiguousMsgIds.remove(msgId); - } - } - - } - - /** - * Try to reconcile, if the lock is available otherwise just return as other thread would have hold the lock and performing reconcile. - */ - private void tryReconcile() { - if (!this.reconciliationLock.tryLock()) { - return; - } - - try { - reconcile(); - - // Keep on warning after every reconcile if nonContiguousMsgIds reaches 500 (kept it a bit higher so that we won't get unnecessary warning due to high concurrency). - if (nonContiguousMsgIds.size() > 500) { - LOGGER.warn("Non - Contiguous Message ID has size : {}, with highestContiguousMsgId as : {}", nonContiguousMsgIds.size(), highestContiguousMsgId); - } - } finally { - this.reconciliationLock.unlock(); - } - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierActiveEntity.java deleted file mode 100644 index 56f4307cb6..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/store/ClusterTierActiveEntity.java +++ /dev/null @@ -1,696 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server.store; - -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.common.internal.exceptions.InvalidOperationException; -import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; -import org.ehcache.clustered.common.internal.exceptions.LifecycleException; -import org.ehcache.clustered.common.internal.messages.ClusterTierReconnectMessage; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; -import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateServerStore; -import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAllAck; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.KeyBasedServerStoreOpMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage; -import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.ClusterTierEntityConfiguration; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.server.CommunicatorServiceConfiguration; -import org.ehcache.clustered.server.KeySegmentMapper; -import org.ehcache.clustered.server.ServerSideServerStore; -import org.ehcache.clustered.server.ServerStoreCompatibility; -import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; -import org.ehcache.clustered.server.internal.messages.EhcacheMessageTrackerMessage; -import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; -import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; -import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; -import org.ehcache.clustered.server.management.ClusterTierManagement; -import org.ehcache.clustered.server.state.EhcacheStateService; -import org.ehcache.clustered.server.state.InvalidationTracker; -import org.ehcache.clustered.server.state.config.EhcacheStoreStateServiceConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.client.message.tracker.OOOMessageHandler; -import org.terracotta.client.message.tracker.OOOMessageHandlerConfiguration; -import org.terracotta.entity.ActiveInvokeContext; -import org.terracotta.entity.ActiveServerEntity; -import org.terracotta.entity.BasicServiceConfiguration; -import org.terracotta.entity.ClientCommunicator; -import org.terracotta.entity.ClientDescriptor; -import org.terracotta.entity.ClientSourceId; -import org.terracotta.entity.ConfigurationException; -import org.terracotta.entity.EntityUserException; -import org.terracotta.entity.IEntityMessenger; -import org.terracotta.entity.InvokeContext; -import org.terracotta.entity.MessageCodecException; -import org.terracotta.entity.PassiveSynchronizationChannel; -import org.terracotta.entity.ReconnectRejectedException; -import org.terracotta.entity.ServiceException; -import org.terracotta.entity.ServiceRegistry; -import org.terracotta.entity.StateDumpCollector; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; - -import static java.util.stream.Collectors.toMap; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.allInvalidationDone; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateAll; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.clientInvalidateHash; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.failure; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.getResponse; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.hashInvalidationDone; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.resolveRequest; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverInvalidateHash; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.success; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; -import static org.ehcache.clustered.server.ConcurrencyStrategies.DEFAULT_KEY; -import static org.ehcache.clustered.server.ConcurrencyStrategies.clusterTierConcurrency; - -/** - * ClusterTierActiveEntity - */ -public class ClusterTierActiveEntity implements ActiveServerEntity { - - private static final Logger LOGGER = LoggerFactory.getLogger(ClusterTierActiveEntity.class); - static final String SYNC_DATA_SIZE_PROP = "ehcache.sync.data.size.threshold"; - private static final long DEFAULT_SYNC_DATA_SIZE_THRESHOLD = 4 * 1024 * 1024; - static final String CHAIN_COMPACTION_THRESHOLD_PROP = "ehcache.server.chain.compaction.threshold"; - static final int DEFAULT_CHAIN_COMPACTION_THRESHOLD = 8; - - private final String storeIdentifier; - private final ServerStoreConfiguration configuration; - private final ClientCommunicator clientCommunicator; - private final EhcacheStateService stateService; - private final OOOMessageHandler messageHandler; - private final IEntityMessenger entityMessenger; - private final ServerStoreCompatibility storeCompatibility = new ServerStoreCompatibility(); - private final AtomicBoolean reconnectComplete = new AtomicBoolean(true); - private final AtomicInteger invalidationIdGenerator = new AtomicInteger(); - private final ConcurrentMap clientsWaitingForInvalidation = new ConcurrentHashMap<>(); - private final ReconnectMessageCodec reconnectMessageCodec = new ReconnectMessageCodec(); - private final ClusterTierManagement management; - private final String managerIdentifier; - private final Object inflightInvalidationsMutex = new Object(); - private volatile List inflightInvalidations; - private final Set connectedClients = ConcurrentHashMap.newKeySet(); - private final int chainCompactionLimit; - - @SuppressWarnings("unchecked") - public ClusterTierActiveEntity(ServiceRegistry registry, ClusterTierEntityConfiguration entityConfiguration, KeySegmentMapper defaultMapper) throws ConfigurationException { - if (entityConfiguration == null) { - throw new ConfigurationException("ClusteredStoreEntityConfiguration cannot be null"); - } - storeIdentifier = entityConfiguration.getStoreIdentifier(); - configuration = entityConfiguration.getConfiguration(); - managerIdentifier = entityConfiguration.getManagerIdentifier(); - try { - clientCommunicator = registry.getService(new CommunicatorServiceConfiguration()); - stateService = registry.getService(new EhcacheStoreStateServiceConfig(entityConfiguration.getManagerIdentifier(), defaultMapper)); - entityMessenger = registry.getService(new BasicServiceConfiguration<>(IEntityMessenger.class)); - messageHandler = registry.getService(new OOOMessageHandlerConfiguration(managerIdentifier + "###" + storeIdentifier, - ClusterTierActiveEntity::isTrackedMessage, defaultMapper.getSegments() + 1, new MessageToTrackerSegmentFunction(clusterTierConcurrency(defaultMapper)))); - } catch (ServiceException e) { - throw new ConfigurationException("Unable to retrieve service: " + e.getMessage()); - } - if (entityMessenger == null) { - throw new AssertionError("Server failed to retrieve IEntityMessenger service."); - } - management = new ClusterTierManagement(registry, stateService, true, storeIdentifier, entityConfiguration.getManagerIdentifier()); - chainCompactionLimit = Integer.getInteger(CHAIN_COMPACTION_THRESHOLD_PROP, DEFAULT_CHAIN_COMPACTION_THRESHOLD); - } - - static boolean isTrackedMessage(EhcacheEntityMessage msg) { - if (msg instanceof EhcacheOperationMessage) { - return EhcacheMessageType.isTrackedOperationMessage(((EhcacheOperationMessage) msg).getMessageType()); - } else { - return false; - } - } - - @Override - public void addStateTo(StateDumpCollector dump) { - ClusterTierDump.dump(dump, managerIdentifier, storeIdentifier, configuration); - Set clients = new HashSet<>(getConnectedClients()); - - List allClients = new ArrayList<>(clients.size()); - for (ClientDescriptor entry : clients) { - Map clientMap = new HashMap<>(1); - clientMap.put("clientDescriptor", entry.toString()); - allClients.add(clientMap); - } - dump.addState("clientCount", String.valueOf(clients.size())); - dump.addState("clients", allClients); - } - - @Override - public void createNew() throws ConfigurationException { - ServerSideServerStore store = stateService.createStore(storeIdentifier, configuration, true); - store.setEvictionListener(this::invalidateHashAfterEviction); - management.init(); - } - - List getInflightInvalidations() { - return this.inflightInvalidations; - } - - @Override - public void loadExisting() { - inflightInvalidations = new ArrayList<>(); - if (!isStrong()) { - LOGGER.debug("Preparing for handling inflight invalidations"); - addInflightInvalidationsForEventualCaches(); - } - stateService.loadStore(storeIdentifier, configuration).setEvictionListener(this::invalidateHashAfterEviction); - reconnectComplete.set(false); - management.reload(); - } - - private void invalidateHashAfterEviction(long key) { - Set clientsToInvalidate = new HashSet<>(getConnectedClients()); - for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { - LOGGER.debug("SERVER: eviction happened; asking client {} to invalidate hash {} from cache {}", clientDescriptorThatHasToInvalidate, key, storeIdentifier); - try { - clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, serverInvalidateHash(key)); - } catch (MessageCodecException mce) { - throw new AssertionError("Codec error", mce); - } - } - } - - @Override - public void connected(ClientDescriptor clientDescriptor) { - connectedClients.add(clientDescriptor); - } - - @Override - public void disconnected(ClientDescriptor clientDescriptor) { - // cleanup all invalidation requests waiting for a ack from this client - Set invalidationIds = clientsWaitingForInvalidation.keySet(); - for (Integer invalidationId : invalidationIds) { - clientInvalidated(clientDescriptor, invalidationId); - } - - // cleanup all invalidation request this client was blocking on - for(Iterator> it = clientsWaitingForInvalidation.entrySet().iterator(); it.hasNext();) { - Map.Entry next = it.next(); - ClientDescriptor clientDescriptorWaitingForInvalidation = next.getValue().clientDescriptorWaitingForInvalidation; - if (clientDescriptorWaitingForInvalidation != null && clientDescriptorWaitingForInvalidation.equals(clientDescriptor)) { - it.remove(); - } - } - - connectedClients.remove(clientDescriptor); - } - - @Override - public EhcacheEntityResponse invokeActive(ActiveInvokeContext context, EhcacheEntityMessage message) throws EntityUserException { - return messageHandler.invoke(context, message, this::invokeActiveInternal); - } - - private EhcacheEntityResponse invokeActiveInternal(InvokeContext context, EhcacheEntityMessage message) { - - try { - if (message instanceof EhcacheOperationMessage) { - EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; - EhcacheMessageType messageType = operationMessage.getMessageType(); - if (isStoreOperationMessage(messageType)) { - return invokeServerStoreOperation(context, (ServerStoreOpMessage) message); - } else if (isLifecycleMessage(messageType)) { - return invokeLifeCycleOperation(context, (LifecycleMessage) message); - } else if (isStateRepoOperationMessage(messageType)) { - return invokeStateRepositoryOperation((StateRepositoryOpMessage) message); - } - } - throw new AssertionError("Unsupported message : " + message.getClass()); - } catch (ClusterException e) { - return failure(e); - } catch (Exception e) { - LOGGER.error("Unexpected exception raised during operation: " + message, e); - return failure(new InvalidOperationException(e)); - } - } - - private EhcacheEntityResponse invokeStateRepositoryOperation(StateRepositoryOpMessage message) throws ClusterException { - return stateService.getStateRepositoryManager().invoke(message); - } - - private EhcacheEntityResponse invokeLifeCycleOperation(InvokeContext context, LifecycleMessage message) throws ClusterException { - ActiveInvokeContext activeInvokeContext = (ActiveInvokeContext)context; - switch (message.getMessageType()) { - case VALIDATE_SERVER_STORE: - validateServerStore(activeInvokeContext.getClientDescriptor(), (ValidateServerStore) message); - break; - default: - throw new AssertionError("Unsupported LifeCycle operation " + message); - } - return success(); - } - - private void validateServerStore(ClientDescriptor clientDescriptor, ValidateServerStore validateServerStore) throws ClusterException { - ServerStoreConfiguration clientConfiguration = validateServerStore.getStoreConfiguration(); - LOGGER.info("Client {} validating cluster tier '{}'", clientDescriptor, storeIdentifier); - ServerSideServerStore store = stateService.getStore(storeIdentifier); - if (store != null) { - storeCompatibility.verify(store.getStoreConfiguration(), clientConfiguration); - } else { - throw new InvalidStoreException("cluster tier '" + storeIdentifier + "' does not exist"); - } - } - - private EhcacheEntityResponse invokeServerStoreOperation(InvokeContext context, ServerStoreOpMessage message) throws ClusterException { - ActiveInvokeContext activeInvokeContext = (ActiveInvokeContext) context; - ClientDescriptor clientDescriptor = activeInvokeContext.getClientDescriptor(); - - ServerSideServerStore cacheStore = stateService.getStore(storeIdentifier); - if (cacheStore == null) { - // An operation on a non-existent store should never get out of the client - throw new LifecycleException("cluster tier does not exist : '" + storeIdentifier + "'"); - } - - if (inflightInvalidations != null) { - synchronized (inflightInvalidationsMutex) { - // This logic totally counts on the fact that invokes will only happen - // after all handleReconnects are done, else this is flawed. - if (inflightInvalidations != null) { - List tmpInflightInvalidations = this.inflightInvalidations; - this.inflightInvalidations = null; - LOGGER.debug("Stalling all operations for cluster tier {} for firing inflight invalidations again.", storeIdentifier); - tmpInflightInvalidations.forEach(invalidationState -> { - if (invalidationState.isClearInProgress()) { - invalidateAll(invalidationState.getClientDescriptor()); - } - invalidationState.getInvalidationsInProgress() - .forEach(hashInvalidationToBeResent -> invalidateHashForClient(invalidationState.getClientDescriptor(), hashInvalidationToBeResent)); - }); - } - } - } - - switch (message.getMessageType()) { - case GET_STORE: { - GetMessage getMessage = (GetMessage) message; - try { - return getResponse(cacheStore.get(getMessage.getKey())); - } catch (TimeoutException e) { - throw new AssertionError("Server side store is not expected to throw timeout exception"); - } - } - case APPEND: { - AppendMessage appendMessage = (AppendMessage)message; - - long key = appendMessage.getKey(); - InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); - if (invalidationTracker != null) { - invalidationTracker.trackHashInvalidation(key); - } - - final Chain newChain; - try { - cacheStore.append(key, appendMessage.getPayload()); - newChain = cacheStore.get(key); - } catch (TimeoutException e) { - throw new AssertionError("Server side store is not expected to throw timeout exception"); - } - sendMessageToSelfAndDeferRetirement(activeInvokeContext, appendMessage, newChain); - invalidateHashForClient(clientDescriptor, key); - if (newChain.length() > chainCompactionLimit) { - requestChainResolution(clientDescriptor, key, newChain); - } - return success(); - } - case GET_AND_APPEND: { - ServerStoreOpMessage.GetAndAppendMessage getAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage)message; - LOGGER.trace("Message {} : GET_AND_APPEND on key {} from client {}", message, getAndAppendMessage.getKey(), context.getClientSource().toLong()); - - InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); - if (invalidationTracker != null) { - invalidationTracker.trackHashInvalidation(getAndAppendMessage.getKey()); - } - - final Chain result; - final Chain newChain; - try { - result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); - newChain = cacheStore.get(getAndAppendMessage.getKey()); - } catch (TimeoutException e) { - throw new AssertionError("Server side store is not expected to throw timeout exception"); - } - sendMessageToSelfAndDeferRetirement(activeInvokeContext, getAndAppendMessage, newChain); - LOGGER.debug("Send invalidations for key {}", getAndAppendMessage.getKey()); - invalidateHashForClient(clientDescriptor, getAndAppendMessage.getKey()); - return getResponse(result); - } - case REPLACE: { - ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage) message; - cacheStore.replaceAtHead(replaceAtHeadMessage.getKey(), replaceAtHeadMessage.getExpect(), replaceAtHeadMessage.getUpdate()); - return success(); - } - case CLIENT_INVALIDATION_ACK: { - ClientInvalidationAck clientInvalidationAck = (ClientInvalidationAck) message; - int invalidationId = clientInvalidationAck.getInvalidationId(); - LOGGER.debug("SERVER: got notification of invalidation ack in cache {} from {} (ID {})", storeIdentifier, clientDescriptor, invalidationId); - clientInvalidated(clientDescriptor, invalidationId); - return success(); - } - case CLIENT_INVALIDATION_ALL_ACK: { - ClientInvalidationAllAck clientInvalidationAllAck = (ClientInvalidationAllAck) message; - int invalidationId = clientInvalidationAllAck.getInvalidationId(); - LOGGER.debug("SERVER: got notification of invalidation ack in cache {} from {} (ID {})", storeIdentifier, clientDescriptor, invalidationId); - clientInvalidated(clientDescriptor, invalidationId); - return success(); - } - case CLEAR: { - LOGGER.info("Clearing cluster tier {}", storeIdentifier); - try { - cacheStore.clear(); - } catch (TimeoutException e) { - throw new AssertionError("Server side store is not expected to throw timeout exception"); - } - - InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); - if (invalidationTracker != null) { - invalidationTracker.setClearInProgress(true); - } - invalidateAll(clientDescriptor); - return success(); - } - default: - throw new AssertionError("Unsupported ServerStore operation : " + message); - } - } - - private void invalidateAll(ClientDescriptor originatingClientDescriptor) { - int invalidationId = invalidationIdGenerator.getAndIncrement(); - Set clientsToInvalidate = new HashSet<>(getConnectedClients()); - if (originatingClientDescriptor != null) { - clientsToInvalidate.remove(originatingClientDescriptor); - } - - InvalidationHolder invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate); - clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); - - LOGGER.debug("SERVER: requesting {} client(s) invalidation of all in cache {} (ID {})", clientsToInvalidate.size(), storeIdentifier, invalidationId); - for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { - LOGGER.debug("SERVER: asking client {} to invalidate all from cache {} (ID {})", clientDescriptorThatHasToInvalidate, storeIdentifier, invalidationId); - try { - clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, clientInvalidateAll(invalidationId)); - } catch (MessageCodecException mce) { - throw new AssertionError("Codec error", mce); - } - } - - if (clientsToInvalidate.isEmpty()) { - clientInvalidated(invalidationHolder.clientDescriptorWaitingForInvalidation, invalidationId); - } - } - - private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidationId) { - InvalidationHolder invalidationHolder = clientsWaitingForInvalidation.get(invalidationId); - - if (invalidationHolder == null) { // Happens when client is re-sending/sending invalidations for which server has lost track since fail-over happened. - LOGGER.debug("Ignoring invalidation from client {} " + clientDescriptor); - return; - } - - invalidationHolder.clientsHavingToInvalidate.remove(clientDescriptor); - if (invalidationHolder.clientsHavingToInvalidate.isEmpty()) { - if (clientsWaitingForInvalidation.remove(invalidationId) != null) { - try { - Long key = invalidationHolder.key; - if (key == null) { - if (isStrong()) { - clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, allInvalidationDone()); - LOGGER.debug("SERVER: notifying originating client that all other clients invalidated all in cache {} from {} (ID {})", storeIdentifier, clientDescriptor, invalidationId); - } else { - entityMessenger.messageSelf(new ClearInvalidationCompleteMessage()); - - InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); - if (invalidationTracker != null) { - invalidationTracker.setClearInProgress(false); - } - - } - } else { - if (isStrong()) { - clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, hashInvalidationDone(key)); - LOGGER.debug("SERVER: notifying originating client that all other clients invalidated key {} in cache {} from {} (ID {})", key, storeIdentifier, clientDescriptor, invalidationId); - } else { - entityMessenger.messageSelf(new InvalidationCompleteMessage(key)); - - InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); - if (invalidationTracker != null) { - invalidationTracker.untrackHashInvalidation(key); - } - } - } - } catch (MessageCodecException mce) { - throw new AssertionError("Codec error", mce); - } - } - } - } - - private void invalidateHashForClient(ClientDescriptor originatingClientDescriptor, long key) { - int invalidationId = invalidationIdGenerator.getAndIncrement(); - Set clientsToInvalidate = new HashSet<>(getConnectedClients()); - if (originatingClientDescriptor != null) { - clientsToInvalidate.remove(originatingClientDescriptor); - } - - InvalidationHolder invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, key); - clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); - - LOGGER.debug("SERVER: requesting {} client(s) invalidation of hash {} in cache {} (ID {})", clientsToInvalidate.size(), key, storeIdentifier, invalidationId); - for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { - LOGGER.debug("SERVER: asking client {} to invalidate hash {} from cache {} (ID {})", clientDescriptorThatHasToInvalidate, key, storeIdentifier, invalidationId); - try { - clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, clientInvalidateHash(key, invalidationId)); - } catch (MessageCodecException mce) { - throw new AssertionError("Codec error", mce); - } - } - - if (clientsToInvalidate.isEmpty()) { - clientInvalidated(invalidationHolder.clientDescriptorWaitingForInvalidation, invalidationId); - } - } - - private void requestChainResolution(ClientDescriptor clientDescriptor, long key, Chain chain) { - try { - clientCommunicator.sendNoResponse(clientDescriptor, resolveRequest(key, chain)); - } catch (MessageCodecException e) { - throw new AssertionError("Codec error", e); - } - } - - /** - * Send a {@link PassiveReplicationMessage} to the passive, reuse the same transaction id and client id as the original message since this - * original message won't ever be sent to the passive and these ids will be used to prevent duplication if the active goes down and the - * client resends the original message to the passive (now our new active). - * - * @param context context of the message - * @param message message to be forwarded - * @param newChain resulting chain to send - */ - private void sendMessageToSelfAndDeferRetirement(ActiveInvokeContext context, KeyBasedServerStoreOpMessage message, Chain newChain) { - try { - long clientId = context.getClientSource().toLong(); - entityMessenger.messageSelfAndDeferRetirement(message, new PassiveReplicationMessage.ChainReplicationMessage(message.getKey(), newChain, - context.getCurrentTransactionId(), context.getOldestTransactionId(), clientId)); - } catch (MessageCodecException e) { - throw new AssertionError("Codec error", e); - } - } - - private void addInflightInvalidationsForEventualCaches() { - InvalidationTracker invalidationTracker = stateService.getInvalidationTracker(storeIdentifier); - if (invalidationTracker != null) { - inflightInvalidations.add(new InvalidationTuple(null, invalidationTracker.getTrackedKeys(), invalidationTracker.isClearInProgress())); - invalidationTracker.clear(); - } - } - - @Override - public void notifyDestroyed(ClientSourceId sourceId) { - messageHandler.untrackClient(sourceId); - } - - @Override - public ReconnectHandler startReconnect() { - return (clientDescriptor, bytes) -> { - if (inflightInvalidations == null) { - throw new AssertionError("Load existing was not invoked before handleReconnect"); - } - - ClusterTierReconnectMessage reconnectMessage = reconnectMessageCodec.decode(bytes); - ServerSideServerStore serverStore = stateService.getStore(storeIdentifier); - addInflightInvalidationsForStrongCache(clientDescriptor, reconnectMessage, serverStore); - - LOGGER.info("Client '{}' successfully reconnected to newly promoted ACTIVE after failover.", clientDescriptor); - - connectedClients.add(clientDescriptor); - }; - } - - private void addInflightInvalidationsForStrongCache(ClientDescriptor clientDescriptor, ClusterTierReconnectMessage reconnectMessage, ServerSideServerStore serverStore) { - if (serverStore.getStoreConfiguration().getConsistency().equals(Consistency.STRONG)) { - Set invalidationsInProgress = reconnectMessage.getInvalidationsInProgress(); - LOGGER.debug("Number of Inflight Invalidations from client ID {} for cache {} is {}.", clientDescriptor.getSourceId().toLong(), storeIdentifier, invalidationsInProgress - .size()); - inflightInvalidations.add(new InvalidationTuple(clientDescriptor, invalidationsInProgress, reconnectMessage.isClearInProgress())); - } - } - - @Override - public void synchronizeKeyToPassive(PassiveSynchronizationChannel syncChannel, int concurrencyKey) { - LOGGER.info("Sync started for concurrency key {}.", concurrencyKey); - if (concurrencyKey == DEFAULT_KEY) { - stateService.getStateRepositoryManager().syncMessageFor(storeIdentifier).forEach(syncChannel::synchronizeToPassive); - } else { - int segmentId = concurrencyKey - DEFAULT_KEY - 1; - Long dataSizeThreshold = Long.getLong(SYNC_DATA_SIZE_PROP, DEFAULT_SYNC_DATA_SIZE_THRESHOLD); - AtomicLong size = new AtomicLong(0); - ServerSideServerStore store = stateService.getStore(storeIdentifier); - final AtomicReference> mappingsToSend = new AtomicReference<>(new HashMap<>()); - store.getSegmentKeySets().get(segmentId) - .forEach(key -> { - final Chain chain; - try { - chain = store.get(key); - } catch (TimeoutException e) { - throw new AssertionError("Server side store is not expected to throw timeout exception"); - } - for (Element element : chain) { - size.addAndGet(element.getPayload().remaining()); - } - mappingsToSend.get().put(key, chain); - if (size.get() > dataSizeThreshold) { - syncChannel.synchronizeToPassive(new EhcacheDataSyncMessage(mappingsToSend.get())); - mappingsToSend.set(new HashMap<>()); - size.set(0); - } - }); - if (!mappingsToSend.get().isEmpty()) { - syncChannel.synchronizeToPassive(new EhcacheDataSyncMessage(mappingsToSend.get())); - mappingsToSend.set(new HashMap<>()); - size.set(0); - } - } - sendMessageTrackerReplication(syncChannel, concurrencyKey - 1); - - LOGGER.info("Sync complete for concurrency key {}.", concurrencyKey); - } - - private void sendMessageTrackerReplication(PassiveSynchronizationChannel syncChannel, int concurrencyKey) { - Map> clientSourceIdTrackingMap = messageHandler.getTrackedClients() - .collect(toMap(ClientSourceId::toLong, clientSourceId -> messageHandler.getTrackedResponsesForSegment(concurrencyKey, clientSourceId))); - if (!clientSourceIdTrackingMap.isEmpty()) { - syncChannel.synchronizeToPassive(new EhcacheMessageTrackerMessage(concurrencyKey, clientSourceIdTrackingMap)); - } - } - - @Override - public void destroy() { - LOGGER.info("Destroying cluster tier '{}'", storeIdentifier); - try { - stateService.destroyServerStore(storeIdentifier); - } catch (ClusterException e) { - LOGGER.error("Failed to destroy server store - does not exist", e); - } - management.close(); - } - - Set getConnectedClients() { - return connectedClients; - } - - ConcurrentMap getClientsWaitingForInvalidation() { - return clientsWaitingForInvalidation; - } - - private boolean isStrong() { - return this.configuration.getConsistency() == Consistency.STRONG; - } - - static class InvalidationHolder { - final ClientDescriptor clientDescriptorWaitingForInvalidation; - final Set clientsHavingToInvalidate; - final Long key; - - InvalidationHolder(ClientDescriptor clientDescriptorWaitingForInvalidation, Set clientsHavingToInvalidate, Long key) { - this.clientDescriptorWaitingForInvalidation = clientDescriptorWaitingForInvalidation; - this.clientsHavingToInvalidate = clientsHavingToInvalidate; - this.key = key; - } - - InvalidationHolder(ClientDescriptor clientDescriptorWaitingForInvalidation, Set clientsHavingToInvalidate) { - this(clientDescriptorWaitingForInvalidation, clientsHavingToInvalidate, null); - } - } - - private static class InvalidationTuple { - private final ClientDescriptor clientDescriptor; - private final Set invalidationsInProgress; - private final boolean isClearInProgress; - - InvalidationTuple(ClientDescriptor clientDescriptor, Set invalidationsInProgress, boolean isClearInProgress) { - this.clientDescriptor = clientDescriptor; - this.invalidationsInProgress = invalidationsInProgress; - this.isClearInProgress = isClearInProgress; - } - - ClientDescriptor getClientDescriptor() { - return clientDescriptor; - } - - Set getInvalidationsInProgress() { - return invalidationsInProgress; - } - - boolean isClearInProgress() { - return isClearInProgress; - } - } -} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntityTest.java deleted file mode 100644 index 9f6178071a..0000000000 --- a/clustered/server/src/test/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockActiveEntityTest.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.lock.server; - -import org.ehcache.clustered.common.internal.lock.LockMessaging; -import org.ehcache.clustered.common.internal.lock.LockMessaging.LockTransition; -import org.hamcrest.beans.HasPropertyWithValue; -import org.junit.Test; -import org.terracotta.entity.ClientCommunicator; -import org.terracotta.entity.ClientDescriptor; -import org.terracotta.entity.EntityResponse; -import org.terracotta.entity.MessageCodecException; - -import static org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType.READ; -import static org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType.WRITE; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -public class VoltronReadWriteLockActiveEntityTest { - - @Test - public void testWriteLock() { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - ClientDescriptor client = mock(ClientDescriptor.class); - - LockTransition transition = entity.invoke(client, LockMessaging.lock(WRITE)); - - assertThat(transition.isAcquired(), is(true)); - } - - @Test - public void testReadLock() { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - ClientDescriptor client = mock(ClientDescriptor.class); - - LockTransition transition = entity.invoke(client, LockMessaging.lock(READ)); - - assertThat(transition.isAcquired(), is(true)); - } - - @Test - public void testWriteUnlock() { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - ClientDescriptor client = mock(ClientDescriptor.class); - entity.invoke(client, LockMessaging.lock(WRITE)); - - LockTransition transition = entity.invoke(client, LockMessaging.unlock(WRITE)); - - assertThat(transition.isReleased(), is(true)); - } - - @Test - public void testReadUnlock() { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - ClientDescriptor client = mock(ClientDescriptor.class); - entity.invoke(client, LockMessaging.lock(READ)); - - LockTransition transition = entity.invoke(client, LockMessaging.unlock(READ)); - - assertThat(transition.isReleased(), is(true)); - } - - @Test - public void testTryWriteLockWhenWriteLocked() { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - entity.invoke(mock(ClientDescriptor.class), LockMessaging.lock(WRITE)); - - LockTransition transition = entity.invoke(mock(ClientDescriptor.class), LockMessaging.tryLock(WRITE)); - - assertThat(transition.isAcquired(), is(false)); - } - - @Test - public void testTryReadLockWhenWriteLocked() { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - entity.invoke(mock(ClientDescriptor.class), LockMessaging.lock(WRITE)); - - LockTransition transition = entity.invoke(mock(ClientDescriptor.class), LockMessaging.tryLock(READ)); - - assertThat(transition.isAcquired(), is(false)); - } - - @Test - public void testTryWriteLockWhenReadLocked() { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - entity.invoke(mock(ClientDescriptor.class), LockMessaging.lock(READ)); - - LockTransition transition = entity.invoke(mock(ClientDescriptor.class), LockMessaging.tryLock(WRITE)); - - assertThat(transition.isAcquired(), is(false)); - } - - @Test - public void testTryReadLockWhenReadLocked() { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - entity.invoke(mock(ClientDescriptor.class), LockMessaging.lock(READ)); - - LockTransition transition = entity.invoke(mock(ClientDescriptor.class), LockMessaging.tryLock(READ)); - - assertThat(transition.isAcquired(), is(true)); - } - - @Test - public void testWriteUnlockNotifiesListeners() throws MessageCodecException { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - ClientDescriptor locker = mock(ClientDescriptor.class); - ClientDescriptor waiter = mock(ClientDescriptor.class); - - entity.invoke(locker, LockMessaging.lock(WRITE)); - entity.invoke(waiter, LockMessaging.lock(WRITE)); - entity.invoke(locker, LockMessaging.unlock(WRITE)); - - verify(communicator).sendNoResponse(eq(waiter), argThat( - HasPropertyWithValue.hasProperty("released", is(true)))); - } - - @Test - public void testReadUnlockNotifiesListeners() throws MessageCodecException { - ClientCommunicator communicator = mock(ClientCommunicator.class); - VoltronReadWriteLockActiveEntity entity = new VoltronReadWriteLockActiveEntity(communicator); - - ClientDescriptor locker = mock(ClientDescriptor.class); - ClientDescriptor waiter = mock(ClientDescriptor.class); - - entity.invoke(locker, LockMessaging.lock(READ)); - entity.invoke(waiter, LockMessaging.lock(WRITE)); - entity.invoke(locker, LockMessaging.unlock(READ)); - - verify(communicator).sendNoResponse(eq(waiter), argThat( - HasPropertyWithValue.hasProperty("released", is(true)))); - } - - -} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/TestClientDescriptor.java b/clustered/server/src/test/java/org/ehcache/clustered/server/TestClientDescriptor.java deleted file mode 100644 index 26b2f20923..0000000000 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/TestClientDescriptor.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server; - -import org.terracotta.entity.ClientDescriptor; -import org.terracotta.entity.ClientSourceId; - -import java.util.concurrent.atomic.AtomicInteger; - -public final class TestClientDescriptor implements ClientDescriptor { - private static final AtomicInteger counter = new AtomicInteger(0); - - private final int clientId = counter.incrementAndGet(); - - public static ClientDescriptor create() { - return new TestClientDescriptor(); - } - - @Override - public ClientSourceId getSourceId() { - return new TestClientSourceId(clientId); - } - - @Override - public String toString() { - return "TestClientDescriptor[" + clientId + "]"; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - TestClientDescriptor that = (TestClientDescriptor) o; - - return clientId == that.clientId; - } - - @Override - public int hashCode() { - return clientId; - } -} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/TestInvokeContext.java b/clustered/server/src/test/java/org/ehcache/clustered/server/TestInvokeContext.java deleted file mode 100644 index e6da1dac22..0000000000 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/TestInvokeContext.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server; - -import org.terracotta.entity.ActiveInvokeChannel; -import org.terracotta.entity.ActiveInvokeContext; -import org.terracotta.entity.ClientDescriptor; -import org.terracotta.entity.ClientSourceId; - -import java.util.concurrent.atomic.AtomicLong; - -public final class TestInvokeContext implements ActiveInvokeContext { - - private final AtomicLong currentTransactionId = new AtomicLong(); - - private final ClientDescriptor clientDescriptor = new TestClientDescriptor(); - - public void incrementCurrentTransactionId() { - currentTransactionId.incrementAndGet(); - } - - @Override - public ClientDescriptor getClientDescriptor() { - return clientDescriptor; - } - - @Override - public ActiveInvokeChannel openInvokeChannel() { - return null; - } - - @Override - public ClientSourceId getClientSource() { - return clientDescriptor.getSourceId(); - } - - @Override - public long getCurrentTransactionId() { - return currentTransactionId.get(); - } - - @Override - public long getOldestTransactionId() { - return 1; - } - - @Override - public boolean isValidClientInformation() { - return true; - } - - @Override - public ClientSourceId makeClientSourceId(long l) { - return new TestClientSourceId(l); - } - - @Override - public int getConcurrencyKey() { - return 1; - } -} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java deleted file mode 100644 index 5f85554248..0000000000 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.offheap; - -import java.nio.ByteBuffer; -import java.util.Random; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.server.KeySegmentMapper; -import org.ehcache.clustered.server.store.ChainBuilder; -import org.ehcache.clustered.server.store.ElementBuilder; -import org.ehcache.clustered.common.internal.store.ServerStore; -import org.ehcache.clustered.server.store.ServerStoreTest; -import org.junit.Test; -import org.mockito.ArgumentMatchers; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; -import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; -import org.terracotta.offheapstore.exceptions.OversizeMappingException; -import org.terracotta.offheapstore.paging.UnlimitedPageSource; -import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.core.Is.is; -import org.junit.Assert; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.when; -import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; -import static org.terracotta.offheapstore.util.MemoryUnit.KILOBYTES; -import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; - -public class OffHeapServerStoreTest extends ServerStoreTest { - - private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); - - @SuppressWarnings("unchecked") - private OffHeapChainMap getOffHeapChainMapMock() { - return mock(OffHeapChainMap.class); - } - - @Override - public ServerStore newStore() { - return new OffHeapServerStore(new UnlimitedPageSource(new OffHeapBufferSource()), DEFAULT_MAPPER); - } - - @Override - public ChainBuilder newChainBuilder() { - return elements -> { - ByteBuffer[] buffers = new ByteBuffer[elements.length]; - for (int i = 0; i < buffers.length; i++) { - buffers[i] = elements[i].getPayload(); - } - return OffHeapChainMap.chain(buffers); - }; - } - - @Override - public ElementBuilder newElementBuilder() { - return payLoad -> () -> payLoad; - } - - @Test - public void testGetMaxSize() { - assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(2)), is(64L)); - assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(4)), is(128L)); - assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(16)), is(512L)); - assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(64)), is(2048L)); - assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(128)), is(4096L)); - assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(256)), is(8192L)); - assertThat(OffHeapServerStore.getMaxSize(MEGABYTES.toBytes(512)), is(8192L)); - - assertThat(OffHeapServerStore.getMaxSize(GIGABYTES.toBytes(2)), is(8192L)); - } - - @Test - public void test_append_doesNotConsumeBuffer_evenWhenOversizeMappingException() throws Exception { - OffHeapServerStore store = (OffHeapServerStore) spy(newStore()); - final OffHeapChainMap offHeapChainMap = getOffHeapChainMapMock(); - doThrow(OversizeMappingException.class).when(offHeapChainMap).append(any(Object.class), any(ByteBuffer.class)); - - when(store.segmentFor(anyLong())).then(new Answer() { - int invocations = 0; - @Override - public Object answer(InvocationOnMock invocation) throws Throwable { - if (invocations++ < 10) { - return offHeapChainMap; - } else { - return invocation.callRealMethod(); - } - } - }); - when(store.handleOversizeMappingException(anyLong())).thenReturn(true); - - ByteBuffer payload = createPayload(1L); - - store.append(1L, payload); - assertThat(payload.remaining(), is(8)); - } - - @Test - public void test_getAndAppend_doesNotConsumeBuffer_evenWhenOversizeMappingException() throws Exception { - OffHeapServerStore store = (OffHeapServerStore) spy(newStore()); - final OffHeapChainMap offHeapChainMap = getOffHeapChainMapMock(); - doThrow(OversizeMappingException.class).when(offHeapChainMap).getAndAppend(any(), any(ByteBuffer.class)); - - when(store.segmentFor(anyLong())).then(new Answer() { - int invocations = 0; - @Override - public Object answer(InvocationOnMock invocation) throws Throwable { - if (invocations++ < 10) { - return offHeapChainMap; - } else { - return invocation.callRealMethod(); - } - } - }); - when(store.handleOversizeMappingException(anyLong())).thenReturn(true); - - - ByteBuffer payload = createPayload(1L); - - store.getAndAppend(1L, payload); - assertThat(payload.remaining(), is(8)); - - Chain expected = newChainBuilder().build(newElementBuilder().build(payload), newElementBuilder().build(payload)); - Chain update = newChainBuilder().build(newElementBuilder().build(payload)); - store.replaceAtHead(1L, expected, update); - assertThat(payload.remaining(), is(8)); - } - - @Test - public void test_replaceAtHead_doesNotConsumeBuffer_evenWhenOversizeMappingException() throws Exception { - OffHeapServerStore store = (OffHeapServerStore) spy(newStore()); - final OffHeapChainMap offHeapChainMap = getOffHeapChainMapMock(); - doThrow(OversizeMappingException.class).when(offHeapChainMap).replaceAtHead(any(), any(Chain.class), any(Chain.class)); - - when(store.segmentFor(anyLong())).then(new Answer() { - int invocations = 0; - @Override - public Object answer(InvocationOnMock invocation) throws Throwable { - if (invocations++ < 10) { - return offHeapChainMap; - } else { - return invocation.callRealMethod(); - } - } - }); - when(store.handleOversizeMappingException(anyLong())).thenReturn(true); - - - ByteBuffer payload = createPayload(1L); - - Chain expected = newChainBuilder().build(newElementBuilder().build(payload), newElementBuilder().build(payload)); - Chain update = newChainBuilder().build(newElementBuilder().build(payload)); - store.replaceAtHead(1L, expected, update); - assertThat(payload.remaining(), is(8)); - } - - @Test - public void testCrossSegmentShrinking() { - long seed = System.nanoTime(); - Random random = new Random(seed); - try { - OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), MEGABYTES.toBytes(1L), MEGABYTES.toBytes(1)), DEFAULT_MAPPER); - - ByteBuffer smallValue = ByteBuffer.allocate(1024); - for (int i = 0; i < 10000; i++) { - try { - store.getAndAppend(random.nextInt(500), smallValue.duplicate()); - } catch (OversizeMappingException e) { - //ignore - } - } - - ByteBuffer largeValue = ByteBuffer.allocate(100 * 1024); - for (int i = 0; i < 10000; i++) { - try { - store.getAndAppend(random.nextInt(500), largeValue.duplicate()); - } catch (OversizeMappingException e) { - //ignore - } - } - } catch (Throwable t) { - throw (AssertionError) new AssertionError("Failed with seed " + seed).initCause(t); - } - } - - @Test - public void testServerSideUsageStats() { - - long maxBytes = MEGABYTES.toBytes(1); - OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), maxBytes, MEGABYTES.toBytes(1)), new KeySegmentMapper(16)); - - int oneKb = 1024; - long smallLoopCount = 5; - ByteBuffer smallValue = ByteBuffer.allocate(oneKb); - for (long i = 0; i < smallLoopCount; i++) { - store.getAndAppend(i, smallValue.duplicate()); - } - - Assert.assertThat(store.getAllocatedMemory(),lessThanOrEqualTo(maxBytes)); - Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); - Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(store.getOccupiedMemory())); - - //asserts above already guarantee that occupiedMemory <= maxBytes and that occupiedMemory <= allocatedMemory - Assert.assertThat(store.getOccupiedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); - - Assert.assertThat(store.getSize(), is(smallLoopCount)); - - int multiplier = 100; - long largeLoopCount = 5 + smallLoopCount; - ByteBuffer largeValue = ByteBuffer.allocate(multiplier * oneKb); - for (long i = smallLoopCount; i < largeLoopCount; i++) { - store.getAndAppend(i, largeValue.duplicate()); - } - - Assert.assertThat(store.getAllocatedMemory(),lessThanOrEqualTo(maxBytes)); - Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo( (smallLoopCount * oneKb) + ( (largeLoopCount - smallLoopCount) * oneKb * multiplier) )); - Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(store.getOccupiedMemory())); - - //asserts above already guarantee that occupiedMemory <= maxBytes and that occupiedMemory <= allocatedMemory - Assert.assertThat(store.getOccupiedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); - - Assert.assertThat(store.getSize(), is(smallLoopCount + (largeLoopCount - smallLoopCount))); - - } - -} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/ClusterTierActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/store/ClusterTierActiveEntityTest.java deleted file mode 100644 index 6fc35d6037..0000000000 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/store/ClusterTierActiveEntityTest.java +++ /dev/null @@ -1,1297 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.server.store; - -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.PoolAllocation; -import org.ehcache.clustered.common.PoolAllocation.Dedicated; -import org.ehcache.clustered.common.PoolAllocation.Shared; -import org.ehcache.clustered.common.ServerSideConfiguration; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; -import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; -import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; -import org.ehcache.clustered.common.internal.store.ClusterTierEntityConfiguration; -import org.ehcache.clustered.server.ConcurrencyStrategies; -import org.ehcache.clustered.server.EhcacheStateServiceImpl; -import org.ehcache.clustered.server.KeySegmentMapper; -import org.ehcache.clustered.server.ServerSideServerStore; -import org.ehcache.clustered.server.ServerStoreEvictionListener; -import org.ehcache.clustered.server.TestClientDescriptor; -import org.ehcache.clustered.server.TestInvokeContext; -import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; -import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; -import org.ehcache.clustered.server.state.EhcacheStateService; -import org.ehcache.clustered.server.state.InvalidationTracker; -import org.ehcache.clustered.server.store.ClusterTierActiveEntity.InvalidationHolder; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.terracotta.client.message.tracker.OOOMessageHandler; -import org.terracotta.client.message.tracker.OOOMessageHandlerConfiguration; -import org.terracotta.client.message.tracker.OOOMessageHandlerImpl; -import org.terracotta.entity.ClientCommunicator; -import org.terracotta.entity.ClientDescriptor; -import org.terracotta.entity.ConfigurationException; -import org.terracotta.entity.IEntityMessenger; -import org.terracotta.entity.PassiveSynchronizationChannel; -import org.terracotta.entity.ServiceConfiguration; -import org.terracotta.entity.ServiceRegistry; -import org.terracotta.management.service.monitoring.EntityManagementRegistry; -import org.terracotta.management.service.monitoring.EntityManagementRegistryConfiguration; -import org.terracotta.management.service.monitoring.EntityMonitoringService; -import org.terracotta.offheapresource.OffHeapResource; -import org.terracotta.offheapresource.OffHeapResourceIdentifier; -import org.terracotta.offheapresource.OffHeapResources; -import org.terracotta.offheapstore.util.MemoryUnit; - -import java.nio.ByteBuffer; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Random; -import java.util.Set; -import java.util.UUID; - -import static org.ehcache.clustered.common.internal.store.Util.createPayload; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.hasItems; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.sameInstance; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isNotNull; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; - -public class ClusterTierActiveEntityTest { - - private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); - private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); - - private String defaultStoreName = "store"; - private String defaultResource = "default"; - private String defaultSharedPool = "defaultShared"; - private String identifier = "identifier"; - private OffHeapIdentifierRegistry defaultRegistry; - private ServerStoreConfiguration defaultStoreConfiguration; - private ClusterTierEntityConfiguration defaultConfiguration; - - @Before - public void setUp() { - defaultRegistry = new OffHeapIdentifierRegistry(); - defaultRegistry.addResource(defaultResource, 10, MemoryUnit.MEGABYTES); - defaultStoreConfiguration = new ServerStoreConfigBuilder().dedicated(defaultResource, 1024, MemoryUnit.KILOBYTES).build(); - defaultConfiguration = new ClusterTierEntityConfiguration(identifier, defaultStoreName, - defaultStoreConfiguration); - } - - @Test(expected = ConfigurationException.class) - public void testConfigNull() throws Exception { - new ClusterTierActiveEntity(mock(ServiceRegistry.class), null, DEFAULT_MAPPER); - } - - @Test - public void testConnected() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - - ClientDescriptor client = new TestClientDescriptor(); - activeEntity.connected(client); - - Set connectedClients = activeEntity.getConnectedClients(); - assertThat(connectedClients, hasSize(1)); - assertThat(connectedClients, hasItem(client)); - } - - @Test - public void testConnectedAgain() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - - ClientDescriptor client = new TestClientDescriptor(); - activeEntity.connected(client); - - activeEntity.connected(client); - Set connectedClients = activeEntity.getConnectedClients(); - assertThat(connectedClients, hasSize(1)); - assertThat(connectedClients, hasItem(client)); - } - - @Test - public void testConnectedSecond() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - - ClientDescriptor client1 = new TestClientDescriptor(); - activeEntity.connected(client1); - - ClientDescriptor client2 = new TestClientDescriptor(); - activeEntity.connected(client2); - - Set connectedClients = activeEntity.getConnectedClients(); - assertThat(connectedClients, hasSize(2)); - assertThat(connectedClients, hasItems(client1, client2)); - } - - @Test - public void testDisconnectedNotConnected() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - - ClientDescriptor client1 = new TestClientDescriptor(); - activeEntity.disconnected(client1); - // Not expected to fail ... - } - - /** - * Ensures the disconnect of a connected client is properly tracked. - */ - @Test - public void testDisconnected() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - - ClientDescriptor client1 = new TestClientDescriptor(); - activeEntity.connected(client1); - activeEntity.disconnected(client1); - - assertThat(activeEntity.getConnectedClients(), hasSize(0)); - } - - /** - * Ensures the disconnect of a connected client is properly tracked and does not affect others. - */ - @Test - public void testDisconnectedSecond() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - - ClientDescriptor client1 = new TestClientDescriptor(); - activeEntity.connected(client1); - - ClientDescriptor client2 = new TestClientDescriptor(); - activeEntity.connected(client2); - - assertThat(activeEntity.getConnectedClients(), hasSize(2)); - - activeEntity.disconnected(client1); - - Set connectedClients = activeEntity.getConnectedClients(); - assertThat(connectedClients, hasSize(1)); - assertThat(connectedClients, hasItem(client2)); - } - - @Test - public void testLoadExistingRegistersEvictionListener() throws Exception { - EhcacheStateService stateService = mock(EhcacheStateService.class); - - ServerSideServerStore store = mock(ServerSideServerStore.class); - when(stateService.loadStore(eq(defaultStoreName), any())).thenReturn(store); - - IEntityMessenger entityMessenger = mock(IEntityMessenger.class); - ServiceRegistry registry = getCustomMockedServiceRegistry(stateService, null, entityMessenger, null, null); - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(registry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.loadExisting(); - verify(store).setEvictionListener(any(ServerStoreEvictionListener.class)); - } - - @Test - public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context1 = new TestInvokeContext(); - TestInvokeContext context2 = new TestInvokeContext(); - TestInvokeContext context3 = new TestInvokeContext(); - activeEntity.connected(context1.getClientDescriptor()); - activeEntity.connected(context2.getClientDescriptor()); - activeEntity.connected(context3.getClientDescriptor()); - - - // attach to the store - assertSuccess( - activeEntity.invokeActive(context1, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context2, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context3, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - - // perform an append - assertSuccess( - activeEntity.invokeActive(context1, new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))) - ); - - // assert that an invalidation request is pending - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); - InvalidationHolder invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); - assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(context1.getClientDescriptor())); - assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(2)); - assertThat(invalidationHolder.clientsHavingToInvalidate, containsInAnyOrder(context2.getClientDescriptor(), context3.getClientDescriptor())); - - // client 2 acks - assertSuccess( - activeEntity.invokeActive(context2, new ServerStoreOpMessage.ClientInvalidationAck(1L, activeEntity.getClientsWaitingForInvalidation().keySet().iterator().next())) - ); - - // assert that client 2 is not waited for anymore - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); - invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); - assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(context1.getClientDescriptor())); - assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(1)); - assertThat(invalidationHolder.clientsHavingToInvalidate, contains(context3.getClientDescriptor())); - - // client 3 acks - assertSuccess( - activeEntity.invokeActive(context3, new ServerStoreOpMessage.ClientInvalidationAck(1L, activeEntity.getClientsWaitingForInvalidation().keySet().iterator().next())) - ); - - // assert that the invalidation request is done since all clients disconnected - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); - } - - @Test - public void testClearInvalidationAcksTakenIntoAccount() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context1 = new TestInvokeContext(); - TestInvokeContext context2 = new TestInvokeContext(); - TestInvokeContext context3 = new TestInvokeContext(); - activeEntity.connected(context1.getClientDescriptor()); - activeEntity.connected(context2.getClientDescriptor()); - activeEntity.connected(context3.getClientDescriptor()); - - UUID client2Id = UUID.randomUUID(); - UUID client3Id = UUID.randomUUID(); - - // attach to the store - assertSuccess( - activeEntity.invokeActive(context1, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context2, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context3, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - - // perform a clear - assertSuccess( - activeEntity.invokeActive(context1, new ServerStoreOpMessage.ClearMessage()) - ); - - // assert that an invalidation request is pending - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); - InvalidationHolder invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); - assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(context1.getClientDescriptor())); - assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(2)); - assertThat(invalidationHolder.clientsHavingToInvalidate, containsInAnyOrder(context2.getClientDescriptor(), context3.getClientDescriptor())); - - // client 2 acks - assertSuccess( - activeEntity.invokeActive(context2, new ServerStoreOpMessage.ClientInvalidationAllAck(activeEntity.getClientsWaitingForInvalidation().keySet().iterator().next())) - ); - - // assert that client 2 is not waited for anymore - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); - invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); - assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(context1.getClientDescriptor())); - assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(1)); - assertThat(invalidationHolder.clientsHavingToInvalidate, contains(context3.getClientDescriptor())); - - // client 3 acks - assertSuccess( - activeEntity.invokeActive(context3, new ServerStoreOpMessage.ClientInvalidationAllAck(activeEntity.getClientsWaitingForInvalidation().keySet().iterator().next())) - ); - - // assert that the invalidation request is done since all clients disconnected - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); - } - - @Test - public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAccount() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context1 = new TestInvokeContext(); - TestInvokeContext context2 = new TestInvokeContext(); - TestInvokeContext context3 = new TestInvokeContext(); - activeEntity.connected(context1.getClientDescriptor()); - activeEntity.connected(context2.getClientDescriptor()); - activeEntity.connected(context3.getClientDescriptor()); - - // attach to the store - assertSuccess( - activeEntity.invokeActive(context1, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context2, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context3, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - - // perform an append - assertSuccess( - activeEntity.invokeActive(context1, new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))) - ); - - // disconnect client2 - activeEntity.disconnected(context2.getClientDescriptor()); - - // assert that client 2 is not waited for anymore - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); - InvalidationHolder invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); - assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(context1.getClientDescriptor())); - assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(1)); - assertThat(invalidationHolder.clientsHavingToInvalidate, contains(context3.getClientDescriptor())); - - // disconnect client3 - activeEntity.disconnected(context3.getClientDescriptor()); - - // assert that the invalidation request is done since all clients disconnected - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); - } - - @Test - public void testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAccount() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context1 = new TestInvokeContext(); - TestInvokeContext context2 = new TestInvokeContext(); - TestInvokeContext context3 = new TestInvokeContext(); - activeEntity.connected(context1.getClientDescriptor()); - activeEntity.connected(context2.getClientDescriptor()); - activeEntity.connected(context3.getClientDescriptor()); - - UUID client2Id = UUID.randomUUID(); - UUID client3Id = UUID.randomUUID(); - - // attach to the store - assertSuccess( - activeEntity.invokeActive(context1, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context2, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context3, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - - // perform an append - assertSuccess( - activeEntity.invokeActive(context1, new ServerStoreOpMessage.ClearMessage()) - ); - - // disconnect client2 - activeEntity.disconnected(context2.getClientDescriptor()); - - // assert that client 2 is not waited for anymore - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(1)); - InvalidationHolder invalidationHolder = activeEntity.getClientsWaitingForInvalidation().values().iterator().next(); - assertThat(invalidationHolder.clientDescriptorWaitingForInvalidation, is(context1.getClientDescriptor())); - assertThat(invalidationHolder.clientsHavingToInvalidate.size(), is(1)); - assertThat(invalidationHolder.clientsHavingToInvalidate, contains(context3.getClientDescriptor())); - - // disconnect client3 - activeEntity.disconnected(context3.getClientDescriptor()); - - // assert that the invalidation request is done since all clients disconnected - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); - } - - @Test - public void testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount() throws Exception { - ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfigBuilder() - .dedicated(defaultResource, 4, MemoryUnit.MEGABYTES) - .consistency(Consistency.STRONG) - .build(); - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, serverStoreConfiguration), DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context1 = new TestInvokeContext(); - TestInvokeContext context2 = new TestInvokeContext(); - TestInvokeContext context3 = new TestInvokeContext(); - activeEntity.connected(context1.getClientDescriptor()); - activeEntity.connected(context2.getClientDescriptor()); - activeEntity.connected(context3.getClientDescriptor()); - - UUID client2Id = UUID.randomUUID(); - UUID client3Id = UUID.randomUUID(); - - // attach to the store - assertSuccess( - activeEntity.invokeActive(context1, new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context2, new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context3, new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)) - ); - - // perform an append - assertSuccess( - activeEntity.invokeActive(context1, new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))) - ); - - // disconnect client1 - activeEntity.disconnected(context1.getClientDescriptor()); - - // assert that the invalidation request is done since the originating client disconnected - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); - } - - @Test - public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() throws Exception { - ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfigBuilder() - .dedicated(defaultResource, 4, MemoryUnit.MEGABYTES) - .consistency(Consistency.STRONG) - .build(); - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, serverStoreConfiguration), DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context1 = new TestInvokeContext(); - TestInvokeContext context2 = new TestInvokeContext(); - TestInvokeContext context3 = new TestInvokeContext(); - activeEntity.connected(context1.getClientDescriptor()); - activeEntity.connected(context2.getClientDescriptor()); - activeEntity.connected(context3.getClientDescriptor()); - - UUID client2Id = UUID.randomUUID(); - UUID client3Id = UUID.randomUUID(); - - // attach to the store - assertSuccess( - activeEntity.invokeActive(context1, new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context2, new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)) - ); - assertSuccess( - activeEntity.invokeActive(context3, new LifecycleMessage.ValidateServerStore(defaultStoreName, serverStoreConfiguration)) - ); - - // perform an append - assertSuccess( - activeEntity.invokeActive(context1, new ServerStoreOpMessage.ClearMessage()) - ); - - // disconnect client1 - activeEntity.disconnected(context1.getClientDescriptor()); - - // assert that the invalidation request is done since the originating client disconnected - assertThat(activeEntity.getClientsWaitingForInvalidation().size(), is(0)); - } - - @Test - public void testWithAttachmentSucceedsInvokingServerStoreOperation() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - // attach to the store - assertSuccess( - activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - - assertSuccess( - activeEntity.invokeActive(context, new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L))) - ); - - EhcacheEntityResponse response = activeEntity.invokeActive(context, new ServerStoreOpMessage.GetMessage(1L)); - assertThat(response, instanceOf(EhcacheEntityResponse.GetResponse.class)); - EhcacheEntityResponse.GetResponse getResponse = (EhcacheEntityResponse.GetResponse) response; - assertThat(getResponse.getChain().isEmpty(), is(false)); - } - - @Test - public void testCreateDedicatedServerStore() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder(defaultStoreName)); - - assertThat(defaultRegistry.getResource(defaultResource).getUsed(), is(MemoryUnit.MEGABYTES.toBytes(1L))); - - assertThat(activeEntity.getConnectedClients(), empty()); - assertThat(defaultRegistry.getStoreManagerService().getStores(), containsInAnyOrder(defaultStoreName)); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - assertSuccess( - activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration)) - ); - - assertThat(activeEntity.getConnectedClients(), contains(context.getClientDescriptor())); - - /* - * Ensure the dedicated resource pool remains after client disconnect. - */ - activeEntity.disconnected(context.getClientDescriptor()); - - assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder(defaultStoreName)); - - assertThat(activeEntity.getConnectedClients(), empty()); - assertThat(defaultRegistry.getStoreManagerService().getStores(), containsInAnyOrder(defaultStoreName)); - } - - @Test - public void testCreateDedicatedServerStoreExisting() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - ClusterTierActiveEntity otherEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - try { - otherEntity.createNew(); - fail("Duplicate creation should fail with an exception"); - } catch (ConfigurationException e) { - assertThat(e.getMessage(), containsString("already exists")); - } - } - - @Test - public void testValidateDedicatedServerStore() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - TestInvokeContext context2 = new TestInvokeContext(); - activeEntity.connected(context2.getClientDescriptor()); - - assertSuccess(activeEntity.invokeActive(context2, - new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration))); - - assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder(defaultStoreName)); - - assertThat(defaultRegistry.getResource(defaultResource).getUsed(), is(MemoryUnit.MEGABYTES.toBytes(1L))); - - assertThat(activeEntity.getConnectedClients(), hasSize(2)); - assertThat(activeEntity.getConnectedClients(), containsInAnyOrder(context.getClientDescriptor(), context2.getClientDescriptor())); - assertThat(defaultRegistry.getStoreManagerService().getStores(), contains(defaultStoreName)); - } - - @Test - public void testValidateDedicatedServerStoreBad() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - assertFailure(activeEntity.invokeActive(context, - new LifecycleMessage.ValidateServerStore(defaultStoreName, - new ServerStoreConfigBuilder() - .dedicated(defaultResource, 8, MemoryUnit.MEGABYTES) - .build())), - InvalidServerStoreConfigurationException.class); - - } - - @Test - public void testValidateUnknown() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - assertSuccess(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, - new ServerStoreConfigBuilder().unknown().build()))); - } - - @Test - public void testCreateSharedServerStore() throws Exception { - defaultRegistry.addSharedPool(defaultSharedPool, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); - ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() - .shared(defaultSharedPool) - .build(); - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER); - activeEntity.createNew(); - - - assertThat(defaultRegistry.getStoreManagerService().getStores(), containsInAnyOrder(defaultStoreName)); - - assertThat(defaultRegistry.getStoreManagerService().getSharedResourcePoolIds(), containsInAnyOrder(defaultSharedPool)); - assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), empty()); - - assertThat(defaultRegistry.getResource(defaultResource).getUsed(), is(MemoryUnit.MEGABYTES.toBytes(2L))); - - } - - @Test - public void testCreateSharedServerStoreExisting() throws Exception { - defaultRegistry.addSharedPool(defaultSharedPool, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); - ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() - .shared(defaultSharedPool) - .build(); - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER); - activeEntity.createNew(); - - ClusterTierActiveEntity otherEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER); - try { - otherEntity.createNew(); - fail("Duplicate creation should fail with an exception"); - } catch (ConfigurationException e) { - assertThat(e.getMessage(), containsString("already exists")); - } - } - - @Test - public void testValidateSharedServerStore() throws Exception { - defaultRegistry.addSharedPool(defaultSharedPool, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); - ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() - .shared(defaultSharedPool) - .build(); - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - assertSuccess(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, storeConfiguration))); - - assertThat(activeEntity.getConnectedClients(), contains(context.getClientDescriptor())); - } - - @Test - public void testValidateServerStore_DedicatedStoresDifferentSizes() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() - .dedicated(defaultResource, 2, MemoryUnit.MEGABYTES) - .build(); - - String expectedMessageContent = "Existing ServerStore configuration is not compatible with the desired configuration: " + - "\n\t" + - "resourcePoolType existing: " + - defaultStoreConfiguration.getPoolAllocation() + - ", desired: " + - storeConfiguration.getPoolAllocation(); - - assertFailure(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, storeConfiguration)), - InvalidServerStoreConfigurationException.class, expectedMessageContent); - } - - @Test - public void testValidateServerStore_DedicatedStoreResourceNamesDifferent() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() - .dedicated("otherResource", 1, MemoryUnit.MEGABYTES) - .build(); - - String expectedMessageContent = "Existing ServerStore configuration is not compatible with the desired configuration: " + - "\n\t" + - "resourcePoolType existing: " + - defaultStoreConfiguration.getPoolAllocation() + - ", desired: " + - storeConfiguration.getPoolAllocation(); - - assertFailure(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, storeConfiguration)), - InvalidServerStoreConfigurationException.class, expectedMessageContent); - } - - @Test - public void testValidateServerStore_DifferentSharedPools() throws Exception { - defaultRegistry.addSharedPool(defaultSharedPool, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); - ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() - .shared(defaultSharedPool) - .build(); - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - ServerStoreConfiguration otherConfiguration = new ServerStoreConfigBuilder() - .shared("other") - .build(); - - String expectedMessageContent = "Existing ServerStore configuration is not compatible with the desired configuration: " + - "\n\t" + - "resourcePoolType existing: " + - storeConfiguration.getPoolAllocation() + - ", desired: " + - otherConfiguration.getPoolAllocation(); - - assertFailure(activeEntity.invokeActive(context, - new LifecycleMessage.ValidateServerStore(defaultStoreName, - otherConfiguration)),InvalidServerStoreConfigurationException.class,expectedMessageContent); - } - - @Test - public void testDestroyServerStore() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - activeEntity.destroy(); - - assertThat(defaultRegistry.getResource(defaultResource).getUsed(), is(0L)); - - assertThat(defaultRegistry.getStoreManagerService().getStores(), empty()); - assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), empty()); - } - - /** - * Ensures shared pool and store (cache) name spaces are independent. - * The cache alias is used as the name for a {@code ServerStore} instance; this name can be - * the same as, but is independent of, the shared pool name. The - */ - @Test - public void testSharedPoolCacheNameCollision() throws Exception { - defaultRegistry.addSharedPool(defaultStoreName, MemoryUnit.MEGABYTES.toBytes(2), defaultResource); - - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - assertThat(defaultRegistry.getStoreManagerService().getSharedResourcePoolIds(), contains(defaultStoreName)); - assertThat(defaultRegistry.getStoreManagerService().getDedicatedResourcePoolIds(), contains(defaultStoreName)); - assertThat(defaultRegistry.getStoreManagerService().getStores(), containsInAnyOrder(defaultStoreName)); - } - - @Test - public void testCreateNonExistentSharedPool() throws Exception { - ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() - .shared(defaultSharedPool) - .build(); - - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER); - try { - activeEntity.createNew(); - fail("Creation with non-existent shared pool should have failed"); - } catch (ConfigurationException e) { - assertThat(e.getMessage(), containsString("undefined")); - } - } - - @Test - public void testCreateUnknownServerResource() throws Exception { - ServerStoreConfiguration storeConfiguration = new ServerStoreConfigBuilder() - .dedicated("unknown", 2, MemoryUnit.MEGABYTES) - .build(); - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, - new ClusterTierEntityConfiguration(identifier, defaultStoreName, storeConfiguration), DEFAULT_MAPPER); - try { - activeEntity.createNew(); - fail("Creation with non-existent shared pool should have failed"); - } catch (ConfigurationException e) { - assertThat(e.getMessage(), containsString("Non-existent server side resource")); - } - } - - @Test - public void testSyncToPassiveNoData() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - - assertSuccess(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration))); - - @SuppressWarnings("unchecked") - PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); - activeEntity.synchronizeKeyToPassive(syncChannel, 3); - - verifyZeroInteractions(syncChannel); - } - - @Test - public void testSyncToPassiveBatchedByDefault() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - - assertSuccess(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration))); - - ByteBuffer payload = ByteBuffer.allocate(512); - // Put keys that maps to the same concurrency key - assertSuccess(activeEntity.invokeActive(context, new ServerStoreOpMessage.AppendMessage(1L, payload))); - assertSuccess(activeEntity.invokeActive(context, new ServerStoreOpMessage.AppendMessage(-2L, payload))); - assertSuccess(activeEntity.invokeActive(context, new ServerStoreOpMessage.AppendMessage(17L, payload))); - - @SuppressWarnings("unchecked") - PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); - activeEntity.synchronizeKeyToPassive(syncChannel, 3); - - verify(syncChannel).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); - } - - @Test - public void testDataSyncToPassiveCustomBatchSize() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - - assertSuccess(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration))); - - ByteBuffer payload = ByteBuffer.allocate(512); - // Put keys that maps to the same concurrency key - ServerStoreOpMessage.AppendMessage testMessage = new ServerStoreOpMessage.AppendMessage(1L, payload); - activeEntity.invokeActive(context, testMessage); - activeEntity.invokeActive(context, new ServerStoreOpMessage.AppendMessage(-2L, payload)); - activeEntity.invokeActive(context, new ServerStoreOpMessage.AppendMessage(17L, payload)); - activeEntity.invokeActive(context, new ServerStoreOpMessage.AppendMessage(33L, payload)); - - System.setProperty(ClusterTierActiveEntity.SYNC_DATA_SIZE_PROP, "512"); - ConcurrencyStrategies.DefaultConcurrencyStrategy concurrencyStrategy = new ConcurrencyStrategies.DefaultConcurrencyStrategy(DEFAULT_MAPPER); - int concurrencyKey = concurrencyStrategy.concurrencyKey(testMessage); - try { - @SuppressWarnings("unchecked") - PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); - activeEntity.synchronizeKeyToPassive(syncChannel, concurrencyKey); - - verify(syncChannel, atLeast(2)).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); - } finally { - System.clearProperty(ClusterTierActiveEntity.SYNC_DATA_SIZE_PROP); - } - } - - @Test - public void testLoadExistingRecoversInflightInvalidationsForEventualCache() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - EhcacheStateServiceImpl ehcacheStateService = defaultRegistry.getStoreManagerService(); - ehcacheStateService.createStore(defaultStoreName, defaultStoreConfiguration, false); //Passive would have done this before failover - - InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(defaultStoreName); - - Random random = new Random(); - random.ints(0, 100).limit(10).forEach(invalidationTracker::trackHashInvalidation); - - activeEntity.loadExisting(); - - assertThat(activeEntity.getInflightInvalidations().isEmpty(), is(false)); - } - - @Test - @SuppressWarnings("unchecked") - public void testReplicationMessageAndOriginalServerStoreOpMessageHasSameConcurrency() throws Exception { - - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - IEntityMessenger entityMessenger = defaultRegistry.getEntityMessenger(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - assertSuccess(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration))); - - reset(entityMessenger); - EhcacheEntityMessage getAndAppend = new ServerStoreOpMessage.GetAndAppendMessage(1L, createPayload(1L)); - activeEntity.invokeActive(context, getAndAppend); - - ArgumentCaptor captor = ArgumentCaptor.forClass(PassiveReplicationMessage.ChainReplicationMessage.class); - verify(entityMessenger).messageSelfAndDeferRetirement(isNotNull(), captor.capture()); - PassiveReplicationMessage.ChainReplicationMessage replicatedMessage = captor.getValue(); - - assertThat(replicatedMessage.concurrencyKey(), is(((ConcurrentEntityMessage) getAndAppend).concurrencyKey())); - } - - @Test - public void testInvalidMessageThrowsError() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - try { - activeEntity.invokeActive(context, new InvalidMessage()); - fail("Invalid message should result in AssertionError"); - } catch (AssertionError e) { - assertThat(e.getMessage(), containsString("Unsupported")); - } - } - - @Test - public void testActiveTracksMessageDuplication() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - activeEntity.createNew(); - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - assertSuccess(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration))); - - ServerStoreOpMessage.AppendMessage message = new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L)); - activeEntity.invokeActive(context, message); - - // create another message that has the same message ID - message = new ServerStoreOpMessage.AppendMessage(2L, createPayload(1L)); - - activeEntity.invokeActive(context, message); // this invoke should be rejected due to duplicate message id - - ServerStoreOpMessage.GetMessage getMessage = new ServerStoreOpMessage.GetMessage(2L); - EhcacheEntityResponse.GetResponse response = (EhcacheEntityResponse.GetResponse) activeEntity.invokeActive(context, getMessage); - assertThat(response.getChain().isEmpty(), is(false)); - } - - @Test - public void testActiveMessageTracking() throws Exception { - ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER); - EhcacheStateServiceImpl ehcacheStateService = defaultRegistry.getStoreManagerService(); - ehcacheStateService.createStore(defaultStoreName, defaultStoreConfiguration, false); //hack to enable message tracking on active - - TestInvokeContext context = new TestInvokeContext(); - activeEntity.connected(context.getClientDescriptor()); - - assertSuccess(activeEntity.invokeActive(context, new LifecycleMessage.ValidateServerStore(defaultStoreName, defaultStoreConfiguration))); - - context.incrementCurrentTransactionId(); - - ServerStoreOpMessage.AppendMessage message = new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L)); - EhcacheEntityResponse expected = activeEntity.invokeActive(context, message); - - // create another message that has the same message ID - message = new ServerStoreOpMessage.AppendMessage(1L, createPayload(1L)); - - EhcacheEntityResponse actual = activeEntity.invokeActive(context, message); // this invoke should be rejected due to duplicate message id - assertThat(actual, sameInstance(expected)); - } - - private void assertSuccess(EhcacheEntityResponse response) throws Exception { - if (!EhcacheResponseType.SUCCESS.equals(response.getResponseType())) { - throw ((EhcacheEntityResponse.Failure) response).getCause(); - } - } - - private void assertFailure(EhcacheEntityResponse response, Class expectedException) { - assertThat(response.getResponseType(), is(EhcacheResponseType.FAILURE)); - assertThat(((EhcacheEntityResponse.Failure) response).getCause(), is(instanceOf(expectedException))); - } - - private void assertFailure(EhcacheEntityResponse response, Class expectedException, String expectedMessageContent) { - assertThat(response.getResponseType(), is(EhcacheResponseType.FAILURE)); - Exception cause = ((EhcacheEntityResponse.Failure) response).getCause(); - assertThat(cause, is(instanceOf(expectedException))); - assertThat(cause.getMessage(), containsString(expectedMessageContent)); - } - - @SuppressWarnings("unchecked") - ServiceRegistry getCustomMockedServiceRegistry(EhcacheStateService stateService, ClientCommunicator clientCommunicator, - IEntityMessenger entityMessenger, EntityMonitoringService entityMonitoringService, - EntityManagementRegistry entityManagementRegistry) { - return new ServiceRegistry() { - @Override - public T getService(final ServiceConfiguration configuration) { - Class serviceType = configuration.getServiceType(); - if (serviceType.isAssignableFrom(ClientCommunicator.class)) { - return (T) clientCommunicator; - } else if (serviceType.isAssignableFrom(IEntityMessenger.class)) { - return (T) entityMessenger; - } else if (serviceType.isAssignableFrom(EhcacheStateService.class)) { - return (T) stateService; - } else if (serviceType.isAssignableFrom(EntityMonitoringService.class)) { - return (T) entityMonitoringService; - } else if (serviceType.isAssignableFrom(EntityManagementRegistry.class)) { - return (T) entityManagementRegistry; - } else if (serviceType.isAssignableFrom(OOOMessageHandler.class)) { - return (T) new OOOMessageHandlerImpl(message -> true, 1, message -> 0); - } - throw new AssertionError("Unknown service configuration of type: " + serviceType); - } - - @Override - public Collection getServices(ServiceConfiguration configuration) { - return Collections.singleton(getService(configuration)); - } - }; - } - - /** - * Builder for {@link ServerStoreConfiguration} instances. - */ - private static final class ServerStoreConfigBuilder { - private PoolAllocation poolAllocation; - private String storedKeyType = "java.lang.Long"; - private String storedValueType = "java.lang.String"; - private String keySerializerType; - private String valueSerializerType; - private Consistency consistency = Consistency.EVENTUAL; - - - ServerStoreConfigBuilder consistency(Consistency consistency) { - this.consistency = consistency; - return this; - } - - ServerStoreConfigBuilder dedicated(String resourceName, int size, MemoryUnit unit) { - this.poolAllocation = new Dedicated(resourceName, unit.toBytes(size)); - return this; - } - - ServerStoreConfigBuilder shared(String resourcePoolName) { - this.poolAllocation = new Shared(resourcePoolName); - return this; - } - - ServerStoreConfigBuilder unknown() { - this.poolAllocation = new PoolAllocation.Unknown(); - return this; - } - - ServerStoreConfigBuilder setStoredKeyType(Class storedKeyType) { - this.storedKeyType = storedKeyType.getName(); - return this; - } - - ServerStoreConfigBuilder setStoredValueType(Class storedValueType) { - this.storedValueType = storedValueType.getName(); - return this; - } - - ServerStoreConfigBuilder setKeySerializerType(Class keySerializerType) { - this.keySerializerType = keySerializerType.getName(); - return this; - } - - ServerStoreConfigBuilder setValueSerializerType(Class valueSerializerType) { - this.valueSerializerType = valueSerializerType.getName(); - return this; - } - - ServerStoreConfiguration build() { - return new ServerStoreConfiguration(poolAllocation, storedKeyType, storedValueType, - keySerializerType, valueSerializerType, consistency); - } - } - - /** - * Provides a {@link ServiceRegistry} for off-heap resources. This is a "server-side" object. - */ - private static final class OffHeapIdentifierRegistry implements ServiceRegistry { - - private final long offHeapSize; - private final String defaultResource; - - private EhcacheStateServiceImpl storeManagerService; - - private IEntityMessenger entityMessenger; - - private ClientCommunicator clientCommunicator; - - private final Map pools = - new HashMap<>(); - - private final Map sharedPools = new HashMap<>(); - - /** - * Instantiate an "open" {@code ServiceRegistry}. Using this constructor creates a - * registry that creates {@code OffHeapResourceIdentifier} entries as they are - * referenced. - */ - private OffHeapIdentifierRegistry(String defaultResource) { - this.defaultResource = defaultResource; - this.offHeapSize = 0; - } - - /** - * Instantiate a "closed" {@code ServiceRegistry}. Using this constructor creates a - * registry that only returns {@code OffHeapResourceIdentifier} entries supplied - * through the {@link #addResource} method. - */ - private OffHeapIdentifierRegistry() { - this(null); - } - - private void addSharedPool(String name, long size, String resourceName) { - sharedPools.put(name, new ServerSideConfiguration.Pool(size, resourceName)); - } - - /** - * Adds an off-heap resource of the given name to this registry. - * - * @param name the name of the resource - * @param offHeapSize the off-heap size - * @param unit the size unit type - * @return {@code this} {@code OffHeapIdentifierRegistry} - */ - private OffHeapIdentifierRegistry addResource(String name, int offHeapSize, MemoryUnit unit) { - this.pools.put(OffHeapResourceIdentifier.identifier(name), new TestOffHeapResource(unit.toBytes(offHeapSize))); - return this; - } - - private TestOffHeapResource getResource(String resourceName) { - return this.pools.get(OffHeapResourceIdentifier.identifier(resourceName)); - } - - private EhcacheStateServiceImpl getStoreManagerService() { - return this.storeManagerService; - } - - - private IEntityMessenger getEntityMessenger() { - return entityMessenger; - } - - private ClientCommunicator getClientCommunicator() { - return clientCommunicator; - } - - private static Set getIdentifiers(Set pools) { - Set names = new HashSet<>(); - for (OffHeapResourceIdentifier identifier: pools) { - names.add(identifier.getName()); - } - - return Collections.unmodifiableSet(names); - } - - @SuppressWarnings("unchecked") - @Override - public T getService(ServiceConfiguration serviceConfiguration) { - if (serviceConfiguration.getServiceType().equals(ClientCommunicator.class)) { - if (this.clientCommunicator == null) { - this.clientCommunicator = mock(ClientCommunicator.class); - } - return (T) this.clientCommunicator; - } else if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { - if (storeManagerService == null) { - this.storeManagerService = new EhcacheStateServiceImpl(new OffHeapResources() { - @Override - public Set getAllIdentifiers() { - return pools.keySet(); - } - - @Override - public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { - return pools.get(identifier); - } - }, new ServerSideConfiguration(sharedPools), DEFAULT_MAPPER, service -> {}); - try { - this.storeManagerService.configure(); - } catch (ConfigurationException e) { - throw new AssertionError("Test setup failed!"); - } - } - return (T) (this.storeManagerService); - } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { - if (this.entityMessenger == null) { - this.entityMessenger = mock(IEntityMessenger.class); - } - return (T) this.entityMessenger; - } else if(serviceConfiguration instanceof EntityManagementRegistryConfiguration) { - return null; - } else if(serviceConfiguration instanceof OOOMessageHandlerConfiguration) { - OOOMessageHandlerConfiguration oooMessageHandlerConfiguration = (OOOMessageHandlerConfiguration) serviceConfiguration; - return (T) new OOOMessageHandlerImpl(oooMessageHandlerConfiguration.getTrackerPolicy(), - oooMessageHandlerConfiguration.getSegments(), oooMessageHandlerConfiguration.getSegmentationStrategy()); - } - - throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); - } - - @Override - public Collection getServices(ServiceConfiguration configuration) { - return Collections.singleton(getService(configuration)); - } - } - - /** - * Testing implementation of {@link OffHeapResource}. This is a "server-side" object. - */ - private static final class TestOffHeapResource implements OffHeapResource { - - private long capacity; - private long used; - - private TestOffHeapResource(long capacity) { - this.capacity = capacity; - } - - @Override - public boolean reserve(long size) throws IllegalArgumentException { - if (size < 0) { - throw new IllegalArgumentException(); - } - if (size > available()) { - return false; - } else { - this.used += size; - return true; - } - } - - @Override - public void release(long size) throws IllegalArgumentException { - if (size < 0) { - throw new IllegalArgumentException(); - } - this.used -= size; - } - - @Override - public long available() { - return this.capacity - this.used; - } - - @Override - public long capacity() { - return capacity; - } - - private long getUsed() { - return used; - } - } -} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/ServerStoreTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/store/ServerStoreTest.java deleted file mode 100644 index 62dbfc848f..0000000000 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/store/ServerStoreTest.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.store; - - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.common.internal.store.ServerStore; -import org.hamcrest.MatcherAssert; -import org.hamcrest.core.Is; -import org.junit.Test; - -import java.nio.ByteBuffer; -import java.util.Iterator; - -import static org.junit.Assert.assertThat; -import static org.hamcrest.Matchers.is; - -/** - * Verify Server Store - */ -public abstract class ServerStoreTest { - - public abstract ServerStore newStore(); - - public abstract ChainBuilder newChainBuilder(); - - public abstract ElementBuilder newElementBuilder(); - - private final ChainBuilder chainBuilder = newChainBuilder(); - private final ElementBuilder elementBuilder = newElementBuilder(); - - private static void populateStore(ServerStore store) throws Exception { - for(int i = 1 ; i <= 16; i++) { - store.append(i, createPayload(i)); - } - } - - private static long readPayLoad(ByteBuffer byteBuffer) { - return byteBuffer.getLong(); - } - - protected static ByteBuffer createPayload(long key) { - ByteBuffer byteBuffer = ByteBuffer.allocate(8).putLong(key); - byteBuffer.flip(); - return byteBuffer; - } - - private static void assertChainAndReverseChainOnlyHave(Chain chain, long... payLoads) { - Iterator elements = chain.iterator(); - for (long payLoad : payLoads) { - assertThat(readPayLoad(elements.next().getPayload()), is(Long.valueOf(payLoad))); - } - assertThat(elements.hasNext(), is(false)); - - Iterator reverseElements = chain.reverseIterator(); - - for (int i = payLoads.length -1; i >= 0; i--) { - assertThat(readPayLoad(reverseElements.next().getPayload()), is(Long.valueOf(payLoads[i]))); - } - assertThat(reverseElements.hasNext(), is(false)); - } - - @Test - public void testGetNoMappingExists() throws Exception { - ServerStore store = newStore(); - Chain chain = store.get(1); - assertThat(chain.isEmpty(), is(true)); - assertThat(chain.iterator().hasNext(), is(false)); - } - - @Test - public void testGetMappingExists() throws Exception { - ServerStore store = newStore(); - populateStore(store); - Chain chain = store.get(1); - assertThat(chain.isEmpty(), is(false)); - assertChainAndReverseChainOnlyHave(chain, 1); - } - - @Test - public void testAppendNoMappingExists() throws Exception { - ServerStore store = newStore(); - store.append(1, createPayload(1)); - Chain chain = store.get(1); - assertThat(chain.isEmpty(), is(false)); - assertChainAndReverseChainOnlyHave(chain, 1); - } - - @Test - public void testAppendMappingExists() throws Exception { - ServerStore store = newStore(); - populateStore(store); - store.append(2, createPayload(22)); - Chain chain = store.get(2); - assertThat(chain.isEmpty(), is(false)); - assertChainAndReverseChainOnlyHave(chain, 2, 22); - } - - @Test - public void testGetAndAppendNoMappingExists() throws Exception { - ServerStore store = newStore(); - Chain chain = store.getAndAppend(1, createPayload(1)); - assertThat(chain.isEmpty(), is(true)); - chain = store.get(1); - assertChainAndReverseChainOnlyHave(chain, 1); - } - - @Test - public void testGetAndAppendMappingExists() throws Exception { - ServerStore store = newStore(); - populateStore(store); - Chain chain = store.getAndAppend(1, createPayload(22)); - for (Element element : chain) { - assertThat(readPayLoad(element.getPayload()), is(Long.valueOf(1))); - } - chain = store.get(1); - assertChainAndReverseChainOnlyHave(chain, 1, 22); - } - - @Test - public void testReplaceAtHeadSucceedsMappingExistsHeadMatchesStrictly() throws Exception { - ServerStore store = newStore(); - populateStore(store); - Chain existingMapping = store.get(1); - - store.replaceAtHead(1, existingMapping, chainBuilder.build(elementBuilder.build(createPayload(11)))); - Chain chain = store.get(1); - assertChainAndReverseChainOnlyHave(chain, 11); - - store.append(2, createPayload(22)); - store.append(2, createPayload(222)); - - existingMapping = store.get(2); - - store.replaceAtHead(2, existingMapping, chainBuilder.build(elementBuilder.build(createPayload(2222)))); - - chain = store.get(2); - - assertChainAndReverseChainOnlyHave(chain, 2222); - } - - @Test - public void testReplaceAtHeadSucceedsMappingExistsHeadMatches() throws Exception { - ServerStore store = newStore(); - populateStore(store); - - Chain existingMapping = store.get(1); - - store.append(1, createPayload(11)); - - store.replaceAtHead(1, existingMapping, chainBuilder.build(elementBuilder.build(createPayload(111)))); - Chain chain = store.get(1); - - assertChainAndReverseChainOnlyHave(chain, 111, 11); - - store.append(2, createPayload(22)); - existingMapping = store.get(2); - - store.append(2, createPayload(222)); - - store.replaceAtHead(2, existingMapping, chainBuilder.build(elementBuilder.build(createPayload(2222)))); - - chain = store.get(2); - assertChainAndReverseChainOnlyHave(chain, 2222, 222); - } - - @Test - public void testReplaceAtHeadIgnoredMappingExistsHeadMisMatch() throws Exception { - ServerStore store = newStore(); - populateStore(store); - - store.append(1, createPayload(11)); - store.append(1, createPayload(111)); - - Chain mappingReadFirst = store.get(1); - store.replaceAtHead(1, mappingReadFirst, chainBuilder.build(elementBuilder.build(createPayload(111)))); - - Chain current = store.get(1); - assertChainAndReverseChainOnlyHave(current, 111); - - store.append(1, createPayload(1111)); - store.replaceAtHead(1, mappingReadFirst, chainBuilder.build(elementBuilder.build(createPayload(11111)))); - - Chain toVerify = store.get(1); - - assertChainAndReverseChainOnlyHave(toVerify, 111, 1111); - } - - - @Test - public void test_append_doesNotConsumeBuffer() throws Exception { - ServerStore store = newStore(); - ByteBuffer payload = createPayload(1L); - - store.append(1L, payload); - MatcherAssert.assertThat(payload.remaining(), Is.is(8)); - } - - @Test - public void test_getAndAppend_doesNotConsumeBuffer() throws Exception { - ServerStore store = newStore(); - ByteBuffer payload = createPayload(1L); - - store.getAndAppend(1L, payload); - MatcherAssert.assertThat(payload.remaining(), Is.is(8)); - } - - @Test - public void test_replaceAtHead_doesNotConsumeBuffer() throws Exception { - ServerStore store = newStore(); - ByteBuffer payload = createPayload(1L); - - Chain expected = newChainBuilder().build(newElementBuilder().build(payload), newElementBuilder().build(payload)); - Chain update = newChainBuilder().build(newElementBuilder().build(payload)); - store.replaceAtHead(1L, expected, update); - MatcherAssert.assertThat(payload.remaining(), Is.is(8)); - } - -} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreImpl.java b/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreImpl.java deleted file mode 100644 index 287bee99a7..0000000000 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/store/impl/ReferenceStoreImpl.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.store.impl; - -import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Element; -import org.ehcache.clustered.common.internal.store.ServerStore; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; - -/** - * Implements {@link ServerStore} - */ -public class ReferenceStoreImpl implements ServerStore { - - private final Map map = new HashMap<>(); - private final List locks = new ArrayList<>(); - private final AtomicLong sequenceGenerator = new AtomicLong(); - - private final int LOCK_COUNT = 16; - - public ReferenceStoreImpl() { - for (int i = 0; i < LOCK_COUNT; i++) { - locks.add(new ReentrantReadWriteLock()); - } - } - - private ReadWriteLock getLock(long key) { - return locks.get((int)Math.abs(key)%LOCK_COUNT); - } - - @Override - public Chain get(long key) { - Lock lock = getLock(key).readLock(); - lock.lock(); - try { - Chain chain = map.get(key); - if (chain != null) { - return chain; - } else { - return new HeapChainImpl(); - } - } finally { - lock.unlock(); - } - } - - @Override - public void append(long key, ByteBuffer payLoad) { - Lock lock = getLock(key).writeLock(); - lock.lock(); - try { - Chain mapping = map.get(key); - if (mapping == null) { - map.put(key, new HeapChainImpl(new HeapElementImpl(sequenceGenerator.incrementAndGet(), payLoad))); - return; - } - Chain newMapping = cast(mapping).append(new HeapElementImpl(sequenceGenerator.incrementAndGet(), payLoad)); - map.put(key, newMapping); - } finally { - lock.unlock(); - } - } - - @Override - public Chain getAndAppend(long key, ByteBuffer payLoad) { - Lock lock = getLock(key).writeLock(); - lock.lock(); - try { - Chain mapping = map.get(key); - if (mapping != null) { - Chain newMapping = cast(mapping).append(new HeapElementImpl(sequenceGenerator.incrementAndGet(), payLoad)); - map.put(key, newMapping); - return mapping; - } else { - map.put(key, new HeapChainImpl(new HeapElementImpl(sequenceGenerator.incrementAndGet(), payLoad))); - return new HeapChainImpl(); - } - } finally { - lock.unlock(); - } - } - - @Override - public void replaceAtHead(long key, Chain expect, Chain update) { - Lock lock = getLock(key).writeLock(); - lock.lock(); - try { - Chain mapping = map.get(key); - if (mapping == null) { - return; - } - boolean replaceable = true; - List elements = new LinkedList<>(); - Iterator current = mapping.iterator(); - Iterator expected = expect.iterator(); - while (expected.hasNext()) { - if (current.hasNext()) { - HeapElementImpl expectedLink = (HeapElementImpl)expected.next(); - if (expectedLink.getSequenceNumber() != ((HeapElementImpl)current.next()).getSequenceNumber()) { - replaceable = false; - break; - } - } else { - replaceable = false; - break; - } - } - - if (replaceable) { - for (Element element : update) { - elements.add(element); - } - while(current.hasNext()) { - elements.add(current.next()); - } - map.put(key, new HeapChainImpl(elements.toArray(new Element[elements.size()]))); - } - - } finally { - lock.unlock(); - } - } - - private void writeLockAll() { - for (ReadWriteLock lock : locks) { - lock.writeLock().lock(); - } - } - - private void writeUnlockAll() { - for (ReadWriteLock lock : locks) { - lock.writeLock().unlock(); - } - } - - @Override - public void clear() { - writeLockAll(); - try { - map.clear(); - } finally { - writeUnlockAll(); - } - } - - private HeapChainImpl cast(Chain chain) { - return (HeapChainImpl)chain; - } - -} diff --git a/clustered/test-utils/build.gradle b/clustered/test-utils/build.gradle new file mode 100644 index 0000000000..067d92bd33 --- /dev/null +++ b/clustered/test-utils/build.gradle @@ -0,0 +1,8 @@ +plugins { + id 'org.ehcache.build.conventions.java-library' +} + +dependencies { + api project(':clustered:ehcache-common') + api "org.hamcrest:hamcrest-core:$hamcrestVersion" +} diff --git a/clustered/test-utils/config/checkstyle-suppressions.xml b/clustered/test-utils/config/checkstyle-suppressions.xml new file mode 100644 index 0000000000..cb41d0baf7 --- /dev/null +++ b/clustered/test-utils/config/checkstyle-suppressions.xml @@ -0,0 +1,9 @@ + + + + + + + diff --git a/clustered/test-utils/src/main/java/org/ehcache/clustered/ChainUtils.java b/clustered/test-utils/src/main/java/org/ehcache/clustered/ChainUtils.java new file mode 100644 index 0000000000..b6001e1ac2 --- /dev/null +++ b/clustered/test-utils/src/main/java/org/ehcache/clustered/ChainUtils.java @@ -0,0 +1,109 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.SequencedElement; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +public class ChainUtils { + + public static long readPayload(ByteBuffer byteBuffer) { + return byteBuffer.getLong(); + } + + public static ByteBuffer createPayload(long key) { + ByteBuffer byteBuffer = ByteBuffer.allocate(8).putLong(key); + byteBuffer.flip(); + return byteBuffer.asReadOnlyBuffer(); + } + + public static ByteBuffer createPayload(long key, int payloadSize) { + if (payloadSize < 8) { + throw new IllegalArgumentException("payload must be at least 8 bytes long"); + } + ByteBuffer byteBuffer = ByteBuffer.allocate(payloadSize); + byteBuffer.putLong(key); + for (int i = 0; i < payloadSize - 8; i++) { + byteBuffer.put((byte) 0); + } + byteBuffer.flip(); + return byteBuffer.asReadOnlyBuffer(); + } + + public static Element getElement(final ByteBuffer payload) { + return payload::asReadOnlyBuffer; + } + + public static Chain chainOf(ByteBuffer... buffers) { + List elements = new ArrayList<>(); + for (final ByteBuffer buffer : buffers) { + elements.add(getElement(buffer)); + } + return getChain(elements); + } + + public static Chain sequencedChainOf(ByteBuffer ... buffers) { + List elements = new ArrayList<>(); + long counter = 0; + for (final ByteBuffer buffer : buffers) { + elements.add(getElement(counter++, buffer)); + } + return getChain(elements); + } + + private static Chain getChain(final List elements) { + return new Chain() { + private final List list = Collections.unmodifiableList(elements); + + @Override + public boolean isEmpty() { + return list.isEmpty(); + } + + @Override + public int length() { + return list.size(); + } + + @Override + public Iterator iterator() { + return list.iterator(); + } + }; + } + + public static SequencedElement getElement(final long sequence, final ByteBuffer payload) { + return new SequencedElement() { + @Override + public long getSequenceNumber() { + return sequence; + } + + @Override + public ByteBuffer getPayload() { + return payload.asReadOnlyBuffer(); + } + }; + } +} diff --git a/clustered/test-utils/src/main/java/org/ehcache/clustered/Matchers.java b/clustered/test-utils/src/main/java/org/ehcache/clustered/Matchers.java new file mode 100644 index 0000000000..344af5e0b3 --- /dev/null +++ b/clustered/test-utils/src/main/java/org/ehcache/clustered/Matchers.java @@ -0,0 +1,120 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.common.internal.store.SequencedElement; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import static org.ehcache.clustered.ChainUtils.readPayload; + +public class Matchers { + + public static Matcher> entry(Matcher key, Matcher value) { + return new TypeSafeMatcher>() { + @Override + protected boolean matchesSafely(Map.Entry item) { + return key.matches(item.getKey()) && value.matches(item.getValue()); + } + + @Override + public void describeTo(Description description) { + description.appendText("an entry with key ").appendDescriptionOf(key).appendText(" and value ").appendDescriptionOf(value); + } + }; + } + + public static Matcher matchesChain(Chain expected) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(Chain item) { + Iterator expectedIt = expected.iterator(); + Iterator itemIt = item.iterator(); + + while (expectedIt.hasNext() && itemIt.hasNext()) { + Element expectedNext = expectedIt.next(); + Element itemNext = itemIt.next(); + + if (!expectedNext.getPayload().equals(itemNext.getPayload())) { + return false; + } + } + + return !expectedIt.hasNext() && !itemIt.hasNext(); + } + + @Override + public void describeTo(Description description) { + description.appendText(" a chain matching ").appendValue(expected); + } + }; + } + + public static Matcher hasPayloads(long ... payloads) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(Chain item) { + Iterator elements = item.iterator(); + for (long payload : payloads) { + if (readPayload(elements.next().getPayload()) != payload) { + return false; + } + } + return !elements.hasNext(); + } + + @Override + public void describeTo(Description description) { + description.appendText(" a chain containing the payloads ").appendValueList("[", ", ", "]", payloads); + } + }; + } + + + public static Matcher sameSequenceAs(Chain original) { + List sequenceNumbers = sequenceNumbersOf(original); + + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(Chain item) { + return sequenceNumbers.equals(sequenceNumbersOf(item)); + } + + @Override + public void describeTo(Description description) { + description.appendValue("a chain with sequence numbers matching ").appendValue(original); + } + }; + } + + private static List sequenceNumbersOf(Chain chain) { + List sequenceNumbers = new ArrayList<>(chain.length()); + for (Element element : chain) { + sequenceNumbers.add(((SequencedElement) element).getSequenceNumber()); + } + return sequenceNumbers; + } + +} diff --git a/config/checkstyle.xml b/config/checkstyle.xml index 190ec79cba..6a9374d111 100644 --- a/config/checkstyle.xml +++ b/config/checkstyle.xml @@ -24,25 +24,21 @@ - - - - - - - - - - + + + + + + - + diff --git a/config/owasp-supressions.xml b/config/owasp-supressions.xml new file mode 100644 index 0000000000..8a5a28b93e --- /dev/null +++ b/config/owasp-supressions.xml @@ -0,0 +1,53 @@ + + + + + Ehcache modules are not Gradle! + ^pkg:maven/org\.ehcache.*@.*$ + CVE-2019-11065 + + + Ehcache modules are not Gradle! + ^pkg:maven/org\.ehcache.*@.*$ + CVE-2019-15052 + + + Ehcache modules are not Gradle! + ^pkg:maven/org\.ehcache.*@.*$ + CVE-2019-16370 + + + TC Tripwire is unrelated to the other Tripwire + ^pkg:maven/org\.terracotta/tc\-tripwire\-plugin@.*$ + cpe:/a:tripwire:tripwire + + + BND isn't Eclipse + ^pkg:maven/biz\.aQute\.bnd/biz\.aQute\.bndlib@.*$ + cpe:/a:eclipse:eclipse_ide + + + Ehcache 3 builds require Java 8+ : 4.13.1 is safe + pkg:maven/junit/junit@4.13.1 + CVE-2020-15250 + + + + PAX URL Aether repackages httpclient and isn't (yet) fixed + db40edda8b95d880d2a810560fd5e46eb4fa6909 + CVE-2020-13956 + + + PAX URL Aether repackages commons-io and isn't (yet) fixed + 5060835593e5b6ed18c82fc2e782f0a3c30a00b1 + CVE-2021-29425 + + + PAX Exame JUnit4 doesn't have a 4.13.1 depending release + ^pkg:maven/org\.ops4j\.pax\.exam/pax\-exam\-junit4@.*$ + CVE-2020-15250 + + diff --git a/core-spi-test/.gitignore b/core-spi-test/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/core-spi-test/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/core-spi-test/build.gradle b/core-spi-test/build.gradle index 60fe9e0e59..0b8db145f4 100644 --- a/core-spi-test/build.gradle +++ b/core-spi-test/build.gradle @@ -13,14 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +plugins { + id 'org.ehcache.build.conventions.java-library' +} dependencies { - compile project(':spi-tester'), project(':core'), "org.hamcrest:hamcrest-library:$hamcrestVersion", "junit:junit:$junitVersion" - compile ("org.mockito:mockito-core:$mockitoVersion") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } + api project(':spi-tester') + implementation project(':ehcache-core') + implementation project(':ehcache-impl') + implementation "junit:junit:$junitVersion" + implementation "org.mockito:mockito-core:$mockitoVersion" + implementation "org.hamcrest:hamcrest-library:$hamcrestVersion" } -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} diff --git a/core-spi-test/gradle.properties b/core-spi-test/gradle.properties deleted file mode 100644 index 38ab6ff6e6..0000000000 --- a/core-spi-test/gradle.properties +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 Core SPI test module -subPomDesc = The Core SPI test module of Ehcache 3 diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreBulkComputeIfAbsentTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreBulkComputeIfAbsentTest.java index 7e9b6182d3..1d030950e6 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreBulkComputeIfAbsentTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreBulkComputeIfAbsentTest.java @@ -16,7 +16,8 @@ package org.ehcache.internal.store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.exceptions.StorePassThroughException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.test.After; import org.ehcache.spi.test.Before; @@ -104,7 +105,7 @@ public void missingIterableEntriesAreIgnoredByTheStore() throws Exception { kvStore.bulkComputeIfAbsent(inputKeys, entries -> emptySet()); for (Map.Entry mappedEntry : mappedEntries.entrySet()) { - assertThat(kvStore.get(mappedEntry.getKey()).value(), is(mappedEntry.getValue())); + assertThat(kvStore.get(mappedEntry.getKey()).get(), is(mappedEntry.getValue())); } } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); @@ -138,7 +139,7 @@ public void mappingIsSameInTheStoreForEntriesReturnedWithDifferentValueFromMappi }); for (Map.Entry mappedEntry : mappedEntries.entrySet()) { - assertThat(kvStore.get(mappedEntry.getKey()).value(), is(mappedEntry.getValue())); + assertThat(kvStore.get(mappedEntry.getKey()).get(), is(mappedEntry.getValue())); } } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); @@ -193,7 +194,7 @@ public void computeValuesForEveryKeyUsingAMappingFunction() throws Exception { }); for (Map.Entry entry : computedEntries.entrySet()) { - assertThat(kvStore.get(entry.getKey()).value(), is(entry.getValue())); + assertThat(kvStore.get(entry.getKey()).get(), is(entry.getValue())); } } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); @@ -267,4 +268,31 @@ public void testMappingFunctionProducesWrongValueType() throws Exception { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } } + + @SPITest + public void exception() throws Exception { + Set inputKeys = Collections.singleton(factory.createKey(0)); + + RuntimeException exception = new RuntimeException("error"); + + try { + kvStore.bulkComputeIfAbsent(inputKeys, entries -> { throw exception; }); + } catch (StoreAccessException e) { + assertThat(e.getCause(), is(exception)); + } + } + + @SPITest + public void passThroughException() throws Exception { + Set inputKeys = Collections.singleton(factory.createKey(0)); + + RuntimeException exception = new RuntimeException("error"); + StorePassThroughException ste = new StorePassThroughException(exception); + + try { + kvStore.bulkComputeIfAbsent(inputKeys, entries -> { throw ste; }); + } catch (RuntimeException e) { + assertThat(e, is(exception)); + } + } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreBulkComputeTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreBulkComputeTest.java index 4156a33b8e..1ffdf221ff 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreBulkComputeTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreBulkComputeTest.java @@ -16,7 +16,8 @@ package org.ehcache.internal.store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.exceptions.StorePassThroughException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.test.After; import org.ehcache.spi.test.Before; @@ -24,6 +25,7 @@ import org.ehcache.spi.test.SPITest; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -204,7 +206,7 @@ public void computeValuesForEveryKeyUsingARemappingFunction() throws Exception { }); for (K inputKey : inputKeys) { - assertThat(kvStore.get(inputKey).value(), is(computedEntries.get(inputKey))); + assertThat(kvStore.get(inputKey).get(), is(computedEntries.get(inputKey))); } } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); @@ -278,4 +280,31 @@ public void remappingFunctionProducesWrongValueType() throws Exception { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } } + + @SPITest + public void exception() throws Exception { + Set inputKeys = Collections.singleton(factory.createKey(0)); + + RuntimeException exception = new RuntimeException("error"); + + try { + kvStore.bulkCompute(inputKeys, entries -> { throw exception; }); + } catch (StoreAccessException e) { + assertThat(e.getCause(), is(exception)); + } + } + + @SPITest + public void passThroughException() throws Exception { + Set inputKeys = Collections.singleton(factory.createKey(0)); + + RuntimeException exception = new RuntimeException("error"); + StorePassThroughException ste = new StorePassThroughException(exception); + + try { + kvStore.bulkCompute(inputKeys, entries -> { throw ste; }); + } catch (RuntimeException e) { + assertThat(e, is(exception)); + } + } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreClearTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreClearTest.java index 1edb4dc6f4..b4f82c43d2 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreClearTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreClearTest.java @@ -16,7 +16,7 @@ package org.ehcache.internal.store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.test.After; import org.ehcache.spi.test.Before; diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCloseTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCloseTest.java index b260b712a7..f68e75e377 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCloseTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCloseTest.java @@ -17,7 +17,7 @@ package org.ehcache.internal.store; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.test.After; import org.ehcache.spi.test.Before; import org.ehcache.spi.test.SPITest; diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java index 12de90867b..3017f59bcd 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java @@ -15,18 +15,19 @@ */ package org.ehcache.internal.store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.exceptions.StorePassThroughException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.internal.TestTimeSource; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; +import java.time.Duration; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Function; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; @@ -40,7 +41,6 @@ public StoreComputeIfAbsentTest(StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; @After public void tearDown() { @@ -48,12 +48,6 @@ public void tearDown() { factory.close(kvStore); kvStore = null; } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = (Store) this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } } @SPITest @@ -91,7 +85,7 @@ public void testWrongReturnValueType() throws Exception { @SPITest @SuppressWarnings("unchecked") public void testWrongKeyType() throws Exception { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); if (factory.getKeyType() == Object.class) { System.err.println("Warning, store uses Object as key type, cannot verify in this configuration"); @@ -107,7 +101,7 @@ public void testWrongKeyType() throws Exception { try { // wrong key type - kvStore2.computeIfAbsent(badKey, key -> { + kvStore.computeIfAbsent((K) badKey, key -> { throw new AssertionError(); }); throw new AssertionError(); @@ -128,7 +122,7 @@ public void testComputePutsValueInStoreWhenKeyIsAbsent() throws Exception { assertThat(kvStore.get(key), nullValue()); try { kvStore.computeIfAbsent(key, keyParam -> value); - assertThat(kvStore.get(key).value(), is(value)); + assertThat(kvStore.get(key).get(), is(value)); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } @@ -151,7 +145,7 @@ public void testFunctionNotInvokedWhenPresent() throws Exception { } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } - assertThat(kvStore.get(key).value(), is(value)); + assertThat(kvStore.get(key).get(), is(value)); } @SPITest @@ -179,28 +173,42 @@ public void testFunctionReturnsNull() throws Exception { public void testException() throws Exception { kvStore = factory.newStore(); - final K key = factory.createKey(1L); - - assertThat(kvStore.get(key), nullValue()); + K key = factory.createKey(1L); - final RuntimeException re = new RuntimeException(); + RuntimeException re = new RuntimeException(); try { kvStore.computeIfAbsent(key, keyParam -> { throw re; }); - } catch (RuntimeException e) { - assertThat(e, is(re)); } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + assertThat(e.getCause(), is(re)); } assertThat(kvStore.get(key), nullValue()); } + @SPITest + public void testStorePassThroughException() throws Exception { + kvStore = factory.newStore(); + + K key = factory.createKey(1L); + + RuntimeException exception = new RuntimeException("error"); + StorePassThroughException re = new StorePassThroughException(exception); + + try { + kvStore.computeIfAbsent(key, keyParam -> { + throw re; + }); + } catch (RuntimeException e) { + assertThat(e, is(exception)); + } + } + @SPITest public void testComputeIfAbsentValuePresentExpiresOnAccess() throws LegalSPITesterException { TestTimeSource timeSource = new TestTimeSource(10043L); - kvStore = factory.newStoreWithExpiry(Expirations.builder().setAccess(Duration.ZERO).build(), timeSource); + kvStore = factory.newStoreWithExpiry(ExpiryPolicyBuilder.expiry().access(Duration.ZERO).build(), timeSource); K key = factory.createKey(250928L); V value = factory.createValue(2059820L); @@ -212,7 +220,7 @@ public void testComputeIfAbsentValuePresentExpiresOnAccess() throws LegalSPITest fail("Should not be invoked"); return newValue; }); - assertThat(result.value(), is(value)); + assertThat(result.get(), is(value)); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java deleted file mode 100644 index 5b2fba1e72..0000000000 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java +++ /dev/null @@ -1,230 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.internal.store; - -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.internal.TestTimeSource; -import org.ehcache.spi.test.After; -import org.ehcache.spi.test.LegalSPITesterException; -import org.ehcache.spi.test.SPITest; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; -import org.junit.Assert; - -import java.util.function.BiFunction; -import java.util.function.Supplier; - -public class StoreComputeTest extends SPIStoreTester { - - public StoreComputeTest(StoreFactory factory) { - super(factory); - } - - protected Store kvStore; - protected Store kvStore2; - - @After - public void tearDown() { - if (kvStore != null) { - factory.close(kvStore); - kvStore = null; - } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } - } - - @SuppressWarnings("unchecked") - @SPITest - public void testWrongReturnValueType() throws Exception { - kvStore = factory.newStore(); - - if (factory.getValueType() == Object.class) { - Assert.fail("Warning, store uses Object as value type, cannot verify in this configuration"); - } - - final Object value; - if (factory.getValueType() == String.class) { - value = this; - } else { - value = "value"; - } - - final K key = factory.createKey(13); - try { - kvStore.compute(key, (BiFunction) (key1, oldValue) -> { - return value; // returning wrong value type from function - }); - throw new AssertionError(); - } catch (ClassCastException e) { - // expected - } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); - } - } - - @SuppressWarnings("unchecked") - @SPITest - public void testWrongKeyType() throws Exception { - kvStore2 = factory.newStore(); - - if (factory.getKeyType() == Object.class) { - System.err.println("Warning, store uses Object as key type, cannot verify in this configuration"); - return; - } - - final Object key; - if (factory.getKeyType() == String.class) { - key = this; - } else { - key = "key"; - } - - try { - // wrong key type - kvStore2.compute(key, (key1, oldValue) -> { - throw new AssertionError(); - }); - throw new AssertionError(); - } catch (ClassCastException e) { - // expected - } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); - } - } - - @SPITest - public void testComputePutsValueInStore() throws Exception { - kvStore = factory.newStore(); - - final K key = factory.createKey(14); - final V value = factory.createValue(153); - - try { - kvStore.compute(key, (keyParam, oldValue) -> value); - assertThat(kvStore.get(key).value(), is(value)); - } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); - } - } - - @SPITest - public void testOverwriteExitingValue() throws Exception { - kvStore = factory.newStore(); - - final K key = factory.createKey(151); - final V value = factory.createValue(1525); - final V value2 = factory.createValue(1526); - - assertThat(value2, not(equalTo(value))); - - try { - kvStore.put(key, value); - kvStore.compute(key, (keyParam, oldValue) -> value2); - assertThat(kvStore.get(key).value(), is(value2)); - } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); - } - } - - @SPITest - public void testNullReturnRemovesEntry() throws Exception { - kvStore = factory.newStore(); - - final K key = factory.createKey(1535603985); - final V value = factory.createValue(15920835); - - try { - kvStore.put(key, value); - kvStore.compute(key, (keyParam, oldValue) -> null); - assertThat(kvStore.get(key), nullValue()); - } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); - } - } - - @SPITest - public void testException() throws Exception { - kvStore = factory.newStore(); - - final K key = factory.createKey(520928098); - final V value = factory.createValue(15098209865L); - - final RuntimeException re = new RuntimeException(); - - try { - kvStore.put(key, value); - assertThat(kvStore.get(key).value(), is(value)); - - kvStore.compute(key, (keyParam, oldValue) -> { - throw re; - }); - } catch (RuntimeException e) { - assertThat(e, is(re)); - } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); - } - - assertThat(kvStore.get(key).value(), is(value)); - } - - @SPITest - public void testComputeExpiresOnAccess() throws Exception { - TestTimeSource timeSource = new TestTimeSource(10042L); - kvStore = factory.newStoreWithExpiry(Expirations.builder().setAccess(Duration.ZERO).build(), timeSource); - - final K key = factory.createKey(1042L); - final V value = factory.createValue(1340142L); - - try { - kvStore.put(key, value); - - Store.ValueHolder result = kvStore.compute(key, (k, v) -> v, () -> false); - assertThat(result.value(), is(value)); - } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); - } - } - - @SPITest - public void testComputeExpiresOnUpdate() throws Exception { - TestTimeSource timeSource = new TestTimeSource(10042L); - kvStore = factory.newStoreWithExpiry(Expirations.builder().setUpdate(Duration.ZERO).build(), timeSource); - - final K key = factory.createKey(1042L); - final V value = factory.createValue(1340142L); - final V newValue = factory.createValue(134054142L); - - try { - kvStore.put(key, value); - - Store.ValueHolder result = kvStore.compute(key, (k, v) -> newValue, () -> false); - assertThat(result.value(), is(newValue)); - } catch (StoreAccessException e) { - throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); - } - } -} diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java index a55f4109a1..695843a5c0 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java @@ -17,7 +17,7 @@ package org.ehcache.internal.store; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; @@ -39,7 +39,6 @@ public StoreContainsKeyTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; @After public void tearDown() { @@ -47,12 +46,6 @@ public void tearDown() { factory.close(kvStore); kvStore = null; } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } } @SPITest @@ -93,13 +86,13 @@ public void nullKeyThrowsException() @SuppressWarnings("unchecked") public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); try { if (this.factory.getKeyType() == String.class) { - kvStore2.containsKey(1.0f); + kvStore.containsKey((K) (Float) 1.0f); } else { - kvStore2.containsKey("key"); + kvStore.containsKey((K) "key"); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java index b3f9c79a0c..e8bd272ac7 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java @@ -17,7 +17,7 @@ package org.ehcache.internal.store; import org.ehcache.event.EventType; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventListener; @@ -29,9 +29,6 @@ import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; -import java.util.function.BiFunction; -import java.util.function.Function; - import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.hamcrest.MockitoHamcrest.argThat; @@ -78,7 +75,7 @@ public void testPutIfAbsentCreates() throws LegalSPITesterException { StoreEventListener listener = addListener(store); try { - store.putIfAbsent(factory.createKey(42L), factory.createValue(42L)); + store.putIfAbsent(factory.createKey(42L), factory.createValue(42L), b -> {}); verifyListenerInteractions(listener); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); @@ -90,7 +87,7 @@ public void testComputeCreates() throws LegalSPITesterException { StoreEventListener listener = addListener(store); try { - store.compute(factory.createKey(125L), (k, v) -> factory.createValue(215L)); + store.getAndCompute(factory.createKey(125L), (k, v) -> factory.createValue(215L)); verifyListenerInteractions(listener); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java index 07c29dde4d..bdfcd0bf98 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java @@ -19,7 +19,7 @@ import org.ehcache.Cache; import org.ehcache.core.spi.store.Store; import org.ehcache.event.EventType; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventListener; import org.ehcache.spi.test.After; @@ -27,9 +27,6 @@ import org.ehcache.spi.test.SPITest; import org.hamcrest.Matcher; -import java.util.function.BiFunction; -import java.util.function.Function; - import static org.ehcache.internal.store.StoreCreationEventListenerTest.eventType; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; @@ -48,10 +45,10 @@ public StoreEvictionEventListenerTest(StoreFactory factory) { } final K k = factory.createKey(1L); - final V v = factory.createValue(1l); + final V v = factory.createValue(1L); final K k2 = factory.createKey(2L); - final V v2 = factory.createValue(2l); - final V v3 = factory.createValue(3l); + final V v2 = factory.createValue(2L); + final V v3 = factory.createValue(3L); protected Store kvStore; @@ -77,7 +74,7 @@ public void testPutIfAbsentOnEviction() throws Exception { kvStore = factory.newStoreWithCapacity(1L); kvStore.put(k, v); StoreEventListener listener = addListener(kvStore); - kvStore.putIfAbsent(k2, v2); + kvStore.putIfAbsent(k2, v2, b -> {}); verifyListenerInteractions(listener); } @@ -92,7 +89,7 @@ public void testReplaceTwoArgsOnEviction() throws Exception { kvStore.put(k2, v2); verifyListenerInteractions(listener); kvStore.replace(getOnlyKey(kvStore.iterator()), v3); - assertThat(kvStore.get(getOnlyKey(kvStore.iterator())).value(), is(v3)); + assertThat(kvStore.get(getOnlyKey(kvStore.iterator())).get(), is(v3)); } @SPITest @@ -100,7 +97,7 @@ public void testComputeOnEviction() throws Exception { kvStore = factory.newStoreWithCapacity(1L); kvStore.put(k, v); StoreEventListener listener = addListener(kvStore); - kvStore.compute(k2, (mappedKey, mappedValue) -> v2); + kvStore.getAndCompute(k2, (mappedKey, mappedValue) -> v2); verifyListenerInteractions(listener); } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java index b2605a02df..2e260e4dac 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java @@ -16,27 +16,24 @@ package org.ehcache.internal.store; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.RemoveStatus; import org.ehcache.core.spi.store.Store.ReplaceStatus; import org.ehcache.event.EventType; import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventListener; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; import org.ehcache.internal.TestTimeSource; import org.ehcache.spi.test.After; import org.ehcache.spi.test.Before; import org.ehcache.spi.test.SPITest; import org.hamcrest.Matcher; -import java.util.concurrent.TimeUnit; -import java.util.function.BiFunction; -import java.util.function.Function; +import java.time.Duration; import static org.ehcache.internal.store.StoreCreationEventListenerTest.eventType; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; -import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.hamcrest.MockitoHamcrest.argThat; @@ -54,15 +51,15 @@ public StoreExpiryEventListenerTest(StoreFactory factory) { } final K k = factory.createKey(1L); - final V v = factory.createValue(1l); - final V v2 = factory.createValue(2l); + final V v = factory.createValue(1L); + final V v2 = factory.createValue(2L); protected Store kvStore; @Before public void setUp() { timeSource = new TestTimeSource(); - kvStore = factory.newStoreWithExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS)), timeSource); + kvStore = factory.newStoreWithExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1)), timeSource); } @After @@ -95,7 +92,7 @@ public void testPutIfAbsentOnExpiration() throws Exception { kvStore.put(k, v); StoreEventListener listener = addListener(kvStore); timeSource.advanceTime(1); - assertThat(kvStore.putIfAbsent(k, v), is(nullValue())); + assertThat(kvStore.putIfAbsent(k, v, b -> {}), is(nullValue())); verifyListenerInteractions(listener); } @@ -136,11 +133,12 @@ public void testReplaceThreeArgsOnExpiration() throws Exception { } @SPITest - public void testComputeOnExpiration() throws Exception { + public void testGetAndComputeOnExpiration() throws Exception { kvStore.put(k, v); StoreEventListener listener = addListener(kvStore); timeSource.advanceTime(1); - assertThat(kvStore.compute(k, (mappedKey, mappedValue) -> v2).value(), is(v2)); + assertThat(kvStore.getAndCompute(k, (mappedKey, mappedValue) -> v2), nullValue()); + assertThat(kvStore.get(k).get(), is(v2)); verifyListenerInteractions(listener); } @@ -150,7 +148,7 @@ public void testComputeIfAbsentOnExpiration() throws Exception { StoreEventListener listener = addListener(kvStore); timeSource.advanceTime(1); - assertThat(kvStore.computeIfAbsent(k, mappedKey -> v2).value(), is(v2)); + assertThat(kvStore.computeIfAbsent(k, mappedKey -> v2).get(), is(v2)); verifyListenerInteractions(listener); } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreFactory.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreFactory.java index 1cf75f0100..05bd94f3fd 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreFactory.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreFactory.java @@ -17,8 +17,8 @@ package org.ehcache.internal.store; import org.ehcache.config.EvictionAdvisor; -import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.service.Service; @@ -35,7 +35,7 @@ public interface StoreFactory { Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor); - Store newStoreWithExpiry(Expiry expiry, TimeSource timeSource); + Store newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource); Store.ValueHolder newValueHolder(V value); @@ -43,7 +43,7 @@ public interface StoreFactory { Class getValueType(); - ServiceConfiguration[] getServiceConfigurations(); + ServiceConfiguration[] getServiceConfigurations(); ServiceProvider getServiceProvider(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetAndComputeTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetAndComputeTest.java new file mode 100644 index 0000000000..5c65c5153b --- /dev/null +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetAndComputeTest.java @@ -0,0 +1,298 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.internal.store; + +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.exceptions.StorePassThroughException; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.core.spi.store.Store; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.internal.TestTimeSource; +import org.ehcache.spi.test.After; +import org.ehcache.spi.test.Ignore; +import org.ehcache.spi.test.LegalSPITesterException; +import org.ehcache.spi.test.SPITest; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import org.junit.Assert; + +import java.time.Duration; +import java.util.function.BiFunction; + +public class StoreGetAndComputeTest extends SPIStoreTester { + + public StoreGetAndComputeTest(StoreFactory factory) { + super(factory); + } + + protected Store kvStore; + + @After + public void tearDown() { + if (kvStore != null) { + factory.close(kvStore); + kvStore = null; + } + } + + @SuppressWarnings("unchecked") + @SPITest + public void testWrongReturnValueType() throws Exception { + kvStore = factory.newStore(); + + if (factory.getValueType() == Object.class) { + Assert.fail("Warning, store uses Object as value type, cannot verify in this configuration"); + } + + final Object value; + if (factory.getValueType() == String.class) { + value = this; + } else { + value = "value"; + } + + final K key = factory.createKey(13); + try { + kvStore.getAndCompute(key, (BiFunction) (key1, oldValue) -> { + return value; // returning wrong value type from function + }); + throw new AssertionError(); + } catch (ClassCastException e) { + // expected + } catch (StoreAccessException e) { + throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + } + } + + @SuppressWarnings("unchecked") + @SPITest + public void testWrongKeyType() throws Exception { + kvStore = factory.newStore(); + + if (factory.getKeyType() == Object.class) { + System.err.println("Warning, store uses Object as key type, cannot verify in this configuration"); + return; + } + + final Object key; + if (factory.getKeyType() == String.class) { + key = this; + } else { + key = "key"; + } + + try { + // wrong key type + kvStore.getAndCompute((K) key, (key1, oldValue) -> { + throw new AssertionError(); + }); + throw new AssertionError(); + } catch (ClassCastException e) { + // expected + } catch (StoreAccessException e) { + throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + } + } + + @SPITest + public void testComputePutsValueInStore() throws Exception { + kvStore = factory.newStore(); + + final K key = factory.createKey(14); + final V value = factory.createValue(153); + + try { + Store.ValueHolder compute = kvStore.getAndCompute(key, (keyParam, oldValue) -> value); + assertThat(kvStore.get(key).get(), is(value)); + assertThat(compute, nullValue()); + } catch (StoreAccessException e) { + throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + } + } + + @SPITest + public void testOverwriteExistingValue() throws Exception { + kvStore = factory.newStore(); + + final K key = factory.createKey(151); + final V value = factory.createValue(1525); + final V value2 = factory.createValue(1526); + + assertThat(value2, not(equalTo(value))); + + try { + kvStore.put(key, value); + Store.ValueHolder compute = kvStore.getAndCompute(key, (keyParam, oldValue) -> value2); + assertThat(kvStore.get(key).get(), is(value2)); + assertThat(compute.get(), is(value)); + } catch (StoreAccessException e) { + throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + } + } + + @SPITest + public void testNullReturnRemovesEntry() throws Exception { + kvStore = factory.newStore(); + + final K key = factory.createKey(1535603985); + final V value = factory.createValue(15920835); + + try { + kvStore.put(key, value); + Store.ValueHolder compute = kvStore.getAndCompute(key, (keyParam, oldValue) -> null); + assertThat(kvStore.get(key), nullValue()); + assertThat(compute.get(), is(value)); + } catch (StoreAccessException e) { + throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + } + } + + @SPITest + public void testException() throws Exception { + kvStore = factory.newStore(); + + final K key = factory.createKey(520928098); + final V value = factory.createValue(15098209865L); + + final RuntimeException re = new RuntimeException(); + + try { + kvStore.put(key, value); + assertThat(kvStore.get(key).get(), is(value)); + + kvStore.getAndCompute(key, (keyParam, oldValue) -> { + throw re; + }); + } catch (RuntimeException e) { + assertThat(e, is(re)); + } catch (StoreAccessException e) { + throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + } + + assertThat(kvStore.get(key).get(), is(value)); + } + + @SPITest + public void testStorePassThroughException() throws Exception { + kvStore = factory.newStore(); + + K key = factory.createKey(520928098); + V value = factory.createValue(15098209865L); + + RuntimeException exception = new RuntimeException("error"); + StorePassThroughException re = new StorePassThroughException(exception); + + try { + kvStore.put(key, value); + assertThat(kvStore.get(key).get(), is(value)); + + kvStore.getAndCompute(key, (keyParam, oldValue) -> { + throw re; + }); + } catch (RuntimeException e) { + assertThat(e, is(exception)); + } + + assertThat(kvStore.get(key).get(), is(value)); + } + + @SPITest + public void testExceptionOnSupplier() throws Exception { + kvStore = factory.newStore(); + + K key = factory.createKey(520928098); + V value = factory.createValue(15098209865L); + + RuntimeException re = new RuntimeException(); + + try { + kvStore.put(key, value); + assertThat(kvStore.get(key).get(), is(value)); + + kvStore.computeAndGet(key, (keyParam, oldValue) -> oldValue, () -> { throw re; }, () -> false); + } catch (StoreAccessException e) { + assertThat(e.getCause(), is(re)); + } + + assertThat(kvStore.get(key).get(), is(value)); + } + + @SPITest + public void testPassThroughExceptionOnSupplier() throws Exception { + kvStore = factory.newStore(); + + K key = factory.createKey(520928098); + V value = factory.createValue(15098209865L); + + RuntimeException exception = new RuntimeException("error"); + StorePassThroughException re = new StorePassThroughException(exception); + + try { + kvStore.put(key, value); + assertThat(kvStore.get(key).get(), is(value)); + + kvStore.computeAndGet(key, (keyParam, oldValue) -> oldValue, () -> { throw re; }, () -> false); + } catch (RuntimeException e) { + assertThat(e, is(exception)); + } + + assertThat(kvStore.get(key).get(), is(value)); + } + + @Ignore + @SPITest + public void testComputeExpiresOnAccess() throws Exception { + TestTimeSource timeSource = new TestTimeSource(10042L); + kvStore = factory.newStoreWithExpiry(ExpiryPolicyBuilder.expiry().access(Duration.ZERO).build(), timeSource); + + final K key = factory.createKey(1042L); + final V value = factory.createValue(1340142L); + + try { + kvStore.put(key, value); + + Store.ValueHolder result = kvStore.getAndCompute(key, (k, v) -> v); + assertThat(result.get(), is(value)); + assertThat(kvStore.get(key), nullValue()); + } catch (StoreAccessException e) { + throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + } + } + + @SPITest + public void testComputeExpiresOnUpdate() throws Exception { + TestTimeSource timeSource = new TestTimeSource(10042L); + kvStore = factory.newStoreWithExpiry(ExpiryPolicyBuilder.expiry().update(Duration.ZERO).build(), timeSource); + + final K key = factory.createKey(1042L); + final V value = factory.createValue(1340142L); + final V newValue = factory.createValue(134054142L); + + try { + kvStore.put(key, value); + + Store.ValueHolder result = kvStore.getAndCompute(key, (k, v) -> newValue); + assertThat(result.get(), is(value)); + assertThat(kvStore.get(key), nullValue()); + } catch (StoreAccessException e) { + throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); + } + } +} diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java index 198cb048a6..abdc1f1a38 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java @@ -16,18 +16,18 @@ package org.ehcache.internal.store; -import org.ehcache.ValueSupplier; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.internal.TestTimeSource; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; +import java.time.Duration; + import static org.ehcache.core.spi.store.Store.ValueHolder; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @@ -50,7 +50,6 @@ public StoreGetTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; @After public void tearDown() { @@ -58,12 +57,6 @@ public void tearDown() { factory.close(kvStore); kvStore = null; } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } } @SPITest @@ -108,7 +101,7 @@ public void existingKeyMappedInStoreReturnsCorrectValueHolder() kvStore.put(key, value); try { - assertThat(kvStore.get(key).value(), is(equalTo(value))); + assertThat(kvStore.get(key).get(), is(equalTo(value))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } @@ -135,13 +128,13 @@ public void nullKeyThrowsException() @SuppressWarnings({ "unchecked", "rawtypes" }) public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); try { if (this.factory.getKeyType() == String.class) { - kvStore2.get(1.0f); + kvStore.get((K) (Float) 1.0f); } else { - kvStore2.get("key"); + kvStore.get((K) "key"); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { @@ -154,14 +147,15 @@ public void wrongKeyTypeThrowsException() @SPITest public void testGetExpiresOnAccess() throws LegalSPITesterException { TestTimeSource timeSource = new TestTimeSource(10043L); - kvStore = factory.newStoreWithExpiry(Expirations.builder().setAccess(Duration.ZERO).build(), timeSource); + kvStore = factory.newStoreWithExpiry(ExpiryPolicyBuilder.expiry() + .access(Duration.ZERO).build(), timeSource); K key = factory.createKey(250928L); V value = factory.createValue(2059820L); try { kvStore.put(key, value); - assertThat(kvStore.get(key).value(), is(value)); + assertThat(kvStore.get(key).get(), is(value)); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorHasNextTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorHasNextTest.java index b319599d12..0116bd3bd2 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorHasNextTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorHasNextTest.java @@ -18,7 +18,7 @@ import org.ehcache.Cache; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorNextTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorNextTest.java index 322da98667..2cbcbbe1c4 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorNextTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorNextTest.java @@ -17,7 +17,7 @@ package org.ehcache.internal.store; import org.ehcache.Cache; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; @@ -66,7 +66,7 @@ public void nextReturnsNextElement() try { Cache.Entry> entry = iterator.next(); assertThat(entry.getKey(), is(equalTo(key))); - assertThat(entry.getValue().value(), is(equalTo(value))); + assertThat(entry.getValue().get(), is(equalTo(value))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorTest.java index 3fabd91235..5843989e53 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreIteratorTest.java @@ -17,7 +17,7 @@ package org.ehcache.internal.store; import org.ehcache.Cache; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.test.After; import org.ehcache.spi.test.Before; @@ -80,7 +80,7 @@ public void iterableContainsValuesInAnyOrder() while (iterator.hasNext()) { Cache.Entry> nextEntry = iterator.next(); keys.add(nextEntry.getKey()); - values.add(nextEntry.getValue().value()); + values.add(nextEntry.getValue().get()); } assertThat(keys, containsInAnyOrder(equalTo(key1), equalTo(key2), equalTo(key3))); assertThat(values, containsInAnyOrder(equalTo(value1), equalTo(value2), equalTo(value3))); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java index 6e2e4a5b36..bb5abe5102 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java @@ -16,25 +16,25 @@ package org.ehcache.internal.store; -import org.ehcache.ValueSupplier; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.internal.TestTimeSource; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; +import java.time.Duration; + import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; - /** - * Test the {@link Store#putIfAbsent(Object, Object)} contract of the + * Test the {@link Store#putIfAbsent(Object, Object, java.util.function.Consumer)} contract of the * {@link Store Store} interface. * * @author Aurelien Broszniowski @@ -47,7 +47,6 @@ public StorePutIfAbsentTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; @After public void tearDown() { @@ -55,12 +54,6 @@ public void tearDown() { factory.close(kvStore); kvStore = null; } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } } @SPITest @@ -72,7 +65,7 @@ public void mapsKeyToValueWhenMappingDoesntExist() V value = factory.createValue(1); try { - assertThat(kvStore.putIfAbsent(key, value), is(nullValue())); + assertThat(kvStore.putIfAbsent(key, value, b -> {}), is(nullValue())); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } @@ -95,7 +88,7 @@ public void doesntMapKeyToValueWhenMappingExists() V updatedValue = factory.createValue(2); try { - assertThat(kvStore.putIfAbsent(key, updatedValue).value(), is(equalTo(value))); + assertThat(kvStore.putIfAbsent(key, updatedValue, b -> {}).get(), is(equalTo(value))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } @@ -110,7 +103,7 @@ public void nullKeyThrowsException() V value = factory.createValue(1); try { - kvStore.putIfAbsent(key, value); + kvStore.putIfAbsent(key, value, b -> {}); throw new AssertionError("Expected NullPointerException because the key is null"); } catch (NullPointerException e) { // expected @@ -126,7 +119,7 @@ public void nullValueThrowsException() V value = null; try { - kvStore.putIfAbsent(key, value); + kvStore.putIfAbsent(key, value, b -> {}); throw new AssertionError("Expected NullPointerException because the value is null"); } catch (NullPointerException e) { // expected @@ -137,15 +130,15 @@ public void nullValueThrowsException() @SuppressWarnings({ "unchecked", "rawtypes" }) public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); V value = factory.createValue(1); try { if (this.factory.getKeyType() == String.class) { - kvStore2.putIfAbsent(1.0f, value); + kvStore.putIfAbsent((K) (Float) 1.0f, value, b -> {}); } else { - kvStore2.putIfAbsent("key", value); + kvStore.putIfAbsent((K) "key", value, b -> {}); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { @@ -159,15 +152,15 @@ public void wrongKeyTypeThrowsException() @SuppressWarnings({ "unchecked", "rawtypes" }) public void wrongValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); K key = factory.createKey(1); try { if (this.factory.getValueType() == String.class) { - kvStore2.putIfAbsent(key, 1.0f); + kvStore.putIfAbsent(key, (V) (Float) 1.0f, b -> {}); } else { - kvStore2.putIfAbsent(key, "value"); + kvStore.putIfAbsent(key, (V) "value", b -> {}); } throw new AssertionError("Expected ClassCastException because the value is of the wrong type"); } catch (ClassCastException e) { @@ -180,7 +173,7 @@ public void wrongValueTypeThrowsException() @SPITest public void testPutIfAbsentValuePresentExpiresOnAccess() throws LegalSPITesterException { TestTimeSource timeSource = new TestTimeSource(10043L); - kvStore = factory.newStoreWithExpiry(Expirations.builder().setAccess(Duration.ZERO).build(), timeSource); + kvStore = factory.newStoreWithExpiry(ExpiryPolicyBuilder.expiry().access(Duration.ZERO).build(), timeSource); K key = factory.createKey(250928L); V value = factory.createValue(2059820L); @@ -188,7 +181,7 @@ public void testPutIfAbsentValuePresentExpiresOnAccess() throws LegalSPITesterEx try { kvStore.put(key, value); - assertThat(kvStore.putIfAbsent(key, newValue).value(), is(value)); + assertThat(kvStore.putIfAbsent(key, newValue, b -> {}).get(), is(value)); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java index 9bb6977197..9c9c293403 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java @@ -16,23 +16,22 @@ package org.ehcache.internal.store; -import org.ehcache.ValueSupplier; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.internal.TestTimeSource; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; +import java.time.Duration; + import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; - /** * Test the {@link Store#put(Object, Object)} contract of the * {@link Store Store} interface. @@ -47,7 +46,6 @@ public StorePutTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; @After public void tearDown() { @@ -55,12 +53,6 @@ public void tearDown() { factory.close(kvStore); kvStore = null; } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } } @SPITest @@ -118,18 +110,18 @@ public void indicatesValuePutAndCanBeRetrievedWithEqualKey() } @SPITest - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings("unchecked") public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); V value = factory.createValue(1); try { if (this.factory.getKeyType() == String.class) { - kvStore2.put(1.0f, value); + kvStore.put((K) (Float) 1.0f, value); } else { - kvStore2.put("key", value); + kvStore.put((K) "key", value); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { @@ -143,15 +135,15 @@ public void wrongKeyTypeThrowsException() @SuppressWarnings({ "unchecked", "rawtypes" }) public void wrongValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); K key = factory.createKey(1); try { if (this.factory.getValueType() == String.class) { - kvStore2.put(key, 1.0f); + kvStore.put(key, (V) (Float) 1.0f); } else { - kvStore2.put(key, "value"); + kvStore.put(key, (V) "value"); } throw new AssertionError("Expected ClassCastException because the value is of the wrong type"); } catch (ClassCastException e) { @@ -183,7 +175,7 @@ public void indicatesValueReplaced() throws LegalSPITesterException { public void indicatesValueReplacedWhenUpdateExpires() throws LegalSPITesterException { TestTimeSource timeSource = new TestTimeSource(1000L); - kvStore = factory.newStoreWithExpiry(Expirations.builder().setUpdate(Duration.ZERO).build(), timeSource); + kvStore = factory.newStoreWithExpiry(ExpiryPolicyBuilder.expiry().update(Duration.ZERO).build(), timeSource); K key = factory.createKey(42L); V value = factory.createValue(42L); @@ -204,7 +196,7 @@ public void indicatesValueReplacedWhenUpdateExpires() throws LegalSPITesterExcep public void indicatesOperationNoOp() throws LegalSPITesterException { TestTimeSource timeSource = new TestTimeSource(1000L); - kvStore = factory.newStoreWithExpiry(Expirations.builder().setCreate(Duration.ZERO).build(), timeSource); + kvStore = factory.newStoreWithExpiry(ExpiryPolicyBuilder.expiry().create(Duration.ZERO).build(), timeSource); K key = factory.createKey(42L); try { diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java index 2185254092..5b16be71f3 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java @@ -19,7 +19,7 @@ import org.ehcache.event.EventType; import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventListener; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.test.After; import org.ehcache.spi.test.Before; @@ -27,8 +27,6 @@ import org.ehcache.spi.test.SPITest; import org.hamcrest.Matcher; -import java.util.function.BiFunction; - import static org.ehcache.internal.store.StoreCreationEventListenerTest.eventType; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -93,7 +91,7 @@ public void testComputeRemoves() throws LegalSPITesterException { K key = factory.createKey(125L); store.put(key, factory.createValue(125L)); StoreEventListener listener = addListener(store); - store.compute(key, (k, v) -> null); + store.getAndCompute(key, (k, v) -> null); verifyListenerInteractions(listener); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java index 87423b5b32..e84623830a 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java @@ -17,7 +17,7 @@ package org.ehcache.internal.store; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; @@ -39,7 +39,6 @@ public StoreRemoveKeyTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; @After public void tearDown() { @@ -47,12 +46,6 @@ public void tearDown() { factory.close(kvStore); kvStore = null; } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } } @SPITest @@ -109,13 +102,13 @@ public void nullKeyThrowsException() @SuppressWarnings({ "unchecked", "rawtypes" }) public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); try { if (this.factory.getKeyType() == String.class) { - kvStore2.remove(1.0f); + kvStore.remove((K) (Float) 1.0f); } else { - kvStore2.remove("key"); + kvStore.remove((K) "key"); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java index 76db98d3c9..31baf55fda 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java @@ -18,7 +18,7 @@ import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.RemoveStatus; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; @@ -203,7 +203,7 @@ public void wrongKeyTypeThrowsException() if (this.factory.getKeyType() == String.class) { kvStore2.remove((K) (Object) 1.0f, value); } else { - kvStore2.remove((K) (Object) "key", value); + kvStore2.remove((K) "key", value); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { @@ -225,7 +225,7 @@ public void wrongValueTypeThrowsException() if (this.factory.getValueType() == String.class) { kvStore2.remove(key, (V) (Object) 1.0f); } else { - kvStore2.remove(key, (V) (Object) "value"); + kvStore2.remove(key, (V) "value"); } throw new AssertionError("Expected ClassCastException because the value is of the wrong type"); } catch (ClassCastException e) { diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java index 524c086a1f..e1df421889 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java @@ -17,7 +17,7 @@ package org.ehcache.internal.store; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; @@ -41,7 +41,6 @@ public StoreReplaceKeyValueTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; @After public void tearDown() { @@ -49,12 +48,6 @@ public void tearDown() { factory.close(kvStore); kvStore = null; } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } } @SPITest @@ -75,7 +68,7 @@ public void replaceKeyAndValue() throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } - assertThat(kvStore.get(key).value(), is(equalTo(newValue))); + assertThat(kvStore.get(key).get(), is(equalTo(newValue))); } @SPITest @@ -91,7 +84,7 @@ public void replaceReturnsOldValue() V newValue = factory.createValue(2); try { - assertThat(kvStore.replace(key, newValue).value(), is(equalTo(originalValue))); + assertThat(kvStore.replace(key, newValue).get(), is(equalTo(originalValue))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } @@ -154,15 +147,15 @@ public void nullValueThrowsException() @SuppressWarnings("unchecked") public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); V value = factory.createValue(1); try { if (this.factory.getKeyType() == String.class) { - kvStore2.replace(1.0f, value); + kvStore.replace((K) (Float) 1.0f, value); } else { - kvStore2.replace("key", value); + kvStore.replace((K) "key", value); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { @@ -176,15 +169,15 @@ public void wrongKeyTypeThrowsException() @SuppressWarnings("unchecked") public void wrongValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); K key = factory.createKey(1); try { if (this.factory.getValueType() == String.class) { - kvStore2.replace(key, 1.0f); + kvStore.replace(key, (V) (Float) 1.0f); } else { - kvStore2.replace(key, "value"); + kvStore.replace(key, (V) "value"); } throw new AssertionError("Expected ClassCastException because the value is of the wrong type"); } catch (ClassCastException e) { diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java index 1ba79839f9..298ad03196 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java @@ -18,7 +18,7 @@ import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.ReplaceStatus; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; @@ -43,7 +43,6 @@ public StoreReplaceKeyValueValueTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; @After public void tearDown() { @@ -51,12 +50,6 @@ public void tearDown() { factory.close(kvStore); kvStore = null; } - if (kvStore2 != null) { - @SuppressWarnings("unchecked") - Store kvStore2 = this.kvStore2; - factory.close(kvStore2); - this.kvStore2 = null; - } } @SPITest @@ -77,7 +70,7 @@ public void replaceCorrectKeyAndValue() throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } - assertThat(kvStore.get(key).value(), is(equalTo(newValue))); + assertThat(kvStore.get(key).get(), is(equalTo(newValue))); } @SPITest @@ -99,7 +92,7 @@ public void replaceCorrectKeyAndWrongValue() throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } - assertThat(kvStore.get(key).value(), is(not(equalTo(wrongValue)))); + assertThat(kvStore.get(key).get(), is(not(equalTo(wrongValue)))); } @SPITest @@ -145,16 +138,16 @@ public void unsuccessfulReplaceReturnsMiss() @SuppressWarnings({ "unchecked", "rawtypes" }) public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); V originalValue = factory.createValue(1); V newValue = factory.createValue(2); try { if (this.factory.getKeyType() == String.class) { - kvStore2.replace(1.0f, originalValue); + kvStore.replace((K) (Float) 1.0f, originalValue); } else { - kvStore2.replace("key", originalValue, newValue); + kvStore.replace((K) "key", originalValue, newValue); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { @@ -168,16 +161,16 @@ public void wrongKeyTypeThrowsException() @SuppressWarnings({ "unchecked", "rawtypes" }) public void wrongOriginalValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); K key = factory.createKey(1); V newValue = factory.createValue(1); try { if (this.factory.getValueType() == String.class) { - kvStore2.replace(key, 1.0f, newValue); + kvStore.replace(key, (V) (Float) 1.0f, newValue); } else { - kvStore2.replace(key, "value", newValue); + kvStore.replace(key, (V) "value", newValue); } throw new AssertionError("Expected ClassCastException because the value is of the wrong type"); } catch (ClassCastException e) { @@ -191,16 +184,16 @@ public void wrongOriginalValueTypeThrowsException() @SuppressWarnings({ "unchecked", "rawtypes" }) public void wrongNewValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { - kvStore2 = factory.newStore(); + kvStore = factory.newStore(); K key = factory.createKey(1); V originalValue = factory.createValue(1); try { if (this.factory.getValueType() == String.class) { - kvStore2.replace(key, originalValue, 1.0f); + kvStore.replace(key, originalValue, (V) (Float) 1.0f); } else { - kvStore2.replace(key, originalValue, "value"); + kvStore.replace(key, originalValue, (V) "value"); } throw new AssertionError("Expected ClassCastException because the value is of the wrong type"); } catch (ClassCastException e) { diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreSPITest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreSPITest.java index 8b4e9abe32..ed60355ef7 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreSPITest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreSPITest.java @@ -26,8 +26,8 @@ public abstract class StoreSPITest { protected abstract StoreFactory getStoreFactory(); @Test - public void testCompute() throws Exception { - StoreComputeTest testSuite = new StoreComputeTest<>(getStoreFactory()); + public void testGetAndCompute() throws Exception { + StoreGetAndComputeTest testSuite = new StoreGetAndComputeTest<>(getStoreFactory()); testSuite.runTestSuite().reportAndThrow(); } @@ -124,13 +124,6 @@ public void testValueHolderLastAccessTime() throws Exception { testSuite.runTestSuite().reportAndThrow(); } - @Test - public void testValueHolderHitRate() throws Exception { - StoreValueHolderHitRateTest testSuite = - new StoreValueHolderHitRateTest<>(getStoreFactory()); - testSuite.runTestSuite().reportAndThrow(); - } - @Test public void testIteratorHasNext() throws Exception { StoreIteratorHasNextTest testSuite = diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java index 61657cfdd0..d31466391a 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java @@ -19,7 +19,7 @@ import org.ehcache.event.EventType; import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventListener; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.test.After; import org.ehcache.spi.test.Before; @@ -27,8 +27,6 @@ import org.ehcache.spi.test.SPITest; import org.hamcrest.Matcher; -import java.util.function.BiFunction; - import static org.ehcache.internal.store.StoreCreationEventListenerTest.eventType; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -109,7 +107,7 @@ public void testComputeUpdates() throws LegalSPITesterException { K key = factory.createKey(125L); store.put(key, factory.createValue(125L)); StoreEventListener listener = addListener(store); - store.compute(key, (k, v) -> factory.createValue(215L)); + store.getAndCompute(key, (k, v) -> factory.createValue(215L)); verifyListenerInteractions(listener); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderCreationTimeTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderCreationTimeTest.java index edd7130e7b..7978e54112 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderCreationTimeTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderCreationTimeTest.java @@ -43,7 +43,7 @@ public void creationTimeCanBeReturned() throws IllegalAccessException, InstantiationException { Store.ValueHolder valueHolder = factory.newValueHolder(factory.createValue(1)); - assertThat(valueHolder.creationTime(TimeUnit.MILLISECONDS), is(notNullValue())); + assertThat(valueHolder.creationTime(), is(notNullValue())); } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderHitRateTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderHitRateTest.java deleted file mode 100644 index 7a990766ec..0000000000 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderHitRateTest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.internal.store; - -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.test.SPITest; - -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.core.Is.is; - -/** - * Test the {@link Store.ValueHolder#hitRate(long, TimeUnit)} contract of the - * {@link Store.ValueHolder Store.ValueHolder} interface. - * - * @author Aurelien Broszniowski - */ - -public class StoreValueHolderHitRateTest extends SPIStoreTester { - - public StoreValueHolderHitRateTest(final StoreFactory factory) { - super(factory); - } - - @SPITest - public void hitRateCanBeReturned() - throws IllegalAccessException, InstantiationException { - Store.ValueHolder valueHolder = factory.newValueHolder(factory.createValue(1)); - - assertThat(valueHolder.hitRate(TimeUnit.MILLISECONDS.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS), anyOf(is(Float.NaN), is(0.0f))); - } -} diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderLastAccessTimeTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderLastAccessTimeTest.java index 7564b4837e..bf5806bdd9 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderLastAccessTimeTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderLastAccessTimeTest.java @@ -43,6 +43,6 @@ public void lastAccessTimeCanBeReturned() throws IllegalAccessException, InstantiationException { Store.ValueHolder valueHolder = factory.newValueHolder(factory.createValue(1)); - assertThat(valueHolder.lastAccessTime(TimeUnit.MILLISECONDS), is(notNullValue())); + assertThat(valueHolder.lastAccessTime(), is(notNullValue())); } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderValueTest.java index be69354a3f..ee0c59289d 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreValueHolderValueTest.java @@ -24,7 +24,7 @@ import static org.hamcrest.core.Is.is; /** - * Test the {@link Store.ValueHolder#value()} contract of the + * Test the {@link Store.ValueHolder#get(Object)} contract of the * {@link Store.ValueHolder Store.ValueHolder} interface. * * @author Aurelien Broszniowski @@ -43,7 +43,7 @@ public void valueIsHeldByValueHolder() Store.ValueHolder valueHolder = factory.newValueHolder(value); try { - assertThat(valueHolder.value(), is(equalTo(value))); + assertThat(valueHolder.get(), is(equalTo(value))); } catch (Exception e) { System.err.println("Warning, an exception is thrown due to the SPI test"); e.printStackTrace(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierComputeIfAbsentAndFault.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierComputeIfAbsentAndFault.java index d747cdc834..8654c3a12b 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierComputeIfAbsentAndFault.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierComputeIfAbsentAndFault.java @@ -16,7 +16,7 @@ package org.ehcache.internal.tier; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; @@ -81,10 +81,10 @@ public void marksTheMappingAsNotEvictableAndComputeValue() throws LegalSPITester try { assertThat(tier.get(key), is(nullValue())); - assertThat(tier.computeIfAbsentAndFault(key, k -> factory.createValue(1L)).value(), is(equalTo(value))); + assertThat(tier.computeIfAbsentAndFault(key, k -> factory.createValue(1L)).get(), is(equalTo(value))); fillTierOverCapacity(tier, factory); - assertThat(tier.get(key).value(), is(equalTo(value))); + assertThat(tier.get(key).get(), is(equalTo(value))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierFactory.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierFactory.java index a79da797bc..dd55ba2413 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierFactory.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierFactory.java @@ -17,8 +17,8 @@ package org.ehcache.internal.tier; import org.ehcache.config.EvictionAdvisor; -import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.internal.store.StoreFactory; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; @@ -34,7 +34,7 @@ public interface AuthoritativeTierFactory extends StoreFactory { AuthoritativeTier newStoreWithCapacity(long capacity); @Override - AuthoritativeTier newStoreWithExpiry(Expiry expiry, TimeSource timeSource); + AuthoritativeTier newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource); @Override AuthoritativeTier newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierFlush.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierFlush.java index 16c7356535..e4c31512f3 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierFlush.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierFlush.java @@ -16,11 +16,10 @@ package org.ehcache.internal.tier; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.spi.test.After; -import org.ehcache.spi.test.Before; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; @@ -61,7 +60,7 @@ public void entryIsFlushed() throws LegalSPITesterException { K key = factory.createKey(1); final V value = factory.createValue(1); Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - when(valueHolder.expirationTime(any(TimeUnit.class))).thenReturn(1L); + when(valueHolder.expirationTime()).thenReturn(1L); tier = factory.newStoreWithCapacity(1L); @@ -82,7 +81,7 @@ public void entryIsNotFlushed() throws LegalSPITesterException { K key = factory.createKey(1); final V value = factory.createValue(1); Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - when(valueHolder.expirationTime(any(TimeUnit.class))).thenReturn(1L); + when(valueHolder.expirationTime()).thenReturn(1L); tier = factory.newStoreWithCapacity(1L); @@ -100,7 +99,7 @@ public void entryIsNotFlushed() throws LegalSPITesterException { public void entryDoesNotExist() { K key = factory.createKey(1); Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - when(valueHolder.expirationTime(any(TimeUnit.class))).thenReturn(1L); + when(valueHolder.expirationTime()).thenReturn(1L); tier = factory.newStoreWithCapacity(1L); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierGetAndFault.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierGetAndFault.java index 0f19d97245..a37f5cd922 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierGetAndFault.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/AuthoritativeTierGetAndFault.java @@ -16,18 +16,16 @@ package org.ehcache.internal.tier; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.internal.TestTimeSource; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; -import org.ehcache.spi.test.After; -import org.ehcache.spi.test.Before; +import org.ehcache.spi.test.After;; import org.ehcache.spi.test.Ignore; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; -import java.util.concurrent.TimeUnit; +import java.time.Duration; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; @@ -88,11 +86,11 @@ public void marksTheMappingAsNotEvictableAndReturnsValue() throws LegalSPITester try { tier.put(key, value); - assertThat(tier.getAndFault(key).value(), is(equalTo(value))); + assertThat(tier.getAndFault(key).get(), is(equalTo(value))); fillTierOverCapacity(tier, factory); - assertThat(tier.get(key).value(), is(equalTo(value))); + assertThat(tier.get(key).get(), is(equalTo(value))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); @@ -103,7 +101,7 @@ public void marksTheMappingAsNotEvictableAndReturnsValue() throws LegalSPITester @Ignore public void marksTheMappingAsNotExpirable() throws LegalSPITesterException { TestTimeSource timeSource = new TestTimeSource(); - tier = factory.newStoreWithExpiry(Expirations.timeToIdleExpiration(new Duration(1, TimeUnit.MILLISECONDS)), timeSource); + tier = factory.newStoreWithExpiry(ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(1L)), timeSource); K key = factory.createKey(1); V value = factory.createValue(1); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierClear.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierClear.java index 6a9cde5b7e..fd59728d72 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierClear.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierClear.java @@ -16,17 +16,15 @@ package org.ehcache.internal.tier; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.spi.test.After; -import org.ehcache.spi.test.Before; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; import java.util.ArrayList; import java.util.List; -import java.util.function.Function; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @@ -48,10 +46,6 @@ public CachingTierClear(final CachingTierFactory factory) { super(factory); } - @Before - public void setUp() { - } - @After public void tearDown() { if (tier != null) { @@ -71,7 +65,7 @@ public void removeMapping() throws LegalSPITesterException { V newValue= factory.createValue(2); final Store.ValueHolder originalValueHolder = mock(Store.ValueHolder.class); - when(originalValueHolder.value()).thenReturn(originalValue); + when(originalValueHolder.get()).thenReturn(originalValue); try { List keys = new ArrayList<>(); @@ -85,13 +79,13 @@ public void removeMapping() throws LegalSPITesterException { tier.clear(); final Store.ValueHolder newValueHolder = mock(Store.ValueHolder.class); - when(newValueHolder.value()).thenReturn(newValue); + when(newValueHolder.get()).thenReturn(newValue); for (K key : keys) { tier.invalidate(key); Store.ValueHolder newReturnedValueHolder = tier.getOrComputeIfAbsent(key, o -> newValueHolder); - assertThat(newReturnedValueHolder.value(), is(equalTo(newValueHolder.value()))); + assertThat(newReturnedValueHolder.get(), is(equalTo(newValueHolder.get()))); } } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierFactory.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierFactory.java index f3337891c6..3d81c07b9e 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierFactory.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierFactory.java @@ -39,7 +39,7 @@ public interface CachingTierFactory { Class getValueType(); - ServiceConfiguration[] getServiceConfigurations(); + ServiceConfiguration[] getServiceConfigurations(); ServiceProvider getServiceProvider(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierGetOrComputeIfAbsent.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierGetOrComputeIfAbsent.java index 2ae60634f6..912e7b4c42 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierGetOrComputeIfAbsent.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierGetOrComputeIfAbsent.java @@ -16,11 +16,10 @@ package org.ehcache.internal.tier; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.spi.test.After; -import org.ehcache.spi.test.Before; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; @@ -49,10 +48,6 @@ public CachingTierGetOrComputeIfAbsent(final CachingTierFactory factory) { super(factory); } - @Before - public void setUp() { - } - @After public void tearDown() { if (tier != null) { @@ -68,14 +63,14 @@ public void returnTheValueHolderNotInTheCachingTier() throws LegalSPITesterExcep V value = factory.createValue(1); final Store.ValueHolder computedValueHolder = mock(Store.ValueHolder.class); - when(computedValueHolder.value()).thenReturn(value); + when(computedValueHolder.get()).thenReturn(value); tier = factory.newCachingTier(1L); try { Store.ValueHolder valueHolder = tier.getOrComputeIfAbsent(key, k -> computedValueHolder); - assertThat(valueHolder.value(), is(equalTo(value))); + assertThat(valueHolder.get(), is(equalTo(value))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } @@ -87,8 +82,8 @@ public void returnTheValueHolderCurrentlyInTheCachingTier() throws LegalSPITeste K key = factory.createKey(1); V value = factory.createValue(1); final Store.ValueHolder computedValueHolder = mock(Store.ValueHolder.class); - when(computedValueHolder.value()).thenReturn(value); - when(computedValueHolder.expirationTime(any(TimeUnit.class))).thenReturn(Store.ValueHolder.NO_EXPIRE); + when(computedValueHolder.get()).thenReturn(value); + when(computedValueHolder.expirationTime()).thenReturn(Store.ValueHolder.NO_EXPIRE); tier = factory.newCachingTier(); @@ -98,7 +93,7 @@ public void returnTheValueHolderCurrentlyInTheCachingTier() throws LegalSPITeste Store.ValueHolder valueHolder = tier.getOrComputeIfAbsent(key, k -> null); - assertThat(valueHolder.value(), is(equalTo(value))); + assertThat(valueHolder.get(), is(equalTo(value))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierInvalidate.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierInvalidate.java index 518178a994..1ef04603e4 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierInvalidate.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierInvalidate.java @@ -17,7 +17,7 @@ package org.ehcache.internal.tier; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.spi.test.After; import org.ehcache.spi.test.LegalSPITesterException; @@ -27,10 +27,9 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Function; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * CachingTierInvalidate @@ -65,7 +64,7 @@ public void invalidateKey() throws LegalSPITesterException { // register invalidation listener final AtomicBoolean invalidated = new AtomicBoolean(false); tier.setInvalidationListener((key1, valueHolder) -> { - assertThat(valueHolder.value(), is(value)); + assertThat(valueHolder.get(), is(value)); invalidated.set(true); }); @@ -133,37 +132,27 @@ public void invalidateAll() throws LegalSPITesterException { private Store.ValueHolder wrap(final V value) { return new Store.ValueHolder() { @Override - public V value() { + public V get() { return value; } @Override - public long creationTime(TimeUnit unit) { + public long creationTime() { return 0L; } @Override - public long expirationTime(TimeUnit unit) { + public long expirationTime() { return 0L; } @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { + public boolean isExpired(long expirationTime) { return false; } @Override - public long lastAccessTime(TimeUnit unit) { - return 0L; - } - - @Override - public float hitRate(long now, TimeUnit unit) { - return 0L; - } - - @Override - public long hits() { + public long lastAccessTime() { return 0L; } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierRemove.java b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierRemove.java index a9b1c4db93..a7916c4906 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierRemove.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/tier/CachingTierRemove.java @@ -16,16 +16,13 @@ package org.ehcache.internal.tier; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.spi.test.After; -import org.ehcache.spi.test.Before; import org.ehcache.spi.test.LegalSPITesterException; import org.ehcache.spi.test.SPITest; -import java.util.function.Function; - import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; @@ -46,10 +43,6 @@ public CachingTierRemove(final CachingTierFactory factory) { super(factory); } - @Before - public void setUp() { - } - @After public void tearDown() { if (tier != null) { @@ -67,7 +60,7 @@ public void removeMapping() throws LegalSPITesterException { V newValue = factory.createValue(2); final Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - when(valueHolder.value()).thenReturn(originalValue); + when(valueHolder.get()).thenReturn(originalValue); tier = factory.newCachingTier(1L); @@ -77,10 +70,10 @@ public void removeMapping() throws LegalSPITesterException { tier.invalidate(key); final Store.ValueHolder newValueHolder = mock(Store.ValueHolder.class); - when(newValueHolder.value()).thenReturn(newValue); + when(newValueHolder.get()).thenReturn(newValue); Store.ValueHolder newReturnedValueHolder = tier.getOrComputeIfAbsent(key, o -> newValueHolder); - assertThat(newReturnedValueHolder.value(), is(equalTo(newValueHolder.value()))); + assertThat(newReturnedValueHolder.get(), is(equalTo(newValueHolder.get()))); } catch (StoreAccessException e) { throw new LegalSPITesterException("Warning, an exception is thrown due to the SPI test"); } diff --git a/core/.gitignore b/core/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/core/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/core/build.gradle b/core/build.gradle deleted file mode 100644 index 84ce9d2858..0000000000 --- a/core/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: EhDeploy - -dependencies { - compile project(':api'), "org.slf4j:slf4j-api:$parent.slf4jVersion" - compile ("org.terracotta:statistics:$parent.statisticVersion") { - exclude group:'org.slf4j', module:'slf4j-api' - } - testCompile project(':spi-tester') -} - -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} diff --git a/core/gradle.properties b/core/gradle.properties deleted file mode 100644 index 33ccc9d13a..0000000000 --- a/core/gradle.properties +++ /dev/null @@ -1,3 +0,0 @@ -subPomName = Ehcache 3 Core module -subPomDesc = The Core module of Ehcache 3 -osgi = {"Export-Package" : ["!org.ehcache.core.internal.*"]} diff --git a/core/src/main/java/org/ehcache/core/Ehcache.java b/core/src/main/java/org/ehcache/core/Ehcache.java deleted file mode 100644 index 8e73c9cb40..0000000000 --- a/core/src/main/java/org/ehcache/core/Ehcache.java +++ /dev/null @@ -1,1051 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumMap; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.concurrent.atomic.LongAdder; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import org.ehcache.Cache; -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.CacheRuntimeConfiguration; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.core.internal.resilience.LoggingRobustResilienceStrategy; -import org.ehcache.core.internal.resilience.RecoveryCache; -import org.ehcache.core.internal.resilience.ResilienceStrategy; -import org.ehcache.core.spi.LifeCycled; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.Store.PutStatus; -import org.ehcache.core.spi.store.Store.RemoveStatus; -import org.ehcache.core.spi.store.Store.ReplaceStatus; -import org.ehcache.core.spi.store.Store.ValueHolder; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.statistics.BulkOps; -import org.ehcache.core.statistics.CacheOperationOutcomes.ClearOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.ConditionalRemoveOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.GetAllOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.PutAllOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.PutIfAbsentOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.PutOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveAllOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.ReplaceOutcome; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.slf4j.Logger; -import org.terracotta.statistics.StatisticsManager; -import org.terracotta.statistics.observer.OperationObserver; - -import static org.ehcache.core.exceptions.ExceptionFactory.newCacheLoadingException; -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import static org.terracotta.statistics.StatisticBuilder.operation; - -/** - * Implementation of the {@link Cache} interface when no {@link CacheLoaderWriter} is involved. - *

- * {@code Ehcache} users should not have to depend on this type but rely exclusively on the api types in package - * {@code org.ehcache}. - * - * @see EhcacheWithLoaderWriter - */ -public class Ehcache implements InternalCache { - - private final StatusTransitioner statusTransitioner; - - private final Store store; - private final ResilienceStrategy resilienceStrategy; - private final EhcacheRuntimeConfiguration runtimeConfiguration; - private final Jsr107CacheImpl jsr107Cache; - protected final Logger logger; - - private final OperationObserver getObserver = operation(GetOutcome.class).named("get").of(this).tag("cache").build(); - private final OperationObserver getAllObserver = operation(GetAllOutcome.class).named("getAll").of(this).tag("cache").build(); - private final OperationObserver putObserver = operation(PutOutcome.class).named("put").of(this).tag("cache").build(); - private final OperationObserver putAllObserver = operation(PutAllOutcome.class).named("putAll").of(this).tag("cache").build(); - private final OperationObserver removeObserver = operation(RemoveOutcome.class).named("remove").of(this).tag("cache").build(); - private final OperationObserver removeAllObserver = operation(RemoveAllOutcome.class).named("removeAll").of(this).tag("cache").build(); - private final OperationObserver conditionalRemoveObserver = operation(ConditionalRemoveOutcome.class).named("conditionalRemove").of(this).tag("cache").build(); - private final OperationObserver putIfAbsentObserver = operation(PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag("cache").build(); - private final OperationObserver replaceObserver = operation(ReplaceOutcome.class).named("replace").of(this).tag("cache").build(); - private final Map bulkMethodEntries = new EnumMap<>(BulkOps.class); - private final OperationObserver clearObserver = operation(ClearOutcome.class).named("clear").of(this).tag("cache").build(); - - /** - * Creates a new {@code Ehcache} based on the provided parameters. - * - * @param configuration the cache configuration - * @param store the store to use - * @param eventDispatcher the event dispatcher - * @param logger the logger - */ - public Ehcache(CacheConfiguration configuration, final Store store, CacheEventDispatcher eventDispatcher, Logger logger) { - this(new EhcacheRuntimeConfiguration<>(configuration), store, eventDispatcher, logger, new StatusTransitioner(logger)); - } - - Ehcache(EhcacheRuntimeConfiguration runtimeConfiguration, Store store, - CacheEventDispatcher eventDispatcher, Logger logger, StatusTransitioner statusTransitioner) { - this.store = store; - runtimeConfiguration.addCacheConfigurationListener(store.getConfigurationChangeListeners()); - StatisticsManager.associate(store).withParent(this); - - if (store instanceof RecoveryCache) { - this.resilienceStrategy = new LoggingRobustResilienceStrategy<>(castToRecoveryCache(store)); - } else { - this.resilienceStrategy = new LoggingRobustResilienceStrategy<>(recoveryCache(store)); - } - - this.runtimeConfiguration = runtimeConfiguration; - runtimeConfiguration.addCacheConfigurationListener(eventDispatcher.getConfigurationChangeListeners()); - this.jsr107Cache = new Jsr107CacheImpl(); - - this.logger=logger; - this.statusTransitioner = statusTransitioner; - for (BulkOps bulkOp : BulkOps.values()) { - bulkMethodEntries.put(bulkOp, new LongAdder()); - } - } - - /** - * {@inheritDoc} - */ - @Override - public Map getBulkMethodEntries() { - return bulkMethodEntries; - } - - @SuppressWarnings("unchecked") - private RecoveryCache castToRecoveryCache(Store store) { - return (RecoveryCache) store; - } - - private V getNoLoader(K key) { - return get(key); - } - - /** - * {@inheritDoc} - */ - @Override - public V get(final K key) { - getObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key); - - try { - final Store.ValueHolder valueHolder = store.get(key); - - // Check for expiry first - if (valueHolder == null) { - getObserver.end(GetOutcome.MISS); - return null; - } else { - getObserver.end(GetOutcome.HIT); - return valueHolder.value(); - } - } catch (StoreAccessException e) { - try { - return resilienceStrategy.getFailure(key, e); - } finally { - getObserver.end(GetOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public void put(final K key, final V value) { - putObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, value); - - try { - PutStatus status = store.put(key, value); - switch (status) { - case PUT: - putObserver.end(PutOutcome.PUT); - break; - case NOOP: - putObserver.end(PutOutcome.NOOP); - break; - default: - throw new AssertionError("Invalid Status."); - } - } catch (StoreAccessException e) { - try { - resilienceStrategy.putFailure(key, value, e); - } finally { - putObserver.end(PutOutcome.FAILURE); - } - } - } - - private boolean newValueAlreadyExpired(K key, V oldValue, V newValue) { - return newValueAlreadyExpired(logger, runtimeConfiguration.getExpiry(), key, oldValue, newValue); - } - - /** - * {@inheritDoc} - */ - @Override - public boolean containsKey(final K key) { - statusTransitioner.checkAvailable(); - checkNonNull(key); - try { - return store.containsKey(key); - } catch (StoreAccessException e) { - return resilienceStrategy.containsKeyFailure(key, e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public void remove(K key) { - removeInternal(key); // ignore return value; - } - - - private boolean removeInternal(final K key) { - removeObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key); - - boolean removed = false; - try { - removed = store.remove(key); - if (removed) { - removeObserver.end(RemoveOutcome.SUCCESS); - } else { - removeObserver.end(RemoveOutcome.NOOP); - } - } catch (StoreAccessException e) { - try { - resilienceStrategy.removeFailure(key, e); - } finally { - removeObserver.end(RemoveOutcome.FAILURE); - } - } - - return removed; - } - - /** - * {@inheritDoc} - */ - @Override - public void clear() { - this.clearObserver.begin(); - statusTransitioner.checkAvailable(); - try { - store.clear(); - this.clearObserver.end(ClearOutcome.SUCCESS); - } catch (StoreAccessException e) { - this.clearObserver.end(ClearOutcome.FAILURE); - resilienceStrategy.clearFailure(e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public Iterator> iterator() { - statusTransitioner.checkAvailable(); - return new CacheEntryIterator(false); - } - - /** - * {@inheritDoc} - */ - @Override - public Map getAll(Set keys) throws BulkCacheLoadingException { - return getAllInternal(keys, true); - } - - private Map getAllInternal(Set keys, boolean includeNulls) throws BulkCacheLoadingException { - getAllObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNullContent(keys); - if(keys.isEmpty()) { - getAllObserver.end(GetAllOutcome.SUCCESS); - return Collections.emptyMap(); - } - - Map result = new HashMap<>(); - try { - Map> computedMap = store.bulkComputeIfAbsent(keys, new GetAllFunction<>()); - - int hits = 0; - int keyCount = 0; - for (Map.Entry> entry : computedMap.entrySet()) { - keyCount++; - if (entry.getValue() != null) { - result.put(entry.getKey(), entry.getValue().value()); - hits++; - } else if (includeNulls) { - result.put(entry.getKey(), null); - } - } - - addBulkMethodEntriesCount(BulkOps.GET_ALL_HITS, hits); - addBulkMethodEntriesCount(BulkOps.GET_ALL_MISS, keyCount - hits); - getAllObserver.end(GetAllOutcome.SUCCESS); - return result; - } catch (StoreAccessException e) { - try { - return resilienceStrategy.getAllFailure(keys, e); - } finally { - getAllObserver.end(GetAllOutcome.FAILURE); - } - } - } - - LinkedHashSet> nullValuesForKeys(final Iterable keys) { - final LinkedHashSet> entries = new LinkedHashSet<>(); - for (K key : keys) { - entries.add(new AbstractMap.SimpleEntry<>(key, null)); - } - return entries; - } - - /** - * {@inheritDoc} - */ - @Override - public void putAll(final Map entries) throws BulkCacheWritingException { - putAllObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(entries); - if(entries.isEmpty()) { - putAllObserver.end(PutAllOutcome.SUCCESS); - return; - } - - // Copy all entries to write into a Map - final Map entriesToRemap = new HashMap<>(); - for (Map.Entry entry: entries.entrySet()) { - // If a key/value is null, throw NPE, nothing gets mutated - if (entry.getKey() == null || entry.getValue() == null) { - throw new NullPointerException(); - } - entriesToRemap.put(entry.getKey(), entry.getValue()); - } - - try { - PutAllFunction putAllFunction = new PutAllFunction<>(logger, entriesToRemap, runtimeConfiguration.getExpiry()); - store.bulkCompute(entries.keySet(), putAllFunction); - addBulkMethodEntriesCount(BulkOps.PUT_ALL, putAllFunction.getActualPutCount().get()); - addBulkMethodEntriesCount(BulkOps.UPDATE_ALL, putAllFunction.getActualUpdateCount().get()); - putAllObserver.end(PutAllOutcome.SUCCESS); - } catch (StoreAccessException e) { - try { - resilienceStrategy.putAllFailure(entries, e); - } finally { - putAllObserver.end(PutAllOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public void removeAll(final Set keys) throws BulkCacheWritingException { - removeAllObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(keys); - if(keys.isEmpty()) { - removeAllObserver.end(RemoveAllOutcome.SUCCESS); - return; - } - - for (K key: keys) { - if (key == null) { - throw new NullPointerException(); - } - } - - - try { - RemoveAllFunction removeAllFunction = new RemoveAllFunction<>(); - store.bulkCompute(keys, removeAllFunction); - addBulkMethodEntriesCount(BulkOps.REMOVE_ALL, removeAllFunction.getActualRemoveCount().get()); - removeAllObserver.end(RemoveAllOutcome.SUCCESS); - } catch (StoreAccessException e) { - try { - resilienceStrategy.removeAllFailure(keys, e); - } finally { - removeAllObserver.end(RemoveAllOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public V putIfAbsent(final K key, final V value) { - putIfAbsentObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, value); - - boolean absent = false; - try { - ValueHolder inCache = store.putIfAbsent(key, value); - absent = (inCache == null); - if (absent) { - putIfAbsentObserver.end(PutIfAbsentOutcome.PUT); - return null; - } else { - putIfAbsentObserver.end(PutIfAbsentOutcome.HIT); - return inCache.value(); - } - } catch (StoreAccessException e) { - try { - return resilienceStrategy.putIfAbsentFailure(key, value, null, e, absent); - } finally { - putIfAbsentObserver.end(PutIfAbsentOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public boolean remove(final K key, final V value) { - conditionalRemoveObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, value); - RemoveStatus status = null; - boolean removed = false; - - try { - status = store.remove(key, value); - switch (status) { - case REMOVED: - removed = true; - conditionalRemoveObserver.end(ConditionalRemoveOutcome.SUCCESS); - break; - case KEY_MISSING: - conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE_KEY_MISSING); - break; - case KEY_PRESENT: - conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE_KEY_PRESENT); - break; - default: - throw new AssertionError("Invalid Status."); - } - } catch (StoreAccessException e) { - try { - return resilienceStrategy.removeFailure(key, value, e, removed); - } finally { - conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE); - } - } - return removed; - } - - /** - * {@inheritDoc} - */ - @Override - public V replace(final K key, final V value) { - replaceObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, value); - - try { - ValueHolder old = store.replace(key, value); - if (old != null) { - replaceObserver.end(ReplaceOutcome.HIT); - } else { - replaceObserver.end(ReplaceOutcome.MISS_NOT_PRESENT); - } - return old == null ? null : old.value(); - } catch (StoreAccessException e) { - try { - return resilienceStrategy.replaceFailure(key, value, e); - } finally { - replaceObserver.end(ReplaceOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public boolean replace(final K key, final V oldValue, final V newValue) { - replaceObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, oldValue, newValue); - - ReplaceStatus status = null; - boolean success = false; - - try { - status = store.replace(key, oldValue, newValue); - switch (status) { - case HIT: - success = true; - replaceObserver.end(ReplaceOutcome.HIT); - break; - case MISS_PRESENT: - replaceObserver.end(ReplaceOutcome.MISS_PRESENT); - break; - case MISS_NOT_PRESENT: - replaceObserver.end(ReplaceOutcome.MISS_NOT_PRESENT); - break; - default: - throw new AssertionError("Invalid Status."); - } - - return success; - } catch (StoreAccessException e) { - try { - return resilienceStrategy.replaceFailure(key, oldValue, newValue, e, success); - } finally { - replaceObserver.end(ReplaceOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public CacheRuntimeConfiguration getRuntimeConfiguration() { - return runtimeConfiguration; - } - - /** - * {@inheritDoc} - */ - @Override - public void init() { - statusTransitioner.init().succeeded(); - } - - /** - * {@inheritDoc} - */ - @Override - public void close() { - statusTransitioner.close().succeeded(); - } - - /** - * {@inheritDoc} - */ - @Override - public Status getStatus() { - return statusTransitioner.currentStatus(); - } - - /** - * {@inheritDoc} - */ - @Override - public void addHook(LifeCycled hook) { - statusTransitioner.addHook(hook); - } - - void removeHook(LifeCycled hook) { - statusTransitioner.removeHook(hook); - } - - private static void checkNonNull(Object thing) { - if(thing == null) { - throw new NullPointerException(); - } - } - - private static void checkNonNull(Object... things) { - for (Object thing : things) { - checkNonNull(thing); - } - } - - private void checkNonNullContent(Collection collectionOfThings) { - checkNonNull(collectionOfThings); - for (Object thing : collectionOfThings) { - checkNonNull(thing); - } - } - - private void addBulkMethodEntriesCount(BulkOps op, long count) { - bulkMethodEntries.get(op).add(count); - } - - /** - * {@inheritDoc} - */ - @Override - public Jsr107Cache getJsr107Cache() { - return jsr107Cache; - } - - /** - * {@inheritDoc} - */ - @Override - public CacheLoaderWriter getCacheLoaderWriter() { - return null; - } - - private final class Jsr107CacheImpl implements Jsr107Cache { - @Override - public void loadAll(Set keys, boolean replaceExistingValues, Function, Map> loadFunction) { - if(keys.isEmpty()) { - return ; - } - if (replaceExistingValues) { - loadAllReplace(keys, loadFunction); - } else { - loadAllAbsent(keys, loadFunction); - } - } - - @Override - public Iterator> specIterator() { - return new SpecIterator<>(this, store); - } - - @Override - public V getNoLoader(K key) { - return Ehcache.this.getNoLoader(key); - } - - @Override - public Map getAll(Set keys) { - return Ehcache.this.getAllInternal(keys, false); - } - - private void loadAllAbsent(Set keys, final Function, Map> loadFunction) { - try { - store.bulkComputeIfAbsent(keys, absentKeys -> cacheLoaderWriterLoadAllForKeys(absentKeys, loadFunction).entrySet()); - } catch (StoreAccessException e) { - throw newCacheLoadingException(e); - } - } - - Map cacheLoaderWriterLoadAllForKeys(Iterable keys, Function, Map> loadFunction) { - try { - Map loaded = loadFunction.apply(keys); - - // put into a new map since we can't assume the 107 cache loader returns things ordered, or necessarily with all the desired keys - Map rv = new LinkedHashMap<>(); - for (K key : keys) { - rv.put(key, loaded.get(key)); - } - return rv; - } catch (Exception e) { - throw newCacheLoadingException(e); - } - } - - private void loadAllReplace(Set keys, final Function, Map> loadFunction) { - try { - store.bulkCompute(keys, entries -> { - Collection keys1 = new ArrayList<>(); - for (Map.Entry entry : entries) { - keys1.add(entry.getKey()); - } - return cacheLoaderWriterLoadAllForKeys(keys1, loadFunction).entrySet(); - }); - } catch (StoreAccessException e) { - throw newCacheLoadingException(e); - } - } - - @Override - public void compute(K key, final BiFunction computeFunction, - final Supplier replaceEqual, final Supplier invokeWriter, final Supplier withStatsAndEvents) { - putObserver.begin(); - removeObserver.begin(); - getObserver.begin(); - - try { - BiFunction fn = (mappedKey, mappedValue) -> { - if (mappedValue == null) { - getObserver.end(GetOutcome.MISS); - } else { - getObserver.end(GetOutcome.HIT); - } - - V newValue = computeFunction.apply(mappedKey, mappedValue); - - if (newValue == mappedValue) { - if (! replaceEqual.get()) { - return mappedValue; - } - } - - if (newValueAlreadyExpired(mappedKey, mappedValue, newValue)) { - return null; - } - - if (withStatsAndEvents.get()) { - if (newValue == null) { - removeObserver.end(RemoveOutcome.SUCCESS); - } else { - putObserver.end(PutOutcome.PUT); - } - } - - return newValue; - }; - - store.compute(key, fn, replaceEqual); - } catch (StoreAccessException e) { - throw new RuntimeException(e); - } - } - - @Override - public V getAndRemove(K key) { - getObserver.begin(); - removeObserver.begin(); - - final AtomicReference existingValue = new AtomicReference<>(); - try { - store.compute(key, (mappedKey, mappedValue) -> { - existingValue.set(mappedValue); - - return null; - }); - } catch (StoreAccessException e) { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); - removeObserver.end(RemoveOutcome.FAILURE); - throw new RuntimeException(e); - } - - V returnValue = existingValue.get(); - if (returnValue != null) { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); - removeObserver.end(RemoveOutcome.SUCCESS); - } else { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); - } - return returnValue; - } - - @Override - public V getAndPut(K key, final V value) { - getObserver.begin(); - putObserver.begin(); - - final AtomicReference existingValue = new AtomicReference<>(); - try { - store.compute(key, (mappedKey, mappedValue) -> { - existingValue.set(mappedValue); - - if (newValueAlreadyExpired(mappedKey, mappedValue, value)) { - return null; - } - - return value; - }); - } catch (StoreAccessException e) { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); - putObserver.end(PutOutcome.FAILURE); - throw new RuntimeException(e); - } - - V returnValue = existingValue.get(); - if (returnValue != null) { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); - } else { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); - } - putObserver.end(PutOutcome.PUT); - return returnValue; - } - - @Override - public boolean remove(K key) { - return removeInternal(key); - } - - @Override - public void removeAll() { - Store.Iterator>> iterator = store.iterator(); - while (iterator.hasNext()) { - try { - Entry> next = iterator.next(); - remove(next.getKey()); - } catch (StoreAccessException cae) { - // skip - } - } - } - } - - private class CacheEntryIterator implements Iterator> { - - private final Store.Iterator>> iterator; - private final boolean quiet; - private Cache.Entry> current; - private Cache.Entry> next; - private StoreAccessException nextException; - - public CacheEntryIterator(boolean quiet) { - this.quiet = quiet; - this.iterator = store.iterator(); - advance(); - } - - private void advance() { - try { - while (iterator.hasNext()) { - next = iterator.next(); - if (getNoLoader(next.getKey()) != null) { - return; - } - } - next = null; - } catch (RuntimeException re) { - nextException = new StoreAccessException(re); - next = null; - } catch (StoreAccessException cae) { - nextException = cae; - next = null; - } - } - - @Override - public boolean hasNext() { - statusTransitioner.checkAvailable(); - return nextException != null || next != null; - } - - @Override - public Entry next() { - if (!hasNext()) { - throw new NoSuchElementException(); - } - - if (!quiet) getObserver.begin(); - if (nextException == null) { - if (!quiet) getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); - current = next; - advance(); - return new ValueHolderBasedEntry<>(current); - } else { - if (!quiet) getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); - StoreAccessException cae = nextException; - nextException = null; - return resilienceStrategy.iteratorFailure(cae); - } - } - - @Override - public void remove() { - statusTransitioner.checkAvailable(); - if (current == null) { - throw new IllegalStateException("No current element"); - } - Ehcache.this.remove(current.getKey(), current.getValue().value()); - current = null; - } - } - - private static RecoveryCache recoveryCache(final Store store) { - return new RecoveryCache() { - - @Override - public void obliterate() throws StoreAccessException { - store.clear(); - } - - @Override - public void obliterate(K key) throws StoreAccessException { - store.remove(key); - } - - @Override - public void obliterate(Iterable keys) throws StoreAccessException { - for (K key : keys) { - obliterate(key); - } - } - }; - } - - private static boolean newValueAlreadyExpired(Logger logger, Expiry expiry, K key, V oldValue, V newValue) { - if (newValue == null) { - return false; - } - - Duration duration; - try { - if (oldValue == null) { - duration = expiry.getExpiryForCreation(key, newValue); - } else { - duration = expiry.getExpiryForUpdate(key, supplierOf(oldValue), newValue); - } - } catch (RuntimeException re) { - logger.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - return true; - } - - return Duration.ZERO.equals(duration); - } - - private static class ValueHolderBasedEntry implements Cache.Entry { - private final Cache.Entry> storeEntry; - - ValueHolderBasedEntry(Cache.Entry> storeEntry) { - this.storeEntry = storeEntry; - } - - @Override - public K getKey() { - return storeEntry.getKey(); - } - - @Override - public V getValue() { - return storeEntry.getValue().value(); - } - - } - - // The compute function that will return the keys to their NEW values, taking the keys to their old values as input; - // but this could happen in batches, i.e. not necessary containing all of the entries of the Iterable passed to this method - public static class PutAllFunction implements Function>, Iterable>> { - - private final Logger logger; - private final Map entriesToRemap; - private final Expiry expiry; - private final AtomicInteger actualPutCount = new AtomicInteger(); - private final AtomicInteger actualUpdateCount = new AtomicInteger(); - - public PutAllFunction(Logger logger, Map entriesToRemap, Expiry expiry) { - this.logger = logger; - this.entriesToRemap = entriesToRemap; - this.expiry = expiry; - } - - @Override - public Iterable> apply(final Iterable> entries) { - Map mutations = new LinkedHashMap<>(); - - // then record we handled these mappings - for (Map.Entry entry: entries) { - K key = entry.getKey(); - V existingValue = entry.getValue(); - V newValue = entriesToRemap.remove(key); - - if (newValueAlreadyExpired(key, existingValue, newValue)) { - mutations.put(key, null); - } else { - actualPutCount.incrementAndGet(); - if(existingValue != null) { - actualUpdateCount.incrementAndGet(); - } - mutations.put(key, newValue); - } - } - - // Finally return the values to be installed in the Cache's Store - return mutations.entrySet(); - } - - public Map getEntriesToRemap() { - return entriesToRemap; - } - - private boolean newValueAlreadyExpired(K key, V oldValue, V newValue) { - return Ehcache.newValueAlreadyExpired(logger, expiry, key, oldValue, newValue); - } - - public AtomicInteger getActualPutCount() { - return actualPutCount; - } - - public AtomicInteger getActualUpdateCount() { - return actualUpdateCount; - } - } - - public static class RemoveAllFunction implements Function>, Iterable>> { - - private final AtomicInteger actualRemoveCount = new AtomicInteger(); - - @Override - public Iterable> apply(final Iterable> entries) { - Map results = new LinkedHashMap<>(); - - for (Map.Entry entry : entries) { - K key = entry.getKey(); - V existingValue = entry.getValue(); - - if (existingValue != null) { - actualRemoveCount.incrementAndGet(); - } - results.put(key, null); - } - - return results.entrySet(); - } - - public AtomicInteger getActualRemoveCount() { - return actualRemoveCount; - } - } - - public static class GetAllFunction implements Function, Iterable>> { - - @Override - public Iterable> apply(final Iterable keys) { - Map computeResult = new LinkedHashMap<>(); - - // put all the entries to get ordering correct - for (K key : keys) { - computeResult.put(key, null); - } - - return computeResult.entrySet(); - } - } - -} diff --git a/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java b/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java deleted file mode 100644 index a2af56fbdd..0000000000 --- a/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.CacheRuntimeConfiguration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.events.EventListenerWrapper; -import org.ehcache.event.CacheEventListener; -import org.ehcache.event.EventFiring; -import org.ehcache.event.EventOrdering; -import org.ehcache.event.EventType; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.service.ServiceConfiguration; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.CopyOnWriteArrayList; - -class EhcacheRuntimeConfiguration implements CacheRuntimeConfiguration, InternalRuntimeConfiguration, HumanReadable { - - private final Collection> serviceConfigurations; - private final CacheConfiguration config; - private final Class keyType; - private final Class valueType; - private final EvictionAdvisor evictionAdvisor; - private final ClassLoader classLoader; - private final Expiry expiry; - private volatile ResourcePools resourcePools; - - private final List cacheConfigurationListenerList - = new CopyOnWriteArrayList<>(); - - EhcacheRuntimeConfiguration(CacheConfiguration config) { - this.config = config; - this.serviceConfigurations = copy(config.getServiceConfigurations()); - this.keyType = config.getKeyType(); - this.valueType = config.getValueType(); - this.evictionAdvisor = config.getEvictionAdvisor(); - this.classLoader = config.getClassLoader(); - this.expiry = config.getExpiry(); - this.resourcePools = config.getResourcePools(); - } - - @Override - public synchronized void updateResourcePools(ResourcePools pools) { - - if(pools == null) { - throw new NullPointerException("Pools to be updated cannot be null"); - } - - ResourcePools updatedResourcePools = config.getResourcePools().validateAndMerge(pools); - fireCacheConfigurationChange(CacheConfigurationProperty.UPDATE_SIZE, config.getResourcePools(), updatedResourcePools); - this.resourcePools = updatedResourcePools; - } - - @Override - public Collection> getServiceConfigurations() { - return this.serviceConfigurations; - } - - @Override - public Class getKeyType() { - return this.keyType; - } - - @Override - public Class getValueType() { - return this.valueType; - } - - @Override - public EvictionAdvisor getEvictionAdvisor() { - return this.evictionAdvisor; - } - - @Override - public ClassLoader getClassLoader() { - return this.classLoader; - } - - @Override - public Expiry getExpiry() { - return expiry; - } - - @Override - public ResourcePools getResourcePools() { - return this.resourcePools; - } - - @Override - public boolean addCacheConfigurationListener(List listeners) { - return this.cacheConfigurationListenerList.addAll(listeners); - } - - @Override - public boolean removeCacheConfigurationListener(CacheConfigurationChangeListener listener) { - return this.cacheConfigurationListenerList.remove(listener); - } - - @Override - public synchronized void deregisterCacheEventListener(CacheEventListener listener) { - fireCacheConfigurationChange(CacheConfigurationProperty.REMOVE_LISTENER, listener, listener); - } - - @Override - public synchronized void registerCacheEventListener(CacheEventListener listener, EventOrdering ordering, - EventFiring firing, Set forEventTypes) { - EventListenerWrapper listenerWrapper = new EventListenerWrapper<>(listener, firing, ordering, EnumSet.copyOf(forEventTypes)); - fireCacheConfigurationChange(CacheConfigurationProperty.ADD_LISTENER, listenerWrapper, listenerWrapper); - } - - @Override - public void registerCacheEventListener(CacheEventListener listener, EventOrdering ordering, EventFiring firing, EventType eventType, EventType... eventTypes) { - EventListenerWrapper listenerWrapper = new EventListenerWrapper<>(listener, firing, ordering, EnumSet.of(eventType, eventTypes)); - fireCacheConfigurationChange(CacheConfigurationProperty.ADD_LISTENER, listenerWrapper, listenerWrapper); - } - - private Collection copy(Collection collection) { - if (collection == null) { - return null; - } - - return Collections.unmodifiableCollection(new ArrayList<>(collection)); - } - - @SuppressWarnings("unchecked") - private void fireCacheConfigurationChange(CacheConfigurationProperty prop, final T oldValue, final T newValue) { - if ((oldValue != null && !oldValue.equals(newValue)) || newValue != null) { - for (CacheConfigurationChangeListener cacheConfigurationListener : cacheConfigurationListenerList) { - cacheConfigurationListener.cacheConfigurationChange(new CacheConfigurationChangeEvent(prop, oldValue, newValue)); - } - } - } - - @Override - public String readableString() { - StringBuilder serviceConfigurationsToStringBuilder = new StringBuilder(); - for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { - serviceConfigurationsToStringBuilder - .append("\n ") - .append("- "); - if(serviceConfiguration instanceof HumanReadable) { - serviceConfigurationsToStringBuilder - .append(((HumanReadable)serviceConfiguration).readableString()) - .append("\n"); - } else { - serviceConfigurationsToStringBuilder - .append(serviceConfiguration.getClass().getName()) - .append("\n"); - } - } - - if(serviceConfigurationsToStringBuilder.length() > 0) { - serviceConfigurationsToStringBuilder.deleteCharAt(serviceConfigurationsToStringBuilder.length() -1); - } else { - serviceConfigurationsToStringBuilder.append(" None"); - } - - return - "keyType: " + keyType.getName() + "\n" + - "valueType: " + valueType.getName() + "\n" + - "serviceConfigurations:" + serviceConfigurationsToStringBuilder.toString().replace("\n", "\n ") + "\n" + - "evictionAdvisor: " + ((evictionAdvisor != null) ? evictionAdvisor.getClass().getName() : "None") + "\n" + - "expiry: " + ((expiry != null) ? expiry.getClass().getSimpleName() : "") + "\n" + - "resourcePools: " + "\n " + ((resourcePools instanceof HumanReadable) ? ((HumanReadable)resourcePools).readableString() : "").replace("\n", "\n "); - } -} diff --git a/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java b/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java deleted file mode 100644 index 0f6b94611f..0000000000 --- a/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java +++ /dev/null @@ -1,1427 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Cache; -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.CacheRuntimeConfiguration; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.core.exceptions.StorePassThroughException; -import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheLoadingException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.expiry.Duration; -import org.ehcache.core.internal.resilience.LoggingRobustResilienceStrategy; -import org.ehcache.core.internal.resilience.RecoveryCache; -import org.ehcache.core.internal.resilience.ResilienceStrategy; -import org.ehcache.core.spi.LifeCycled; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.Store.ValueHolder; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.core.statistics.BulkOps; -import org.ehcache.core.statistics.CacheOperationOutcomes.CacheLoadingOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.ConditionalRemoveOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.GetAllOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.PutIfAbsentOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.PutAllOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.PutOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveAllOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.ReplaceOutcome; -import org.slf4j.Logger; -import org.terracotta.statistics.StatisticsManager; -import org.terracotta.statistics.observer.OperationObserver; - -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumMap; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.concurrent.atomic.LongAdder; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import static org.ehcache.core.internal.util.Functions.memoize; -import static org.ehcache.core.exceptions.ExceptionFactory.newCacheLoadingException; -import static org.ehcache.core.exceptions.ExceptionFactory.newCacheWritingException; -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import static org.terracotta.statistics.StatisticBuilder.operation; - -/** - * Implementation of the {@link Cache} interface when a {@link CacheLoaderWriter} is involved. - *

- * {@code Ehcache} users should not have to depend on this type but rely exclusively on the api types in package - * {@code org.ehcache}. - * - * @see Ehcache - */ -public class EhcacheWithLoaderWriter implements InternalCache { - - private final StatusTransitioner statusTransitioner; - - private final Store store; - private final CacheLoaderWriter cacheLoaderWriter; - private final ResilienceStrategy resilienceStrategy; - private final EhcacheRuntimeConfiguration runtimeConfiguration; - private final Jsr107CacheImpl jsr107Cache; - private final boolean useLoaderInAtomics; - protected final Logger logger; - - private final OperationObserver getObserver = operation(GetOutcome.class).named("get").of(this).tag("cache").build(); - private final OperationObserver getAllObserver = operation(GetAllOutcome.class).named("getAll").of(this).tag("cache").build(); - private final OperationObserver putObserver = operation(PutOutcome.class).named("put").of(this).tag("cache").build(); - private final OperationObserver putAllObserver = operation(PutAllOutcome.class).named("putAll").of(this).tag("cache").build(); - private final OperationObserver removeObserver = operation(RemoveOutcome.class).named("remove").of(this).tag("cache").build(); - private final OperationObserver removeAllObserver = operation(RemoveAllOutcome.class).named("removeAll").of(this).tag("cache").build(); - private final OperationObserver conditionalRemoveObserver = operation(ConditionalRemoveOutcome.class).named("conditionalRemove").of(this).tag("cache").build(); - private final OperationObserver cacheLoadingObserver = operation(CacheLoadingOutcome.class).named("cacheLoading").of(this).tag("cache").build(); - private final OperationObserver putIfAbsentObserver = operation(PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag("cache").build(); - private final OperationObserver replaceObserver = operation(ReplaceOutcome.class).named("replace").of(this).tag("cache").build(); - private final Map bulkMethodEntries = new EnumMap<>(BulkOps.class); - - private static final Supplier REPLACE_FALSE = () -> Boolean.FALSE; - - /** - * Constructs a new {@code EhcacheWithLoaderWriter} based on the provided parameters. - * - * @param configuration the cache configuration - * @param store the underlying store - * @param cacheLoaderWriter the cache loader writer - * @param eventDispatcher the event dispatcher - * @param logger the logger - */ - public EhcacheWithLoaderWriter(CacheConfiguration configuration, Store store, - final CacheLoaderWriter cacheLoaderWriter, - CacheEventDispatcher eventDispatcher, - Logger logger) { - this(configuration, store, cacheLoaderWriter, eventDispatcher, true, logger); - } - - EhcacheWithLoaderWriter(CacheConfiguration runtimeConfiguration, Store store, - CacheLoaderWriter cacheLoaderWriter, - CacheEventDispatcher eventDispatcher, boolean useLoaderInAtomics, Logger logger) { - this(new EhcacheRuntimeConfiguration<>(runtimeConfiguration), store, cacheLoaderWriter, eventDispatcher, useLoaderInAtomics, logger, new StatusTransitioner(logger)); - } - - EhcacheWithLoaderWriter(EhcacheRuntimeConfiguration runtimeConfiguration, Store store, - CacheLoaderWriter cacheLoaderWriter, - CacheEventDispatcher eventDispatcher, boolean useLoaderInAtomics, Logger logger, StatusTransitioner statusTransitioner) { - this.store = store; - runtimeConfiguration.addCacheConfigurationListener(store.getConfigurationChangeListeners()); - StatisticsManager.associate(store).withParent(this); - if (cacheLoaderWriter == null) { - throw new NullPointerException("CacheLoaderWriter cannot be null."); - } - this.cacheLoaderWriter = cacheLoaderWriter; - if (store instanceof RecoveryCache) { - this.resilienceStrategy = new LoggingRobustResilienceStrategy<>(castToRecoveryCache(store)); - } else { - this.resilienceStrategy = new LoggingRobustResilienceStrategy<>(recoveryCache(store)); - } - - this.runtimeConfiguration = runtimeConfiguration; - runtimeConfiguration.addCacheConfigurationListener(eventDispatcher.getConfigurationChangeListeners()); - this.jsr107Cache = new Jsr107CacheImpl(); - - this.useLoaderInAtomics = useLoaderInAtomics; - this.logger=logger; - this.statusTransitioner = statusTransitioner; - for (BulkOps bulkOp : BulkOps.values()) { - bulkMethodEntries.put(bulkOp, new LongAdder()); - } - } - - /** - * {@inheritDoc} - */ - @Override - public Map getBulkMethodEntries() { - return bulkMethodEntries; - } - - @SuppressWarnings("unchecked") - private RecoveryCache castToRecoveryCache(Store store) { - return (RecoveryCache) store; - } - - private V getNoLoader(K key) { - getObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key); - - try { - final Store.ValueHolder valueHolder = store.get(key); - - // Check for expiry first - if (valueHolder == null) { - getObserver.end(GetOutcome.MISS); - return null; - } else { - getObserver.end(GetOutcome.HIT); - return valueHolder.value(); - } - } catch (StoreAccessException e) { - try { - return resilienceStrategy.getFailure(key, e); - } finally { - getObserver.end(GetOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public V get(final K key) throws CacheLoadingException { - getObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key); - final Function mappingFunction = memoize(k -> { - V loaded = null; - try { - cacheLoadingObserver.begin(); - loaded = cacheLoaderWriter.load(k); - cacheLoadingObserver.end(CacheLoadingOutcome.SUCCESS); - } catch (Exception e) { - cacheLoadingObserver.end(CacheLoadingOutcome.FAILURE); - throw new StorePassThroughException(newCacheLoadingException(e)); - } - - return loaded; - }); - - try { - final Store.ValueHolder valueHolder = store.computeIfAbsent(key, mappingFunction); - - // Check for expiry first - if (valueHolder == null) { - getObserver.end(GetOutcome.MISS); - return null; - } else { - getObserver.end(GetOutcome.HIT); - return valueHolder.value(); - } - } catch (StoreAccessException e) { - try { - V fromLoader; - try { - fromLoader = mappingFunction.apply(key); - } catch (StorePassThroughException cpte) { - return resilienceStrategy.getFailure(key, e, (CacheLoadingException) cpte.getCause()); - } - return resilienceStrategy.getFailure(key, fromLoader, e); - } finally { - getObserver.end(GetOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public void put(final K key, final V value) throws CacheWritingException { - putObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, value); - final BiFunction remappingFunction = memoize((key1, previousValue) -> { - try { - cacheLoaderWriter.write(key1, value); - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - return value; - }); - - try { - store.compute(key, remappingFunction); - putObserver.end(PutOutcome.PUT); - } catch (StoreAccessException e) { - try { - try { - remappingFunction.apply(key, value); - } catch (StorePassThroughException cpte) { - resilienceStrategy.putFailure(key, value, e, (CacheWritingException) cpte.getCause()); - return; - } - resilienceStrategy.putFailure(key, value, e); - } finally { - putObserver.end(PutOutcome.FAILURE); - } - } - } - - private boolean newValueAlreadyExpired(K key, V oldValue, V newValue) { - if (newValue == null) { - return false; - } - - final Duration duration; - if (oldValue == null) { - try { - duration = runtimeConfiguration.getExpiry().getExpiryForCreation(key, newValue); - } catch (RuntimeException re) { - logger.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - return true; - } - } else { - try { - duration = runtimeConfiguration.getExpiry().getExpiryForUpdate(key, supplierOf(oldValue), newValue); - } catch (RuntimeException re) { - logger.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - return true; - } - } - - return Duration.ZERO.equals(duration); - } - - /** - * {@inheritDoc} - */ - @Override - public boolean containsKey(final K key) { - statusTransitioner.checkAvailable(); - checkNonNull(key); - try { - return store.containsKey(key); - } catch (StoreAccessException e) { - return resilienceStrategy.containsKeyFailure(key, e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public void remove(K key) throws CacheWritingException { - removeInternal(key); // ignore return value; - } - - - private boolean removeInternal(final K key) throws CacheWritingException { - removeObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key); - - final AtomicBoolean modified = new AtomicBoolean(); - - final BiFunction remappingFunction = memoize((key1, previousValue) -> { - modified.set(previousValue != null); - - try { - cacheLoaderWriter.delete(key1); - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - return null; - }); - - try { - store.compute(key, remappingFunction); - if (modified.get()) { - removeObserver.end(RemoveOutcome.SUCCESS); - } else { - removeObserver.end(RemoveOutcome.NOOP); - } - } catch (StoreAccessException e) { - try { - try { - remappingFunction.apply(key, null); - } catch (StorePassThroughException f) { - resilienceStrategy.removeFailure(key, e, (CacheWritingException) f.getCause()); - } - resilienceStrategy.removeFailure(key, e); - } finally { - removeObserver.end(RemoveOutcome.FAILURE); - } - } - - return modified.get(); - } - - /** - * {@inheritDoc} - */ - @Override - public void clear() { - statusTransitioner.checkAvailable(); - try { - store.clear(); - } catch (StoreAccessException e) { - resilienceStrategy.clearFailure(e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public Iterator> iterator() { - statusTransitioner.checkAvailable(); - return new CacheEntryIterator(false); - } - - /** - * {@inheritDoc} - */ - @Override - public Map getAll(Set keys) throws BulkCacheLoadingException { - return getAllInternal(keys, true); - } - - private Map getAllInternal(Set keys, boolean includeNulls) throws BulkCacheLoadingException { - getAllObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNullContent(keys); - if(keys.isEmpty()) { - getAllObserver.end(GetAllOutcome.SUCCESS); - return Collections.emptyMap(); - } - final Map successes = new HashMap<>(); - final Map failures = new HashMap<>(); - - - Function, Iterable>> computeFunction = - keys1 -> { - Map computeResult = new LinkedHashMap<>(); - - // put all the entries to get ordering correct - for (K key : keys1) { - computeResult.put(key, null); - } - - Map loaded = Collections.emptyMap(); - try { - loaded = cacheLoaderWriter.loadAll(computeResult.keySet()); - } catch(BulkCacheLoadingException bcle) { - collectSuccessesAndFailures(bcle, successes, failures); - } catch (Exception e) { - for (K key : computeResult.keySet()) { - failures.put(key, e); - } - } - - if (!loaded.isEmpty()) { - for (K key : computeResult.keySet()) { - V value = loaded.get(key); - successes.put(key, value); - computeResult.put(key, value); - } - } - - return computeResult.entrySet(); - }; - - Map result = new HashMap<>(); - try { - Map> computedMap = store.bulkComputeIfAbsent(keys, computeFunction); - - int hits = 0; - int keyCount = 0; - for (Map.Entry> entry : computedMap.entrySet()) { - keyCount++; - if (entry.getValue() != null) { - result.put(entry.getKey(), entry.getValue().value()); - hits++; - } else if (includeNulls && failures.isEmpty()) { - result.put(entry.getKey(), null); - } - } - - addBulkMethodEntriesCount(BulkOps.GET_ALL_HITS, hits); - if (failures.isEmpty()) { - addBulkMethodEntriesCount(BulkOps.GET_ALL_MISS, keyCount - hits); - getAllObserver.end(GetAllOutcome.SUCCESS); - return result; - } else { - successes.putAll(result); - getAllObserver.end(GetAllOutcome.FAILURE); - throw new BulkCacheLoadingException(failures, successes); - } - } catch (StoreAccessException e) { - try { - Set toLoad = new HashSet<>(); - for (K key : keys) { - toLoad.add(key); - } - toLoad.removeAll(successes.keySet()); - toLoad.removeAll(failures.keySet()); - computeFunction.apply(toLoad); - if (failures.isEmpty()) { - return resilienceStrategy.getAllFailure(keys, successes, e); - } else { - return resilienceStrategy.getAllFailure(keys, e, new BulkCacheLoadingException(failures, successes)); - } - } finally { - getAllObserver.end(GetAllOutcome.FAILURE); - } - } - } - - LinkedHashSet> nullValuesForKeys(final Iterable keys) { - final LinkedHashSet> entries = new LinkedHashSet<>(); - for (K key : keys) { - entries.add(new AbstractMap.SimpleEntry<>(key, null)); - } - return entries; - } - - /** - * {@inheritDoc} - */ - @Override - public void putAll(final Map entries) throws BulkCacheWritingException { - putAllObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(entries); - if(entries.isEmpty()) { - putAllObserver.end(PutAllOutcome.SUCCESS); - return; - } - final Set successes = new HashSet<>(); - final Map failures = new HashMap<>(); - - // Copy all entries to write into a Map - final Map entriesToRemap = new HashMap<>(); - for (Map.Entry entry: entries.entrySet()) { - // If a key/value is null, throw NPE, nothing gets mutated - if (entry.getKey() == null || entry.getValue() == null) { - throw new NullPointerException(); - } - entriesToRemap.put(entry.getKey(), entry.getValue()); - } - - final AtomicInteger actualPutCount = new AtomicInteger(); - - // The compute function that will return the keys to their NEW values, taking the keys to their old values as input; - // but this could happen in batches, i.e. not necessary containing all of the entries of the Iterable passed to this method - Function>, Iterable>> computeFunction = - entries1 -> { - // If we have a writer, first write this batch - cacheLoaderWriterWriteAllCall(entries1, entriesToRemap, successes, failures); - - Map mutations = new LinkedHashMap<>(); - - // then record we handled these mappings - for (Map.Entry entry: entries1) { - K key = entry.getKey(); - V existingValue = entry.getValue(); - V newValue = entriesToRemap.remove(key); - - if (newValueAlreadyExpired(key, existingValue, newValue)) { - mutations.put(key, null); - } else if (successes.contains(key)) { - actualPutCount.incrementAndGet(); - mutations.put(key, newValue); - - } else { - mutations.put(key, existingValue); - } - } - - // Finally return the values to be installed in the Cache's Store - return mutations.entrySet(); - }; - - try { - store.bulkCompute(entries.keySet(), computeFunction); - addBulkMethodEntriesCount(BulkOps.PUT_ALL, actualPutCount.get()); - if (failures.isEmpty()) { - putAllObserver.end(PutAllOutcome.SUCCESS); - } else { - BulkCacheWritingException cacheWritingException = new BulkCacheWritingException(failures, successes); - tryRemoveFailedKeys(entries, failures, cacheWritingException); - putAllObserver.end(PutAllOutcome.FAILURE); - throw cacheWritingException; - } - } catch (StoreAccessException e) { - try { - // just in case not all writes happened: - if (!entriesToRemap.isEmpty()) { - cacheLoaderWriterWriteAllCall(entriesToRemap.entrySet(), entriesToRemap, successes, failures); - } - if (failures.isEmpty()) { - resilienceStrategy.putAllFailure(entries, e); - } else { - resilienceStrategy.putAllFailure(entries, e, new BulkCacheWritingException(failures, successes)); - } - } finally { - putAllObserver.end(PutAllOutcome.FAILURE); - } - } - } - - private void tryRemoveFailedKeys(Map entries, Map failures, BulkCacheWritingException cacheWritingException) { - try { - store.bulkCompute(failures.keySet(), entries1 -> { - HashMap result = new HashMap<>(); - for (Map.Entry entry : entries1) { - result.put(entry.getKey(), null); - } - return result.entrySet(); - }); - } catch (StoreAccessException e) { - resilienceStrategy.putAllFailure(entries, e, cacheWritingException); - } - } - - private void cacheLoaderWriterWriteAllCall(Iterable> entries, Map entriesToRemap, Set successes, Map failures) throws IllegalStateException { - Map toWrite = new HashMap<>(); - for (Map.Entry entry: entries) { - V value = entriesToRemap.get(entry.getKey()); - if (value == null) { - continue; - } - - toWrite.put(entry.getKey(), value); - } - try { - if (! toWrite.isEmpty()) { - // write all entries of this batch - cacheLoaderWriter.writeAll(toWrite.entrySet()); - successes.addAll(toWrite.keySet()); - } - } catch (BulkCacheWritingException bcwe) { - collectSuccessesAndFailures(bcwe, successes, failures); - } catch (Exception e) { - for (K key: toWrite.keySet()) { - failures.put(key, e); - } - } - } - - @SuppressWarnings({ "unchecked" }) - private static void collectSuccessesAndFailures(BulkCacheWritingException bcwe, Set successes, Map failures) { - successes.addAll((Collection)bcwe.getSuccesses()); - failures.putAll((Map)bcwe.getFailures()); - } - @SuppressWarnings({ "unchecked" }) - private void collectSuccessesAndFailures(BulkCacheLoadingException bcle, Map successes, Map failures) { - successes.putAll((Map)bcle.getSuccesses()); - failures.putAll((Map)bcle.getFailures()); - } - - /** - * {@inheritDoc} - */ - @Override - public void removeAll(final Set keys) throws BulkCacheWritingException { - removeAllObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(keys); - if(keys.isEmpty()) { - removeAllObserver.end(RemoveAllOutcome.SUCCESS); - return; - } - final Set successes = new HashSet<>(); - final Map failures = new HashMap<>(); - - final Map entriesToRemove = new HashMap<>(); - for (K key: keys) { - if (key == null) { - throw new NullPointerException(); - } - entriesToRemove.put(key, null); - } - - final AtomicInteger actualRemoveCount = new AtomicInteger(); - - Function>, Iterable>> removalFunction = - entries -> { - Set unknowns = cacheLoaderWriterDeleteAllCall(entries, entriesToRemove, successes, failures); - - Map results = new LinkedHashMap<>(); - - for (Map.Entry entry : entries) { - K key = entry.getKey(); - V existingValue = entry.getValue(); - - if (successes.contains(key)) { - if (existingValue != null) { - actualRemoveCount.incrementAndGet(); - } - results.put(key, null); - entriesToRemove.remove(key); - } else { - if (unknowns.contains(key)) { - results.put(key, null); - } else { - results.put(key, existingValue); - } - } - } - - return results.entrySet(); - }; - - try { - store.bulkCompute(keys, removalFunction); - addBulkMethodEntriesCount(BulkOps.REMOVE_ALL, actualRemoveCount.get()); - if (failures.isEmpty()) { - removeAllObserver.end(RemoveAllOutcome.SUCCESS); - } else { - removeAllObserver.end(RemoveAllOutcome.FAILURE); - throw new BulkCacheWritingException(failures, successes); - } - } catch (StoreAccessException e) { - try { - // just in case not all writes happened: - if (!entriesToRemove.isEmpty()) { - cacheLoaderWriterDeleteAllCall(entriesToRemove.entrySet(), entriesToRemove, successes, failures); - } - if (failures.isEmpty()) { - resilienceStrategy.removeAllFailure(keys, e); - } else { - resilienceStrategy.removeAllFailure(keys, e, new BulkCacheWritingException(failures, successes)); - } - } finally { - removeAllObserver.end(RemoveAllOutcome.FAILURE); - } - } - } - - private Set cacheLoaderWriterDeleteAllCall(Iterable> entries, Map entriesToRemove, Set successes, Map failures) { - final Set unknowns = new HashSet<>(); - Set toDelete = new HashSet<>(); - for (Map.Entry entry : entries) { - K key = entry.getKey(); - if (entriesToRemove.containsKey(key)) { - toDelete.add(key); - } - } - - try { - cacheLoaderWriter.deleteAll(toDelete); - successes.addAll(toDelete); - } catch (BulkCacheWritingException bcwe) { - collectSuccessesAndFailures(bcwe, successes, failures); - } catch (Exception e) { - for (K key : toDelete) { - failures.put(key, e); - unknowns.add(key); - } - } - return unknowns; - } - - /** - * {@inheritDoc} - */ - @Override - public V putIfAbsent(final K key, final V value) throws CacheWritingException { - putIfAbsentObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, value); - final AtomicBoolean installed = new AtomicBoolean(false); - - final Function mappingFunction = memoize(k -> { - if (useLoaderInAtomics) { - try { - V loaded = cacheLoaderWriter.load(k); - if (loaded != null) { - return loaded; // populate the cache - } - } catch (Exception e) { - throw new StorePassThroughException(newCacheLoadingException(e)); - } - } - - try { - cacheLoaderWriter.write(k, value); - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - - installed.set(true); - return value; - }); - - try { - ValueHolder inCache = store.computeIfAbsent(key, mappingFunction); - if (installed.get()) { - putIfAbsentObserver.end(PutIfAbsentOutcome.PUT); - return null; - } else if (inCache == null) { - putIfAbsentObserver.end(PutIfAbsentOutcome.HIT); - return null; - } else { - putIfAbsentObserver.end(PutIfAbsentOutcome.HIT); - return inCache.value(); - } - } catch (StoreAccessException e) { - try { - V loaded = null; - try { - loaded = mappingFunction.apply(key); - } catch (StorePassThroughException f) { - Throwable cause = f.getCause(); - if(cause instanceof CacheLoadingException) { - return resilienceStrategy.putIfAbsentFailure(key, value, e, (CacheLoadingException) cause); - } else if(cause instanceof CacheWritingException) { - return resilienceStrategy.putIfAbsentFailure(key, value, e, (CacheWritingException) cause); - } else { - throw new AssertionError(); - } - } - return resilienceStrategy.putIfAbsentFailure(key, value, loaded, e, installed.get()); - } finally { - putIfAbsentObserver.end(PutIfAbsentOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public boolean remove(final K key, final V value) throws CacheWritingException { - conditionalRemoveObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, value); - final AtomicBoolean hit = new AtomicBoolean(); - final AtomicBoolean removed = new AtomicBoolean(); - final BiFunction remappingFunction = memoize((k, inCache) -> { - if (inCache == null) { - if (useLoaderInAtomics) { - try { - inCache = cacheLoaderWriter.load(key); - if (inCache == null) { - return null; - } - } catch (Exception e) { - throw new StorePassThroughException(newCacheLoadingException(e)); - } - } else { - return null; - } - } - - hit.set(true); - if (value.equals(inCache)) { - try { - cacheLoaderWriter.delete(k); - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - removed.set(true); - return null; - } - return inCache; - }); - try { - store.compute(key, remappingFunction, REPLACE_FALSE); - if (removed.get()) { - conditionalRemoveObserver.end(ConditionalRemoveOutcome.SUCCESS); - } else { - if (hit.get()) { - conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE_KEY_PRESENT); - } else { - conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE_KEY_MISSING); - } - } - } catch (StoreAccessException e) { - try { - try { - remappingFunction.apply(key, null); - } catch (StorePassThroughException f) { - Throwable cause = f.getCause(); - if(cause instanceof CacheLoadingException) { - return resilienceStrategy.removeFailure(key, value, e, (CacheLoadingException) cause); - } else if(cause instanceof CacheWritingException) { - return resilienceStrategy.removeFailure(key, value, e, (CacheWritingException) cause); - } else { - throw new AssertionError(); - } - } - return resilienceStrategy.removeFailure(key, value, e, removed.get()); - } finally { - conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE); - } - } - return removed.get(); - } - - /** - * {@inheritDoc} - */ - @Override - public V replace(final K key, final V value) throws CacheLoadingException, CacheWritingException { - replaceObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, value); - final AtomicReference old = new AtomicReference<>(); - final BiFunction remappingFunction = memoize((k, inCache) -> { - if (inCache == null) { - if (useLoaderInAtomics) { - try { - inCache = cacheLoaderWriter.load(key); - if (inCache == null) { - return null; - } - } catch (Exception e) { - throw new StorePassThroughException(newCacheLoadingException(e)); - } - } else { - return null; - } - } - - try { - cacheLoaderWriter.write(key, value); - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - - old.set(inCache); - - if (newValueAlreadyExpired(key, inCache, value)) { - return null; - } - return value; - }); - - try { - store.compute(key, remappingFunction); - if (old.get() != null) { - replaceObserver.end(ReplaceOutcome.HIT); - } else { - replaceObserver.end(ReplaceOutcome.MISS_NOT_PRESENT); - } - return old.get(); - } catch (StoreAccessException e) { - try { - try { - remappingFunction.apply(key, null); - } catch (StorePassThroughException f) { - Throwable cause = f.getCause(); - if(cause instanceof CacheLoadingException) { - return resilienceStrategy.replaceFailure(key, value, e, (CacheLoadingException) cause); - } else if(cause instanceof CacheWritingException) { - return resilienceStrategy.replaceFailure(key, value, e, (CacheWritingException)cause); - } else { - throw new AssertionError(); - } - } - return resilienceStrategy.replaceFailure(key, value, e); - } finally { - replaceObserver.end(ReplaceOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public boolean replace(final K key, final V oldValue, final V newValue) throws CacheLoadingException, CacheWritingException { - replaceObserver.begin(); - statusTransitioner.checkAvailable(); - checkNonNull(key, oldValue, newValue); - - final AtomicBoolean success = new AtomicBoolean(); - final AtomicBoolean hit = new AtomicBoolean(); - - final BiFunction remappingFunction = memoize((k, inCache) -> { - if (inCache == null) { - if (useLoaderInAtomics) { - try { - inCache = cacheLoaderWriter.load(key); - if (inCache == null) { - return null; - } - } catch (Exception e) { - throw new StorePassThroughException(newCacheLoadingException(e)); - } - } else { - return null; - } - } - - hit.set(true); - if (oldValue.equals(inCache)) { - try { - cacheLoaderWriter.write(key, newValue); - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - - success.set(true); - - if (newValueAlreadyExpired(key, oldValue, newValue)) { - return null; - } - return newValue; - } - return inCache; - }); - try { - store.compute(key, remappingFunction, REPLACE_FALSE); - if (success.get()) { - replaceObserver.end(ReplaceOutcome.HIT); - } else { - if (hit.get()) { - replaceObserver.end(ReplaceOutcome.MISS_PRESENT); - } else { - replaceObserver.end(ReplaceOutcome.MISS_NOT_PRESENT); - } - } - return success.get(); - } catch (StoreAccessException e) { - try { - try { - remappingFunction.apply(key, null); - } catch (StorePassThroughException f) { - Throwable cause = f.getCause(); - if(cause instanceof CacheLoadingException) { - return resilienceStrategy.replaceFailure(key, oldValue, newValue, e, (CacheLoadingException) cause); - } else if(cause instanceof CacheWritingException) { - return resilienceStrategy.replaceFailure(key, oldValue, newValue, e, (CacheWritingException)cause); - } else { - throw new AssertionError(); - } - } - return resilienceStrategy.replaceFailure(key, oldValue, newValue, e, success.get()); - } finally { - replaceObserver.end(ReplaceOutcome.FAILURE); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public CacheRuntimeConfiguration getRuntimeConfiguration() { - return runtimeConfiguration; - } - - /** - * {@inheritDoc} - */ - @Override - public void init() { - statusTransitioner.init().succeeded(); - } - - /** - * {@inheritDoc} - */ - @Override - public void close() { - statusTransitioner.close().succeeded(); - } - - /** - * {@inheritDoc} - */ - @Override - public Status getStatus() { - return statusTransitioner.currentStatus(); - } - - /** - * {@inheritDoc} - */ - @Override - public void addHook(LifeCycled hook) { - statusTransitioner.addHook(hook); - } - - void removeHook(LifeCycled hook) { - statusTransitioner.removeHook(hook); - } - - private static void checkNonNull(Object thing) { - if(thing == null) { - throw new NullPointerException(); - } - } - - private static void checkNonNull(Object... things) { - for (Object thing : things) { - checkNonNull(thing); - } - } - - private void checkNonNullContent(Collection collectionOfThings) { - checkNonNull(collectionOfThings); - for (Object thing : collectionOfThings) { - checkNonNull(thing); - } - } - - private void addBulkMethodEntriesCount(BulkOps op, long count) { - bulkMethodEntries.get(op).add(count); - } - - /** - * {@inheritDoc} - */ - @Override - public Jsr107Cache getJsr107Cache() { - return jsr107Cache; - } - - /** - * {@inheritDoc} - */ - @Override - public CacheLoaderWriter getCacheLoaderWriter() { - return this.cacheLoaderWriter; - } - - private final class Jsr107CacheImpl implements Jsr107Cache { - @Override - public void loadAll(Set keys, boolean replaceExistingValues, Function, Map> loadFunction) { - if(keys.isEmpty()) { - return ; - } - if (replaceExistingValues) { - loadAllReplace(keys, loadFunction); - } else { - loadAllAbsent(keys, loadFunction); - } - } - - @Override - public Iterator> specIterator() { - return new SpecIterator<>(this, store); - } - - @Override - public V getNoLoader(K key) { - return EhcacheWithLoaderWriter.this.getNoLoader(key); - } - - @Override - public Map getAll(Set keys) { - return EhcacheWithLoaderWriter.this.getAllInternal(keys, false); - } - - private void loadAllAbsent(Set keys, final Function, Map> loadFunction) { - try { - store.bulkComputeIfAbsent(keys, absentKeys -> cacheLoaderWriterLoadAllForKeys(absentKeys, loadFunction).entrySet()); - } catch (StoreAccessException e) { - throw newCacheLoadingException(e); - } - } - - Map cacheLoaderWriterLoadAllForKeys(Iterable keys, Function, Map> loadFunction) { - try { - Map loaded = loadFunction.apply(keys); - - // put into a new map since we can't assume the 107 cache loader returns things ordered, or necessarily with all the desired keys - Map rv = new LinkedHashMap<>(); - for (K key : keys) { - rv.put(key, loaded.get(key)); - } - return rv; - } catch (Exception e) { - throw newCacheLoadingException(e); - } - } - - private void loadAllReplace(Set keys, final Function, Map> loadFunction) { - try { - store.bulkCompute(keys, entries -> { - Collection keys1 = new ArrayList<>(); - for (Map.Entry entry : entries) { - keys1.add(entry.getKey()); - } - return cacheLoaderWriterLoadAllForKeys(keys1, loadFunction).entrySet(); - }); - } catch (StoreAccessException e) { - throw newCacheLoadingException(e); - } - } - - @Override - public void compute(K key, final BiFunction computeFunction, - final Supplier replaceEqual, final Supplier invokeWriter, final Supplier withStatsAndEvents) { - putObserver.begin(); - removeObserver.begin(); - getObserver.begin(); - - try { - BiFunction fn = (mappedKey, mappedValue) -> { - if (mappedValue == null) { - getObserver.end(GetOutcome.MISS); - } else { - getObserver.end(GetOutcome.HIT); - } - - V newValue = computeFunction.apply(mappedKey, mappedValue); - - if (newValue == mappedValue) { - if (! replaceEqual.get()) { - return mappedValue; - } - } - - if (invokeWriter.get()) { - try { - if (newValue != null) { - cacheLoaderWriter.write(mappedKey, newValue); - } else { - cacheLoaderWriter.delete(mappedKey); - } - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - } - - if (newValueAlreadyExpired(mappedKey, mappedValue, newValue)) { - return null; - } - - if (withStatsAndEvents.get()) { - if (newValue == null) { - removeObserver.end(RemoveOutcome.SUCCESS); - } else { - putObserver.end(PutOutcome.PUT); - } - } - - return newValue; - }; - - store.compute(key, fn, replaceEqual); - } catch (StoreAccessException e) { - throw new RuntimeException(e); - } - } - - @Override - public V getAndRemove(K key) { - getObserver.begin(); - removeObserver.begin(); - - final AtomicReference existingValue = new AtomicReference<>(); - try { - store.compute(key, (mappedKey, mappedValue) -> { - existingValue.set(mappedValue); - - try { - cacheLoaderWriter.delete(mappedKey); - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - return null; - }); - } catch (StoreAccessException e) { - getObserver.end(GetOutcome.FAILURE); - removeObserver.end(RemoveOutcome.FAILURE); - throw new RuntimeException(e); - } - - V returnValue = existingValue.get(); - if (returnValue != null) { - getObserver.end(GetOutcome.HIT); - removeObserver.end(RemoveOutcome.SUCCESS); - } else { - getObserver.end(GetOutcome.MISS); - } - return returnValue; - } - - @Override - public V getAndPut(K key, final V value) { - getObserver.begin(); - putObserver.begin(); - - final AtomicReference existingValue = new AtomicReference<>(); - try { - store.compute(key, (mappedKey, mappedValue) -> { - existingValue.set(mappedValue); - - try { - cacheLoaderWriter.write(mappedKey, value); - } catch (Exception e) { - throw new StorePassThroughException(newCacheWritingException(e)); - } - - if (newValueAlreadyExpired(mappedKey, mappedValue, value)) { - return null; - } - - return value; - }); - } catch (StoreAccessException e) { - getObserver.end(GetOutcome.FAILURE); - putObserver.end(PutOutcome.FAILURE); - throw new RuntimeException(e); - } - - V returnValue = existingValue.get(); - if (returnValue != null) { - getObserver.end(GetOutcome.HIT); - } else { - getObserver.end(GetOutcome.MISS); - } - putObserver.end(PutOutcome.PUT); - return returnValue; - } - - @Override - public boolean remove(K key) { - return removeInternal(key); - } - - @Override - public void removeAll() { - Store.Iterator>> iterator = store.iterator(); - while (iterator.hasNext()) { - try { - Entry> next = iterator.next(); - remove(next.getKey()); - } catch (StoreAccessException cae) { - // skip - } - } - } - } - - private class CacheEntryIterator implements Iterator> { - - private final Store.Iterator>> iterator; - private final boolean quiet; - private Cache.Entry> current; - private Cache.Entry> next; - private StoreAccessException nextException; - - public CacheEntryIterator(boolean quiet) { - this.quiet = quiet; - this.iterator = store.iterator(); - advance(); - } - - private void advance() { - try { - while (iterator.hasNext()) { - next = iterator.next(); - if (getNoLoader(next.getKey()) != null) { - return; - } - } - next = null; - } catch (RuntimeException re) { - nextException = new StoreAccessException(re); - next = null; - } catch (StoreAccessException cae) { - nextException = cae; - next = null; - } - } - - @Override - public boolean hasNext() { - statusTransitioner.checkAvailable(); - return nextException != null || next != null; - } - - @Override - public Entry next() { - if (!hasNext()) { - throw new NoSuchElementException(); - } - - if (!quiet) getObserver.begin(); - if (nextException == null) { - if (!quiet) getObserver.end(GetOutcome.HIT); - current = next; - advance(); - return new ValueHolderBasedEntry<>(current); - } else { - if (!quiet) getObserver.end(GetOutcome.FAILURE); - StoreAccessException cae = nextException; - nextException = null; - return resilienceStrategy.iteratorFailure(cae); - } - } - - @Override - public void remove() { - statusTransitioner.checkAvailable(); - if (current == null) { - throw new IllegalStateException("No current element"); - } - EhcacheWithLoaderWriter.this.remove(current.getKey(), current.getValue().value()); - current = null; - } - } - - - private static RecoveryCache recoveryCache(final Store store) { - return new RecoveryCache() { - - @Override - public void obliterate() throws StoreAccessException { - store.clear(); - } - - @Override - public void obliterate(K key) throws StoreAccessException { - store.remove(key); - } - - @Override - public void obliterate(Iterable keys) throws StoreAccessException { - for (K key : keys) { - obliterate(key); - } - } - }; - } - - private static class ValueHolderBasedEntry implements Cache.Entry { - private final Cache.Entry> storeEntry; - - ValueHolderBasedEntry(Cache.Entry> storeEntry) { - this.storeEntry = storeEntry; - } - - @Override - public K getKey() { - return storeEntry.getKey(); - } - - @Override - public V getValue() { - return storeEntry.getValue().value(); - } - - } - -} diff --git a/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java b/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java deleted file mode 100644 index 09e775b3b0..0000000000 --- a/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.config; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.service.ServiceConfiguration; - -/** - * Base implementation of {@link CacheConfiguration}. - */ -public class BaseCacheConfiguration implements CacheConfiguration { - - private final Class keyType; - private final Class valueType; - private final EvictionAdvisor evictionAdvisor; - private final Collection> serviceConfigurations; - private final ClassLoader classLoader; - private final Expiry expiry; - private final ResourcePools resourcePools; - - /** - * Creates a new {@code BaseCacheConfiguration} from the given parameters. - * - * @param keyType the key type - * @param valueType the value type - * @param evictionAdvisor the eviction advisor - * @param classLoader the class loader - * @param expiry the expiry policy - * @param resourcePools the resource pools - * @param serviceConfigurations the service configurations - */ - public BaseCacheConfiguration(Class keyType, Class valueType, - EvictionAdvisor evictionAdvisor, - ClassLoader classLoader, Expiry expiry, - ResourcePools resourcePools, ServiceConfiguration... serviceConfigurations) { - if (keyType == null) { - throw new NullPointerException("keyType cannot be null"); - } - if (valueType == null) { - throw new NullPointerException("valueType cannot be null"); - } - if (resourcePools == null) { - throw new NullPointerException("resourcePools cannot be null"); - } - this.keyType = keyType; - this.valueType = valueType; - this.evictionAdvisor = evictionAdvisor; - this.classLoader = classLoader; - if (expiry != null) { - this.expiry = expiry; - } else { - this.expiry = Expirations.noExpiration(); - } - this.resourcePools = resourcePools; - this.serviceConfigurations = Collections.unmodifiableCollection(Arrays.asList(serviceConfigurations)); - } - - /** - * {@inheritDoc} - */ - @Override - public Collection> getServiceConfigurations() { - return serviceConfigurations; - } - - /** - * {@inheritDoc} - */ - @Override - public Class getKeyType() { - return keyType; - } - - /** - * {@inheritDoc} - */ - @Override - public Class getValueType() { - return valueType; - } - - /** - * {@inheritDoc} - */ - @Override - public EvictionAdvisor getEvictionAdvisor() { - return evictionAdvisor; - } - - /** - * {@inheritDoc} - */ - @Override - public ClassLoader getClassLoader() { - return classLoader; - } - - /** - * {@inheritDoc} - */ - @Override - public Expiry getExpiry() { - return expiry; - } - - /** - * {@inheritDoc} - */ - @Override - public ResourcePools getResourcePools() { - return resourcePools; - } -} diff --git a/core/src/main/java/org/ehcache/core/config/package-info.java b/core/src/main/java/org/ehcache/core/config/package-info.java deleted file mode 100644 index 6ffe44b748..0000000000 --- a/core/src/main/java/org/ehcache/core/config/package-info.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Package holding some core implementations related to configuration such as - * {@link org.ehcache.core.config.ResourcePoolsImpl} and {@link org.ehcache.core.config.DefaultConfiguration}. - */ -package org.ehcache.core.config; \ No newline at end of file diff --git a/core/src/main/java/org/ehcache/core/exceptions/ExceptionFactory.java b/core/src/main/java/org/ehcache/core/exceptions/ExceptionFactory.java deleted file mode 100644 index e012ac5bbe..0000000000 --- a/core/src/main/java/org/ehcache/core/exceptions/ExceptionFactory.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.exceptions; - -import org.ehcache.spi.loaderwriter.CacheLoadingException; -import org.ehcache.spi.loaderwriter.CacheWritingException; - -/** - * Factory to help creation of {@link CacheLoadingException} and {@link CacheWritingException}. - */ -public final class ExceptionFactory { - - private ExceptionFactory() { - throw new UnsupportedOperationException("Thou shalt not instantiate me!"); - } - - /** - * Creates a new {@code CacheWritingException} with the provided exception as cause. - * - * @param e the cause - * @return a cache writing exception - */ - public static CacheWritingException newCacheWritingException(Exception e) { - return new CacheWritingException(e); - } - - /** - * Creates a new {@code CacheLoadingException} with the provided exception as cause. - * - * @param e the cause - * @return a cache loading exception - */ - public static CacheLoadingException newCacheLoadingException(Exception e) { - return new CacheLoadingException(e); - } -} diff --git a/core/src/main/java/org/ehcache/core/internal/resilience/LoggingRobustResilienceStrategy.java b/core/src/main/java/org/ehcache/core/internal/resilience/LoggingRobustResilienceStrategy.java deleted file mode 100644 index 5d79ca5b47..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/resilience/LoggingRobustResilienceStrategy.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.resilience; - -import org.ehcache.Cache; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.CacheIterationException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * - * @author cdennis - */ -public class LoggingRobustResilienceStrategy extends RobustResilienceStrategy { - - private static final Logger LOGGER = LoggerFactory.getLogger(LoggingRobustResilienceStrategy.class); - - public LoggingRobustResilienceStrategy(RecoveryCache store) { - super(store); - } - - @Override - public Cache.Entry iteratorFailure(StoreAccessException e) { - LOGGER.error("Ehcache iterator terminated early due to exception", e); - throw new CacheIterationException(e); - } - - @Override - protected void recovered(K key, StoreAccessException from) { - LOGGER.info("Ehcache key {} recovered from", key, from); - } - - @Override - protected void recovered(Iterable keys, StoreAccessException from) { - LOGGER.info("Ehcache keys {} recovered from", keys, from); - } - - @Override - protected void recovered(StoreAccessException from) { - LOGGER.info("Ehcache recovered from", from); - } - - @Override - protected void inconsistent(K key, StoreAccessException because, StoreAccessException... cleanup) { - LOGGER.error("Ehcache key {} in possible inconsistent state due to ", key, because); - } - - @Override - protected void inconsistent(Iterable keys, StoreAccessException because, StoreAccessException... cleanup) { - LOGGER.error("Ehcache keys {} in possible inconsistent state due to ", keys, because); - } - - @Override - protected void inconsistent(StoreAccessException because, StoreAccessException... cleanup) { - LOGGER.error("Ehcache in possible inconsistent state due to ", because); - } -} diff --git a/core/src/main/java/org/ehcache/core/internal/resilience/RecoveryCache.java b/core/src/main/java/org/ehcache/core/internal/resilience/RecoveryCache.java deleted file mode 100644 index 5ebcac1c71..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/resilience/RecoveryCache.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.resilience; - -import org.ehcache.core.spi.store.StoreAccessException; - -/** - * - * @author Chris Dennis - */ -public interface RecoveryCache { - - void obliterate() throws StoreAccessException; - - void obliterate(K key) throws StoreAccessException; - - void obliterate(Iterable keys) throws StoreAccessException; -} diff --git a/core/src/main/java/org/ehcache/core/internal/resilience/ResilienceStrategy.java b/core/src/main/java/org/ehcache/core/internal/resilience/ResilienceStrategy.java deleted file mode 100644 index b033d9f002..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/resilience/ResilienceStrategy.java +++ /dev/null @@ -1,392 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.resilience; - -import java.util.Map; - -import org.ehcache.Cache; -import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheLoadingException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; - -/** - * A strategy for providing cache resilience in the face of failure. - *

- * An implementation of this interface is used by a cache to decide how to - * recover after internal components of the cache fail. Implementations of - * these methods are expected to take suitable recovery steps. They can then - * choose between allowing the operation to terminate successfully, or throw an - * exception which will be propagated to the thread calling in to the cache. - *

- * Resilience in this context refers only to resilience against cache failures - * and not to resilience against failures of any underlying - * {@link CacheLoaderWriter}. To this end writer or loader failures will only be - * reported to the strategy in the context of a coincident cache failure. - * Isolated writer and loader exceptions will be thrown directly. - * - * @param the type of the keys used to access data within the cache - * @param the type of the values held within the cache - * - * @author Chris Dennis - */ -public interface ResilienceStrategy { - - /** - * Called when a {@link Cache#get(java.lang.Object)} fails on a cache without - * a cache loader due to an underlying store failure. - * - * @param key the key being retrieved - * @param e the triggered failure - * @return the value to return from the operation - */ - V getFailure(K key, StoreAccessException e); - - /** - * Called when a {@link Cache#get(java.lang.Object)} fails on a cache with a - * cache loader due to an underlying store failure. - * - * @param key the key being retrieved - * @param loaded the value from the loader - * @param e the triggered failure - * @return the value to return from the operation - */ - V getFailure(K key, V loaded, StoreAccessException e); - - /** - * Called when a {@link Cache#get(java.lang.Object)} fails on a cache with a - * cache loader due to an underlying store failure. - * - * @param key the key being retrieved - * @param e the cache failure - * @param f the loader failure - * @return the value to return from the operation - */ - V getFailure(K key, StoreAccessException e, CacheLoadingException f); - - /** - * Called when a {@link Cache#containsKey(java.lang.Object)} fails due to an - * underlying store failure, and the resultant cache load operation also fails. - * - * @param key the key being queried - * @param e the triggered failure - * @return the value to return from the operation - */ - boolean containsKeyFailure(K key, StoreAccessException e); - - /** - * Called when a {@link Cache#put(java.lang.Object, java.lang.Object)} fails - * due to an underlying store failure. - * - * @param key the key being put - * @param value the value being put - * @param e the triggered failure - */ - void putFailure(K key, V value, StoreAccessException e); - - /** - * Called when a {@link Cache#put(java.lang.Object, java.lang.Object)} fails - * due to an underlying store failure, and the associated cache write - * operation also failed. - * - * @param key the key being put - * @param value the value being put - * @param e the cache failure - * @param f the writer failure - */ - void putFailure(K key, V value, StoreAccessException e, CacheWritingException f); - - /** - * Called when a {@link Cache#remove(java.lang.Object)} fails due to an - * underlying store failure. - * - * @param key the key being removed - * @param e the triggered failure - */ - void removeFailure(K key, StoreAccessException e); - - /** - * Called when a {@link Cache#remove(java.lang.Object)} fails - * due to an underlying store failure, and the associated cache write - * operation also failed. - * - * @param key the key being removed - * @param e the cache failure - * @param f the writer failure - */ - void removeFailure(K key, StoreAccessException e, CacheWritingException f); - - /** - * Called when a {@link Cache#clear()} fails due to an underlying store - * failure. - * - * @param e the triggered failure - */ - void clearFailure(StoreAccessException e); - - /** - * Called when a cache iterator advancement fails due to an underlying store - * failure. - * - * @param e the triggered failure - * @return an entry to return on a failed iteration - */ - Cache.Entry iteratorFailure(StoreAccessException e); - - /** - * Called when a {@link Cache#putIfAbsent(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure. - *

- * If it is known at the time of calling that the key is absent from the cache - * (and the writer if one is present) then {@code knownToBeAbsent} will be - * {@code true}. - * - * @param key the key being put - * @param value the value being put - * @param e the triggered failure - * @param knownToBeAbsent {@code true} if the value is known to be absent - * @return the value to return from the operation - */ - V putIfAbsentFailure(K key, V value, V loaderWriterFunctionResult, StoreAccessException e, boolean knownToBeAbsent); - - /** - * Called when a {@link Cache#putIfAbsent(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure, and the associated cache write - * operation also failed. - * - * @param key the key being put - * @param value the value being put - * @param e the cache failure - * @param f the writer failure - * @return the value to return from the operation - */ - V putIfAbsentFailure(K key, V value, StoreAccessException e, CacheWritingException f); - - /** - * Called when a {@link Cache#putIfAbsent(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure, and the associated cache load - * operation also failed. - * - * @param key the key being put - * @param value the value being put - * @param e the cache failure - * @param f the loader failure - * @return the value to return from the operation - */ - V putIfAbsentFailure(K key, V value, StoreAccessException e, CacheLoadingException f); - - /** - * Called when a {@link Cache#remove(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure. - *

- * If it is known at the time of calling that the targeted mapping is present - * in the cache (or the writer if one is present) then {@code knownToBePresent} - * will be {@code true}. - * - * @param key the key being removed - * @param value the value being removed - * @param e the triggered failure - * @param knownToBePresent {@code true} if the value is known to be present - * @return the value to return from the operation - */ - boolean removeFailure(K key, V value, StoreAccessException e, boolean knownToBePresent); - - /** - * Called when a {@link Cache#remove(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure, and the associated cache write - * operation also failed. - * - * @param key the key being removed - * @param value the value being removed - * @param e the cache failure - * @param f the writer failure - * @return the value to return from the operation - */ - boolean removeFailure(K key, V value, StoreAccessException e, CacheWritingException f); - - /** - * Called when a {@link Cache#remove(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure, and the associated cache load - * operation also failed. - * - * @param key the key being removed - * @param value the value being removed - * @param e the cache failure - * @param f the loader failure - * @return the value to return from the operation - */ - boolean removeFailure(K key, V value, StoreAccessException e, CacheLoadingException f); - - /** - * Called when a {@link Cache#replace(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure. - * - * @param key the key being replaced - * @param value the value being replaced - * @param e the triggered failure - * @return the value to return from the operation - */ - V replaceFailure(K key, V value, StoreAccessException e); - - /** - * Called when a {@link Cache#replace(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure, and the associated cache write - * operation also failed. - * - * @param key the key being replaced - * @param value the value being replaced - * @param e the cache failure - * @param f the writer failure - * @return the value to return from the operation - */ - V replaceFailure(K key, V value, StoreAccessException e, CacheWritingException f); - - /** - * Called when a {@link Cache#replace(java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure, and the associated cache load - * operation also failed. - * - * @param key the key being replaced - * @param value the value being replaced - * @param e the cache failure - * @param f the loader failure - * @return the value to return from the operation - */ - V replaceFailure(K key, V value, StoreAccessException e, CacheLoadingException f); - - /** - * Called when a {@link Cache#replace(java.lang.Object, java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure. - *

- * If it is known at the time of calling that the target mapping is present - * in the cache (or the writer if one is present) then {@code knownToBeMatch} - * will be {@code true}. - * - * @param key the key being replaced - * @param value the expected value - * @param newValue the replacement value - * @param e the triggered failure - * @param knownToMatch {@code true} if the value is known to match - * @return the value to return from the operation - */ - boolean replaceFailure(K key, V value, V newValue, StoreAccessException e, boolean knownToMatch); - - /** - * Called when a {@link Cache#replace(java.lang.Object, java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure, and the associated cache write - * operation also failed. - * - * @param key the key being replaced - * @param value the expected value - * @param newValue the replacement value - * @param e the cache failure - * @param f the writer failure - * @return the value to return from the operation - */ - boolean replaceFailure(K key, V value, V newValue, StoreAccessException e, CacheWritingException f); - - /** - * Called when a {@link Cache#replace(java.lang.Object, java.lang.Object, java.lang.Object)} - * fails due to an underlying store failure, and the associated cache load - * operation also failed. - * - * @param key the key being replaced - * @param value the expected value - * @param newValue the replacement value - * @param e the cache failure - * @param f the loader failure - * @return the value to return from the operation - */ - boolean replaceFailure(K key, V value, V newValue, StoreAccessException e, CacheLoadingException f); - - /** - * Called when a {@link Cache#getAll(java.util.Set)} fails on a cache - * without a cache loader due to an underlying store failure. - * - * @param keys the keys being retrieved - * @param e the triggered failure - * @return the value to return from the operation - */ - Map getAllFailure(Iterable keys, StoreAccessException e); - - /** - * Called when a {@link Cache#getAll(java.util.Set)} fails on a cache - * with a cache loader due to an underlying store failure. - * - * @param keys the keys being retrieved - * @param loaded the values from the loader - * @param e the triggered failure - * @return the value to return from the operation - */ - Map getAllFailure(Iterable keys, Map loaded, StoreAccessException e); - - /** - * Called when a {@link Cache#getAll(java.util.Set)} fails on a cache - * with a cache loader due to an underlying store failure, and the associated - * cache write operation also failed. - * - * @param keys the keys being retrieved - * @param e the cache failure - * @param f the writer failure - * @return the value to return from the operation - */ - Map getAllFailure(Iterable keys, StoreAccessException e, BulkCacheLoadingException f); - - /** - * Called when a {@link Cache#putAll(java.util.Map)} fails due to an - * underlying store failure. - * - * @param entries the entries being put - * @param e the triggered failure - */ - void putAllFailure(Map entries, StoreAccessException e); - - /** - * Called when a {@link Cache#putAll(java.util.Map)} fails due to an - * underlying store failure, and the associated cache write operation also - * failed. - * - * @param entries the entries being put - * @param e the cache failure - * @param f the writer failure - */ - void putAllFailure(Map entries, StoreAccessException e, BulkCacheWritingException f); - - /** - * Called when a {@link Cache#removeAll(java.util.Set)} fails due to an - * underlying store failure. - * - * @param keys the keys being removed - * @param e the triggered failure - * @return the value to return from the operation - */ - Map removeAllFailure(Iterable keys, StoreAccessException e); - - /** - * Called when a {@link Cache#removeAll(java.util.Set)} fails - * due to an underlying store failure, and the associated cache write - * operation also failed. - * - * @param keys the keys being removed - * @param e the cache failure - * @param f the writer failure - * @return the value to return from the operation - */ - Map removeAllFailure(Iterable keys, StoreAccessException e, BulkCacheWritingException f); -} diff --git a/core/src/main/java/org/ehcache/core/internal/resilience/RethrowingStoreAccessException.java b/core/src/main/java/org/ehcache/core/internal/resilience/RethrowingStoreAccessException.java deleted file mode 100644 index c6f155c52f..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/resilience/RethrowingStoreAccessException.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.resilience; - -import org.ehcache.core.spi.store.StoreAccessException; - -/** - * Generic exception used when an internal operation fails on a {@link org.ehcache.Cache} but shouldn't be - * handled by a resilience strategy but rather rethrown to the caller. - * - * @deprecated This mechanism is a stop-gap solution until {@link ResilienceStrategy} - * instances can be plugged-in. - * - * @author Ludovic Orban - */ -@Deprecated -public class RethrowingStoreAccessException extends StoreAccessException { - - /** - * Create an instance of RethrowingStoreAccessException. - * @param cause the cause RuntimeException that will be rethrown. - */ - public RethrowingStoreAccessException(RuntimeException cause) { - super(cause); - } - - @Override - public synchronized RuntimeException getCause() { - return (RuntimeException) super.getCause(); - } -} diff --git a/core/src/main/java/org/ehcache/core/internal/resilience/RobustResilienceStrategy.java b/core/src/main/java/org/ehcache/core/internal/resilience/RobustResilienceStrategy.java deleted file mode 100644 index 8de48c3082..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/resilience/RobustResilienceStrategy.java +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.resilience; - -import java.util.HashMap; -import java.util.Map; - -import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheLoadingException; -import org.ehcache.spi.loaderwriter.CacheWritingException; - -import static java.util.Collections.emptyMap; - -/** - * - * @author Chris Dennis - */ -public abstract class RobustResilienceStrategy implements ResilienceStrategy { - - private final RecoveryCache cache; - - public RobustResilienceStrategy(RecoveryCache cache) { - this.cache = cache; - } - - @Override - public V getFailure(K key, StoreAccessException e) { - cleanup(key, e); - return null; - } - - @Override - public V getFailure(K key, V loaded, StoreAccessException e) { - cleanup(key, e); - return loaded; - } - - @Override - public V getFailure(K key, StoreAccessException e, CacheLoadingException f) { - cleanup(key, e); - throw f; - } - - @Override - public boolean containsKeyFailure(K key, StoreAccessException e) { - cleanup(key, e); - return false; - } - - @Override - public void putFailure(K key, V value, StoreAccessException e) { - cleanup(key, e); - } - - @Override - public void putFailure(K key, V value, StoreAccessException e, CacheWritingException f) { - cleanup(key, e); - throw f; - } - - @Override - public void removeFailure(K key, StoreAccessException e) { - cleanup(key, e); - } - - @Override - public void removeFailure(K key, StoreAccessException e, CacheWritingException f) { - cleanup(key, e); - throw f; - } - - @Override - public void clearFailure(StoreAccessException e) { - cleanup(e); - } - - @Override - public V putIfAbsentFailure(K key, V value, V loaderWriterFunctionResult, StoreAccessException e, boolean knownToBeAbsent) { - cleanup(key, e); - if (loaderWriterFunctionResult != null && !loaderWriterFunctionResult.equals(value)) { - return loaderWriterFunctionResult; - } else { - return null; - } - } - - @Override - public V putIfAbsentFailure(K key, V value, StoreAccessException e, CacheWritingException f) { - cleanup(key, e); - throw f; - } - - @Override - public V putIfAbsentFailure(K key, V value, StoreAccessException e, CacheLoadingException f) { - cleanup(key, e); - throw f; - } - - @Override - public boolean removeFailure(K key, V value, StoreAccessException e, boolean knownToBePresent) { - cleanup(key, e); - return knownToBePresent; - } - - @Override - public boolean removeFailure(K key, V value, StoreAccessException e, CacheWritingException f) { - cleanup(key, e); - throw f; - } - - @Override - public boolean removeFailure(K key, V value, StoreAccessException e, CacheLoadingException f) { - cleanup(key, e); - throw f; - } - - @Override - public V replaceFailure(K key, V value, StoreAccessException e) { - cleanup(key, e); - return null; - } - - @Override - public V replaceFailure(K key, V value, StoreAccessException e, CacheWritingException f) { - cleanup(key, e); - throw f; - } - - @Override - public V replaceFailure(K key, V value, StoreAccessException e, CacheLoadingException f) { - cleanup(key, e); - throw f; - } - - @Override - public boolean replaceFailure(K key, V value, V newValue, StoreAccessException e, boolean knownToMatch) { - cleanup(key, e); - return knownToMatch; - } - - @Override - public boolean replaceFailure(K key, V value, V newValue, StoreAccessException e, CacheWritingException f) { - cleanup(key, e); - throw f; - } - - @Override - public boolean replaceFailure(K key, V value, V newValue, StoreAccessException e, CacheLoadingException f) { - cleanup(key, e); - throw f; - } - - @Override - public Map getAllFailure(Iterable keys, StoreAccessException e) { - cleanup(keys, e); - HashMap result = new HashMap<>(); - for (K key : keys) { - result.put(key, null); - } - return result; - } - - @Override - public Map getAllFailure(Iterable keys, Map loaded, StoreAccessException e) { - cleanup(keys, e); - return loaded; - } - - @Override - public Map getAllFailure(Iterable keys, StoreAccessException e, BulkCacheLoadingException f) { - cleanup(keys, e); - throw f; - } - - @Override - public void putAllFailure(Map entries, StoreAccessException e) { - cleanup(entries.keySet(), e); - } - - @Override - public void putAllFailure(Map entries, StoreAccessException e, BulkCacheWritingException f) { - cleanup(entries.keySet(), e); - throw f; - } - - @Override - public Map removeAllFailure(Iterable entries, StoreAccessException e) { - cleanup(entries, e); - return emptyMap(); - } - - @Override - public Map removeAllFailure(Iterable entries, StoreAccessException e, BulkCacheWritingException f) { - cleanup(entries, e); - throw f; - } - - private void cleanup(StoreAccessException from) { - filterException(from); - try { - cache.obliterate(); - } catch (StoreAccessException e) { - inconsistent(from, e); - return; - } - recovered(from); - } - - private void cleanup(Iterable keys, StoreAccessException from) { - filterException(from); - try { - cache.obliterate(keys); - } catch (StoreAccessException e) { - inconsistent(keys, from, e); - return; - } - recovered(keys, from); - } - - private void cleanup(K key, StoreAccessException from) { - filterException(from); - try { - cache.obliterate(key); - } catch (StoreAccessException e) { - inconsistent(key, from, e); - return; - } - recovered(key, from); - } - - @Deprecated - void filterException(StoreAccessException cae) throws RuntimeException { - if (cae instanceof RethrowingStoreAccessException) { - throw ((RethrowingStoreAccessException) cae).getCause(); - } - } - - protected abstract void recovered(K key, StoreAccessException from); - - protected abstract void recovered(Iterable keys, StoreAccessException from); - - protected abstract void recovered(StoreAccessException from); - - protected abstract void inconsistent(K key, StoreAccessException because, StoreAccessException... cleanup); - - protected abstract void inconsistent(Iterable keys, StoreAccessException because, StoreAccessException... cleanup); - - protected abstract void inconsistent(StoreAccessException because, StoreAccessException... cleanup); -} diff --git a/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java b/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java deleted file mode 100644 index 7ad35054d4..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java +++ /dev/null @@ -1,569 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.service; - -import org.ehcache.config.Builder; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.spi.service.PluralService; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.core.spi.service.ServiceFactory; -import org.ehcache.core.internal.util.ClassLoading; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Deque; -import java.util.HashMap; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.ServiceLoader; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; - -import static java.util.Collections.*; - -/** - * Provides discovery and tracking services for {@link Service} implementations. - */ -public final class ServiceLocator implements ServiceProvider { - - private static final Logger LOGGER = LoggerFactory.getLogger(ServiceLocator.class); - private final ServiceMap services; - - private final ReadWriteLock runningLock = new ReentrantReadWriteLock(); - - private final AtomicBoolean running = new AtomicBoolean(false); - - public static DependencySet dependencySet() { - return new DependencySet(); - } - - private ServiceLocator(ServiceMap services) { - this.services = services; - } - - @Override - public T getService(Class serviceType) { - if (serviceType.isAnnotationPresent(PluralService.class)) { - throw new IllegalArgumentException(serviceType.getName() + " is marked as a PluralService"); - } - final Collection registeredServices = getServicesOfType(serviceType); - if (registeredServices.size() > 1) { - throw new AssertionError("The non-PluralService type" + serviceType.getName() - + " has more than one service registered"); - } - return (registeredServices.isEmpty() ? null : registeredServices.iterator().next()); - } - - @Override - public Collection getServicesOfType(Class serviceType) { - return services.get(serviceType); - } - - public boolean knowsServiceFor(ServiceConfiguration serviceConfig) { - return services.contains(serviceConfig.getServiceType()); - } - - public void startAllServices() throws Exception { - Deque started = new LinkedList<>(); - final Lock lock = runningLock.writeLock(); - lock.lock(); - try { - if (!running.compareAndSet(false, true)) { - throw new IllegalStateException("Already started!"); - } - - /* - * This ensures that we start services in dependency order - */ - LinkedList unstarted = new LinkedList<>(services.all()); - int totalServices = unstarted.size(); - long start = System.currentTimeMillis(); - LOGGER.debug("Starting {} Services...", totalServices); - while (!unstarted.isEmpty()) { - boolean startedSomething = false; - for (Iterator it = unstarted.iterator(); it.hasNext(); ) { - Service s = it.next(); - if (hasUnstartedDependencies(s, unstarted)) { - LOGGER.trace("Delaying starting {}", s); - } else { - LOGGER.trace("Starting {}", s); - s.start(this); - started.push(s); - it.remove(); - startedSomething = true; - } - } - if (startedSomething) { - LOGGER.trace("Cycle complete: " + unstarted.size() + " Services remaining"); - } else { - throw new IllegalStateException("Cyclic dependency in Service set: " + unstarted); - } - } - LOGGER.debug("All Services successfully started, {} Services in {}ms", totalServices, System.currentTimeMillis() - start); - } catch (Exception e) { - while(!started.isEmpty()) { - Service toBeStopped = started.pop(); - try { - toBeStopped.stop(); - } catch (Exception e1) { - LOGGER.error("Stopping Service failed due to ", e1); - } - } - throw e; - } finally { - lock.unlock(); - } - } - - public void stopAllServices() throws Exception { - Exception firstException = null; - Lock lock = runningLock.writeLock(); - lock.lock(); - try { - if(!running.compareAndSet(true, false)) { - throw new IllegalStateException("Already stopped!"); - } - - /* - * This ensures that we stop services in dependency order - */ - Collection running = new LinkedList<>(services.all()); - int totalServices = running.size(); - long start = System.currentTimeMillis(); - LOGGER.debug("Stopping {} Services...", totalServices); - while (!running.isEmpty()) { - boolean stoppedSomething = false; - for (Iterator it = running.iterator(); it.hasNext(); ) { - Service s = it.next(); - if (hasRunningDependencies(s, running)) { - LOGGER.trace("Delaying stopping {}", s); - } else { - LOGGER.trace("Stopping {}", s); - try { - s.stop(); - } catch (Exception e) { - if (firstException == null) { - firstException = e; - } else { - LOGGER.error("Stopping Service failed due to ", e); - } - } - it.remove(); - stoppedSomething = true; - } - } - if (stoppedSomething) { - LOGGER.trace("Cycle complete: " + running.size() + " Services remaining"); - } else { - throw new AssertionError("Cyclic dependency in Service set: " + running); - } - } - LOGGER.debug("All Services successfully stopped, {} Services in {}ms", totalServices, System.currentTimeMillis() - start); - } finally { - lock.unlock(); - } - if(firstException != null) { - throw firstException; - } - } - - private boolean hasUnstartedDependencies(Service service, Iterable unstarted) { - for (Class dep : identifyTransitiveDependenciesOf(service.getClass())) { - for (Service s : unstarted) { - if (dep.isInstance(s)) { - return true; - } - } - } - return false; - } - - private boolean hasRunningDependencies(Service service, Iterable running) { - for (Class dep : identifyTransitiveDependenciesOf(service.getClass())) { - for (Service s : running) { - if (dep.isInstance(s)) { - return true; - } - } - } - return false; - } - - public static class DependencySet implements Builder { - - @SuppressWarnings("rawtypes") - private final ServiceLoader serviceLoader = ClassLoading.libraryServiceLoaderFor(ServiceFactory.class); - - private final ServiceMap provided = new ServiceMap(); - private final Set> requested = new HashSet<>(); - - public DependencySet with(Service service) { - provided.add(service); - return this; - } - - public DependencySet with(Iterable services) { - for (Service s : services) { - with(s); - } - return this; - } - - public DependencySet with(ServiceCreationConfiguration config) { - Class serviceType = config.getServiceType(); - - //TODO : This stanza is due to the way we use configure the JSR-107 service - if (provided.contains(serviceType) && !serviceType.isAnnotationPresent(PluralService.class)) { - return this; - } - - Iterable> serviceFactories = ServiceLocator.getServiceFactories(serviceLoader); - boolean success = false; - for (ServiceFactory factory : serviceFactories) { - final Class factoryServiceType = factory.getServiceType(); - if (serviceType.isAssignableFrom(factoryServiceType)) { - @SuppressWarnings("unchecked") - ServiceFactory serviceFactory = (ServiceFactory) factory; - with(serviceFactory.create(config)); - success = true; - } - } - if (success) { - return this; - } else { - throw new IllegalStateException("No factories exist for " + serviceType); - } - } - - public DependencySet with(Class clazz) { - requested.add(clazz); - return this; - } - - public boolean contains(Class serviceClass) { - return provided.contains(serviceClass); - } - - public T providerOf(Class serviceClass) { - if (serviceClass.isAnnotationPresent(PluralService.class)) { - throw new IllegalArgumentException("Cannot retrieve single provider for plural service"); - } else { - Collection providers = providersOf(serviceClass); - switch (providers.size()) { - case 0: - return null; - case 1: - return providers.iterator().next(); - default: - throw new AssertionError(); - } - } - } - - public Collection providersOf(Class serviceClass) { - return provided.get(serviceClass); - } - - @Override - public ServiceLocator build() { - try { - ServiceMap resolvedServices = new ServiceMap(); - - for (Service service : provided.all()) { - resolvedServices = lookupDependenciesOf(resolvedServices, service.getClass()).add(service); - } - - for (Class request : requested) { - if (request.isAnnotationPresent(PluralService.class)) { - try { - resolvedServices = lookupService(resolvedServices, request); - } catch (DependencyException e) { - if (!resolvedServices.contains(request)) { - throw e; - } - } - } else if (!resolvedServices.contains(request)) { - resolvedServices = lookupService(resolvedServices, request); - } - } - - return new ServiceLocator(resolvedServices); - } catch (DependencyException e) { - throw new IllegalStateException(e); - } - } - - ServiceMap lookupDependenciesOf(ServiceMap resolved, Class requested) throws DependencyException { - for (Class dependency : identifyImmediateDependenciesOf(requested)) { - resolved = lookupService(resolved, dependency); - } - return resolved; - } - - private ServiceMap lookupService(ServiceMap resolved, Class requested) throws DependencyException { - //Have we already resolved this dependency? - if (resolved.contains(requested) && !requested.isAnnotationPresent(PluralService.class)) { - return resolved; - } - //Attempt resolution from the provided services - resolved = new ServiceMap(resolved).addAll(provided.get(requested)); - if (resolved.contains(requested) && !requested.isAnnotationPresent(PluralService.class)) { - return resolved; - } - Collection> serviceFactories = discoverServices(resolved, requested); - if (serviceFactories.size() > 1 && !requested.isAnnotationPresent(PluralService.class)) { - throw new DependencyException("Multiple factories for non-plural service"); - } - for(ServiceFactory factory : serviceFactories) { - if (!resolved.contains(factory.getServiceType())) { - try { - resolved = lookupDependenciesOf(resolved, factory.getServiceType()); - } catch (DependencyException e) { - continue; - } - - T service = factory.create(null); - - //we copy the service map so that if upstream dependency resolution fails we don't pollute the real resolved set - resolved = new ServiceMap(resolved).add(service); - } - } - if (resolved.contains(requested)) { - return resolved; - } else { - throw new DependencyException("Failed to find provider with satisfied dependency set for " + requested + " [candidates " + serviceFactories + "]"); - } - } - - /** - * For the {@link Service} class specified, attempt to instantiate the service using the - * {@link ServiceFactory} infrastructure. - * - * @param serviceClass the {@code Service} type to create - * @param the type of the {@code Service} - * - * @return the collection of created services; may be empty - * - * @throws IllegalStateException if the configured service is already registered or the configured service - * implements a {@code Service} subtype that is not marked with the {@link PluralService} annotation - * but is already registered - */ - private Collection> discoverServices(ServiceMap resolved, Class serviceClass) { - Collection> serviceFactories = new ArrayList<>(); - for (ServiceFactory factory : ServiceLocator.getServiceFactories(serviceLoader)) { - final Class factoryServiceType = factory.getServiceType(); - if (serviceClass.isAssignableFrom(factoryServiceType) && !factory.getClass().isAnnotationPresent(ServiceFactory.RequiresConfiguration.class)) { - if (provided.contains(factoryServiceType) || resolved.contains(factoryServiceType)) { - // Can have only one service registered under a concrete type - continue; - } - @SuppressWarnings("unchecked") - ServiceFactory serviceFactory = (ServiceFactory) factory; - serviceFactories.add(serviceFactory); - } - } - return serviceFactories; - } - } - - private static Collection> getAllInterfaces(final Class clazz) { - ArrayList> interfaces = new ArrayList<>(); - for (Class c = clazz; c != null; c = c.getSuperclass()) { - for (Class i : c.getInterfaces()) { - interfaces.add(i); - interfaces.addAll(getAllInterfaces(i)); - } - } - return interfaces; - } - - private static Set> identifyImmediateDependenciesOf(final Class clazz) { - if (clazz == null) { - return emptySet(); - } - - Set> dependencies = new HashSet<>(); - final ServiceDependencies annotation = clazz.getAnnotation(ServiceDependencies.class); - if (annotation != null) { - for (final Class dependency : annotation.value()) { - if (Service.class.isAssignableFrom(dependency)) { - @SuppressWarnings("unchecked") - Class serviceDependency = (Class) dependency; - dependencies.add(serviceDependency); - } else { - throw new IllegalStateException("Service dependency declared by " + clazz.getName() + - " is not a Service: " + dependency.getName()); - } - } - } - - for (Class interfaceClazz : clazz.getInterfaces()) { - if (Service.class.isAssignableFrom(interfaceClazz)) { - dependencies.addAll(identifyImmediateDependenciesOf(Service.class.getClass().cast(interfaceClazz))); - } - } - - dependencies.addAll(identifyImmediateDependenciesOf(clazz.getSuperclass())); - - return dependencies; - } - - private static Set> identifyTransitiveDependenciesOf(final Class clazz) { - Set> transitive = new HashSet<>(); - - Set> dependencies = identifyImmediateDependenciesOf(clazz); - transitive.addAll(dependencies); - - for (Class klazz : dependencies) { - transitive.addAll(identifyTransitiveDependenciesOf(klazz)); - } - - return transitive; - } - - @SuppressWarnings("unchecked") - private static Iterable> getServiceFactories(@SuppressWarnings("rawtypes") ServiceLoader serviceFactory) { - List> list = new ArrayList<>(); - for (ServiceFactory factory : serviceFactory) { - list.add((ServiceFactory)factory); - } - return list; - } - - private static class DependencyException extends Exception { - public DependencyException(String s) { - super(s); - } - } - - private static class ServiceMap { - - private final Map, Set> services; - - public ServiceMap(ServiceMap resolved) { - this.services = new HashMap<>(); - for (Map.Entry, Set> e : resolved.services.entrySet()) { - Set copy = newSetFromMap(new IdentityHashMap()); - copy.addAll(e.getValue()); - this.services.put(e.getKey(), copy); - } - } - - public ServiceMap() { - this.services = new HashMap<>(); - } - - public Set get(Class serviceType) { - @SuppressWarnings("unchecked") - Set s = (Set) services.get(serviceType); - if (s == null) { - return emptySet(); - } else { - return unmodifiableSet(s); - } - } - - public ServiceMap addAll(Iterable services) { - for (Service s : services) { - add(s); - } - return this; - } - - public ServiceMap add(Service service) { - Set> serviceClazzes = new HashSet<>(); - - serviceClazzes.add(service.getClass()); - for (Class i : getAllInterfaces(service.getClass())) { - if (Service.class != i && Service.class.isAssignableFrom(i)) { - - @SuppressWarnings("unchecked") - Class serviceClass = (Class) i; - - serviceClazzes.add(serviceClass); - } - } - - /* - * Register the concrete service under all Service subtypes it implements. If - * the Service subtype is annotated with @PluralService, permit multiple registrations; - * otherwise, fail the registration, - */ - for (Class serviceClazz : serviceClazzes) { - if (serviceClazz.isAnnotationPresent(PluralService.class)) { - // Permit multiple registrations - Set registeredServices = services.get(serviceClazz); - if (registeredServices == null) { - registeredServices = new LinkedHashSet<>(); - services.put(serviceClazz, registeredServices); - } - registeredServices.add(service); - } else { - // Only a single registration permitted - Set registeredServices = services.get(serviceClazz); - if (registeredServices == null || registeredServices.isEmpty()) { - services.put(serviceClazz, singleton(service)); - } else if (!registeredServices.contains(service)) { - final StringBuilder message = new StringBuilder("Duplicate service implementation(s) found for ") - .append(service.getClass()); - for (Class serviceClass : serviceClazzes) { - if (!serviceClass.isAnnotationPresent(PluralService.class)) { - Set s = this.services.get(serviceClass); - final Service declaredService = s == null ? null : s.iterator().next(); - if (declaredService != null) { - message - .append("\n\t\t- ") - .append(serviceClass) - .append(" already has ") - .append(declaredService.getClass()); - } - } - } - throw new IllegalStateException(message.toString()); - } - } - } - return this; - } - - public Set all() { - Set all = newSetFromMap(new IdentityHashMap()); - for (Set s : services.values()) { - all.addAll(s); - } - return unmodifiableSet(all); - } - - public boolean contains(Class request) { - return services.containsKey(request); - } - } -} diff --git a/core/src/main/java/org/ehcache/core/internal/store/StoreConfigurationImpl.java b/core/src/main/java/org/ehcache/core/internal/store/StoreConfigurationImpl.java deleted file mode 100644 index a9265f23c6..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/store/StoreConfigurationImpl.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.store; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.expiry.Expiry; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.serialization.Serializer; - -/** - * Implementation of the {@link org.ehcache.core.spi.store.Store.Configuration store configuration interface} as used by - * {@link org.ehcache.core.EhcacheManager EhcacheManager} in order to prepare {@link Store} creation. - */ -public class StoreConfigurationImpl implements Store.Configuration { - - private final Class keyType; - private final Class valueType; - private final EvictionAdvisor evictionAdvisor; - private final ClassLoader classLoader; - private final Expiry expiry; - private final ResourcePools resourcePools; - private final Serializer keySerializer; - private final Serializer valueSerializer; - private final int dispatcherConcurrency; - - /** - * Creates a new {@code StoreConfigurationImpl} based on the provided parameters. - * - * @param cacheConfig the cache configuration - * @param dispatcherConcurrency the level of concurrency for ordered events - * @param keySerializer the key serializer - * @param valueSerializer the value serializer - */ - public StoreConfigurationImpl(CacheConfiguration cacheConfig, int dispatcherConcurrency, - Serializer keySerializer, Serializer valueSerializer) { - this(cacheConfig.getKeyType(), cacheConfig.getValueType(), cacheConfig.getEvictionAdvisor(), - cacheConfig.getClassLoader(), cacheConfig.getExpiry(), cacheConfig.getResourcePools(), - dispatcherConcurrency, keySerializer, valueSerializer); - } - - /** - * Creates a new {@code StoreConfigurationImpl} based on the provided parameters. - * - * @param keyType the key type - * @param valueType the value type - * @param evictionAdvisor the eviction advisor - * @param classLoader the class loader - * @param expiry the expiry policy - * @param resourcePools the resource pools - * @param dispatcherConcurrency the level of concurrency for ordered events - * @param keySerializer the key serializer - * @param valueSerializer the value serializer - */ - public StoreConfigurationImpl(Class keyType, Class valueType, - EvictionAdvisor evictionAdvisor, - ClassLoader classLoader, Expiry expiry, - ResourcePools resourcePools, int dispatcherConcurrency, - Serializer keySerializer, Serializer valueSerializer) { - this.keyType = keyType; - this.valueType = valueType; - this.evictionAdvisor = evictionAdvisor; - this.classLoader = classLoader; - this.expiry = expiry; - this.resourcePools = resourcePools; - this.keySerializer = keySerializer; - this.valueSerializer = valueSerializer; - this.dispatcherConcurrency = dispatcherConcurrency; - } - - /** - * {@inheritDoc} - */ - @Override - public Class getKeyType() { - return keyType; - } - - /** - * {@inheritDoc} - */ - @Override - public Class getValueType() { - return valueType; - } - - /** - * {@inheritDoc} - */ - @Override - public EvictionAdvisor getEvictionAdvisor() { - return evictionAdvisor; - } - - /** - * {@inheritDoc} - */ - @Override - public ClassLoader getClassLoader() { - return this.classLoader; - } - - /** - * {@inheritDoc} - */ - @Override - public Expiry getExpiry() { - return expiry; - } - - /** - * {@inheritDoc} - */ - @Override - public ResourcePools getResourcePools() { - return resourcePools; - } - - /** - * {@inheritDoc} - */ - @Override - public Serializer getKeySerializer() { - return keySerializer; - } - - /** - * {@inheritDoc} - */ - @Override - public Serializer getValueSerializer() { - return valueSerializer; - } - - /** - * {@inheritDoc} - */ - @Override - public int getDispatcherConcurrency() { - return dispatcherConcurrency; - } -} diff --git a/core/src/main/java/org/ehcache/core/internal/store/StoreSupport.java b/core/src/main/java/org/ehcache/core/internal/store/StoreSupport.java deleted file mode 100644 index 8c42f44273..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/store/StoreSupport.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.store; - -import org.ehcache.config.ResourceType; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Set; - -/** - * Defines methods supporting working with {@link Store} implementations. - */ -public final class StoreSupport { - /** - * Private, niladic constructor to prevent instantiation. - */ - private StoreSupport() { - } - - /** - * Chooses a {@link org.ehcache.core.spi.store.Store.Provider Store.Provider} from those - * available through the {@link ServiceLocator} that best supports the resource types and - * service configurations provided. This method relies on the - * {@link Store.Provider#rank(Set, Collection) Store.Provider.rank} method in making the - * selection. - * - * @param serviceProvider the {@code ServiceProvider} instance to use - * @param resourceTypes the set of {@code ResourceType}s that must be supported by the provider - * @param serviceConfigs the collection of {@code ServiceConfiguration}s used to influence the - * selection - * - * @return the non-{@code null} {@code Store.Provider} implementation chosen - * - * @throws IllegalStateException if no suitable {@code Store.Provider} is available or if - * multiple {@code Store.Provider} implementations return the same top ranking - */ - public static Store.Provider selectStoreProvider( - final ServiceProvider serviceProvider, final Set> resourceTypes, final Collection> serviceConfigs) { - - final Collection storeProviders = serviceProvider.getServicesOfType(Store.Provider.class); - int highRank = 0; - List rankingProviders = new ArrayList<>(); - for (final Store.Provider provider : storeProviders) { - int rank = provider.rank(resourceTypes, serviceConfigs); - if (rank > highRank) { - highRank = rank; - rankingProviders.clear(); - rankingProviders.add(provider); - } else if (rank != 0 && rank == highRank) { - rankingProviders.add(provider); - } - } - - if (rankingProviders.isEmpty()) { - final StringBuilder sb = new StringBuilder("No Store.Provider found to handle configured resource types "); - sb.append(resourceTypes); - sb.append(" from "); - formatStoreProviders(storeProviders, sb); - throw new IllegalStateException(sb.toString()); - } else if (rankingProviders.size() > 1) { - final StringBuilder sb = new StringBuilder("Multiple Store.Providers found to handle configured resource types "); - sb.append(resourceTypes); - sb.append(": "); - formatStoreProviders(rankingProviders, sb); - throw new IllegalStateException(sb.toString()); - } - - return rankingProviders.get(0); - } - - private static StringBuilder formatStoreProviders(final Collection storeProviders, final StringBuilder sb) { - sb.append('{'); - boolean prependSeparator = false; - for (final Store.Provider provider : storeProviders) { - if (prependSeparator) { - sb.append(", "); - } else { - prependSeparator = true; - } - sb.append(provider.getClass().getName()); - } - sb.append('}'); - return sb; - } -} diff --git a/core/src/main/java/org/ehcache/core/internal/util/ClassLoading.java b/core/src/main/java/org/ehcache/core/internal/util/ClassLoading.java deleted file mode 100644 index aec4cd8128..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/util/ClassLoading.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.util; - -import java.io.IOException; -import java.net.URL; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Enumeration; -import java.util.ServiceLoader; - -public class ClassLoading { - - private static final ClassLoader DEFAULT_CLASSLOADER; - - static { - DEFAULT_CLASSLOADER = AccessController.doPrivileged((PrivilegedAction) DefaultClassLoader::new); - } - - public static ClassLoader getDefaultClassLoader() { - return DEFAULT_CLASSLOADER; - } - - public static ServiceLoader libraryServiceLoaderFor(Class serviceType) { - return ServiceLoader.load(serviceType, ClassLoading.class.getClassLoader()); - } - - private static class DefaultClassLoader extends ClassLoader { - private static final ClassLoader THIS_LOADER = DefaultClassLoader.class.getClassLoader(); - - @Override - public Class loadClass(String name) throws ClassNotFoundException { - ClassLoader loader = Thread.currentThread().getContextClassLoader(); - - if (loader != null) { - try { - return loader.loadClass(name); - } catch (ClassNotFoundException cnfe) { - // - } - } - - return THIS_LOADER.loadClass(name); - } - - @Override - public URL getResource(String name) { - ClassLoader loader = Thread.currentThread().getContextClassLoader(); - - if (loader != null) { - URL res = loader.getResource(name); - if (res != null) { - return res; - } - } - - return THIS_LOADER.getResource(name); - } - - @Override - public Enumeration getResources(String name) throws IOException { - ClassLoader loader = Thread.currentThread().getContextClassLoader(); - - if (loader != null) { - Enumeration resources = loader.getResources(name); - if (resources != null && resources.hasMoreElements()) { - return resources; - } - } - - return THIS_LOADER.getResources(name); - } - } -} diff --git a/core/src/main/java/org/ehcache/core/internal/util/Functions.java b/core/src/main/java/org/ehcache/core/internal/util/Functions.java deleted file mode 100644 index 472ea3c5ac..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/util/Functions.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.util; - -import java.util.function.BiFunction; -import java.util.function.Function; - -/** - * A set of utilities methods and Classes around Functions - * - * @author Alex Snaps - */ -public class Functions { - - /** - * Will transform the passed in {@link Function} in to an apply once and only once Function. - * Irrespectively of the argument passed in! And isn't thread safe. Basically acts as a dumb cache. - * - * @param f the function to memoize - * @param the function's input param type - * @param the function's output type - * @return the memoized function - */ - public static Function memoize(Function f) { - return new MemoizingFunction<>(f); - } - - /** - * Will transform the passed in {@link BiFunction} in to an apply once and only once BiFunction. - * Irrespectively of the arguments passed in! And isn't thread safe. Basically acts as a dumb cache. - * - * @param f the bifunction to memoize - * @param the bifunction's first input param type - * @param the bifunction's second input param type - * @param the function's output type - * @return the memoized bifunction - */ - public static BiFunction memoize(BiFunction f) { - return new MemoizingBiFunction<>(f); - } - - private static final class MemoizingFunction implements Function { - - private final Function function; - private boolean computed; - private T value; - - private MemoizingFunction(final Function function) { - this.function = function; - } - - @Override - public T apply(final A a) { - if (computed) { - return value; - } - value = function.apply(a); - computed = true; - return value; - } - } - - private static final class MemoizingBiFunction implements BiFunction { - - private final BiFunction function; - private boolean computed; - private T value; - - private MemoizingBiFunction(final BiFunction function) { - this.function = function; - } - - @Override - public T apply(final A a, final B b) { - if (computed) { - return value; - } - computed = true; - value = function.apply(a, b); - return value; - } - } -} diff --git a/core/src/main/java/org/ehcache/core/internal/util/ValueSuppliers.java b/core/src/main/java/org/ehcache/core/internal/util/ValueSuppliers.java deleted file mode 100644 index 1beeb2d5fe..0000000000 --- a/core/src/main/java/org/ehcache/core/internal/util/ValueSuppliers.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.util; - -import org.ehcache.ValueSupplier; - -/** - * Utility for creating basic {@link ValueSupplier} instances - */ -public class ValueSuppliers { - - /** - * Returns a basic {@link ValueSupplier} that serves the value passed in - * - * @param value the value to hold - * @param the value type - * @return a value supplier with the given value - */ - public static ValueSupplier supplierOf(final V value) { - return () -> value; - } - - private ValueSuppliers() { - // Not instantiable - } -} diff --git a/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java b/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java deleted file mode 100644 index db48eccfa5..0000000000 --- a/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.spi.service; - -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceCreationConfiguration; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -/** - * A factory abstraction that can create {@link Service} instances. - */ -public interface ServiceFactory { - - /** - * Creates an instance of the service using the passed in {@link ServiceCreationConfiguration}. - *

- * Note that a {@code null} configuration may be supported or even required by a service implementation. - * - * @param configuration the creation configuration, can be {@code null} for some services - * @return the new service, not {@link Service#start(ServiceProvider) started} - */ - T create(ServiceCreationConfiguration configuration); - - /** - * Queries a {@code ServiceFactory} to know which {@link Service} type it produces. - * - * @return the class of the produced service. - */ - Class getServiceType(); - - - @Retention(RUNTIME) - @Target(ElementType.TYPE) - @interface RequiresConfiguration { - - } -} diff --git a/core/src/main/java/org/ehcache/core/spi/service/ServiceUtils.java b/core/src/main/java/org/ehcache/core/spi/service/ServiceUtils.java deleted file mode 100644 index 120fcb7561..0000000000 --- a/core/src/main/java/org/ehcache/core/spi/service/ServiceUtils.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.spi.service; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; - -/** - * ServiceUtils - */ -public class ServiceUtils { - - private ServiceUtils() { - // No instance possible - } - - public static Collection findAmongst(Class clazz, Collection instances) { - return findAmongst(clazz, instances.toArray()); - } - - public static Collection findAmongst(Class clazz, Object ... instances) { - Collection matches = new ArrayList<>(); - for (Object instance : instances) { - if (instance != null && clazz.isAssignableFrom(instance.getClass())) { - matches.add(clazz.cast(instance)); - } - } - return Collections.unmodifiableCollection(matches); - } - - public static T findSingletonAmongst(Class clazz, Collection instances) { - return findSingletonAmongst(clazz, instances.toArray()); - } - - public static T findSingletonAmongst(Class clazz, Object ... instances) { - final Collection matches = findAmongst(clazz, instances); - if (matches.isEmpty()) { - return null; - } else if (matches.size() == 1) { - return matches.iterator().next(); - } else { - throw new IllegalArgumentException("More than one " + clazz.getName() + " found"); - } - } -} diff --git a/core/src/main/java/org/ehcache/core/spi/service/StatisticsService.java b/core/src/main/java/org/ehcache/core/spi/service/StatisticsService.java deleted file mode 100644 index eb30fadedf..0000000000 --- a/core/src/main/java/org/ehcache/core/spi/service/StatisticsService.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.spi.service; - -import org.ehcache.core.statistics.CacheStatistics; -import org.ehcache.spi.service.Service; - -/** - * Service providing raw statistics for cache and tier usage. - */ -public interface StatisticsService extends Service { - - CacheStatistics getCacheStatistics(String cacheName); -} diff --git a/core/src/main/java/org/ehcache/core/spi/store/AbstractValueHolder.java b/core/src/main/java/org/ehcache/core/spi/store/AbstractValueHolder.java deleted file mode 100644 index b4ec5080ff..0000000000 --- a/core/src/main/java/org/ehcache/core/spi/store/AbstractValueHolder.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.spi.store; - -import org.ehcache.expiry.Duration; - -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLongFieldUpdater; - -import static java.lang.String.format; - -/** - * @author Ludovic Orban - */ -public abstract class AbstractValueHolder implements Store.ValueHolder { - - private static final AtomicLongFieldUpdater HITS_UPDATER = AtomicLongFieldUpdater.newUpdater(AbstractValueHolder.class, "hits"); - private final long id; - private final long creationTime; - private volatile long lastAccessTime; - private volatile long expirationTime; - private volatile long hits; - - private static final AtomicLongFieldUpdater ACCESSTIME_UPDATER = AtomicLongFieldUpdater.newUpdater(AbstractValueHolder.class, "lastAccessTime"); - private static final AtomicLongFieldUpdater EXPIRATIONTIME_UPDATER = AtomicLongFieldUpdater.newUpdater(AbstractValueHolder.class, "expirationTime"); - - protected AbstractValueHolder(long id, long creationTime) { - this(id, creationTime, NO_EXPIRE); - } - - protected AbstractValueHolder(long id, long creationTime, long expirationTime) { - this.id = id; - this.creationTime = creationTime; - this.expirationTime = expirationTime; - this.lastAccessTime = creationTime; - } - - protected abstract TimeUnit nativeTimeUnit(); - - @Override - public long creationTime(TimeUnit unit) { - return unit.convert(creationTime, nativeTimeUnit()); - } - - public void setExpirationTime(long expirationTime, TimeUnit unit) { - if (expirationTime == NO_EXPIRE) { - updateExpirationTime(NO_EXPIRE); - } else if (expirationTime <= 0) { - throw new IllegalArgumentException("invalid expiration time: " + expirationTime); - } else { - updateExpirationTime(nativeTimeUnit().convert(expirationTime, unit)); - } - } - - private void updateExpirationTime(long update) { - while (true) { - long current = this.expirationTime; - if (current >= update) { - break; - } - if (EXPIRATIONTIME_UPDATER.compareAndSet(this, current, update)) { - break; - } - }; - } - - public void accessed(long now, Duration expiration) { - final TimeUnit timeUnit = nativeTimeUnit(); - if (expiration != null) { - if (expiration.isInfinite()) { - setExpirationTime(Store.ValueHolder.NO_EXPIRE, null); - } else { - long millis = timeUnit.convert(expiration.getLength(), expiration.getTimeUnit()); - long newExpirationTime ; - if (millis == Long.MAX_VALUE) { - newExpirationTime = Long.MAX_VALUE; - } else { - newExpirationTime = now + millis; - if (newExpirationTime < 0) { - newExpirationTime = Long.MAX_VALUE; - } - } - setExpirationTime(newExpirationTime, timeUnit); - } - } - setLastAccessTime(now, timeUnit); - HITS_UPDATER.getAndIncrement(this); - } - - @Override - public long expirationTime(TimeUnit unit) { - final long expire = this.expirationTime; - if (expire == NO_EXPIRE) { - return NO_EXPIRE; - } - return unit.convert(expire, nativeTimeUnit()); - } - - @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { - final long expire = this.expirationTime; - if (expire == NO_EXPIRE) { - return false; - } - return expire <= nativeTimeUnit().convert(expirationTime, unit); - } - - @Override - public long lastAccessTime(TimeUnit unit) { - return unit.convert(lastAccessTime, nativeTimeUnit()); - } - - public void setLastAccessTime(long lastAccessTime, TimeUnit unit) { - long update = unit.convert(lastAccessTime, nativeTimeUnit()); - while (true) { - long current = this.lastAccessTime; - if (current >= update) { - break; - } - if (ACCESSTIME_UPDATER.compareAndSet(this, current, update)) { - break; - } - }; - } - - @Override - public int hashCode() { - int result = 1; - result = 31 * result + (int)(creationTime ^ (creationTime >>> 32)); - result = 31 * result + (int)(lastAccessTime ^ (lastAccessTime >>> 32)); - result = 31 * result + (int)(expirationTime ^ (expirationTime >>> 32)); - return result; - } - - @Override - public boolean equals(Object obj) { - if (obj instanceof AbstractValueHolder) { - AbstractValueHolder other = (AbstractValueHolder) obj; - return - other.creationTime(nativeTimeUnit()) == creationTime && creationTime(other.nativeTimeUnit()) == other.creationTime && - other.expirationTime(nativeTimeUnit()) == expirationTime && expirationTime(other.nativeTimeUnit()) == other.expirationTime && - other.lastAccessTime(nativeTimeUnit()) == lastAccessTime && lastAccessTime(other.nativeTimeUnit()) == other.lastAccessTime; - } - return false; - } - - @Override - public float hitRate(long now, TimeUnit unit) { - final long endTime = TimeUnit.NANOSECONDS.convert(now, TimeUnit.MILLISECONDS); - final long startTime = TimeUnit.NANOSECONDS.convert(creationTime, nativeTimeUnit()); - float duration = (endTime - startTime)/(float)TimeUnit.NANOSECONDS.convert(1, unit); - return (hits/duration); - } - - @Override - public long hits() { - return this.hits; - } - - protected void setHits(long hits) { - HITS_UPDATER.set(this, hits); - } - - @Override - public long getId() { - return id; - } - - @Override - public String toString() { - return format("%s", value()); - } -} diff --git a/core/src/main/java/org/ehcache/core/spi/store/StoreAccessTimeoutException.java b/core/src/main/java/org/ehcache/core/spi/store/StoreAccessTimeoutException.java deleted file mode 100644 index 9bb8505616..0000000000 --- a/core/src/main/java/org/ehcache/core/spi/store/StoreAccessTimeoutException.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.spi.store; - -/** - * Thrown when a cache {@link Store} operation exceeds a configured time limit. - * This exception is not handled by the - * {@link org.ehcache.core.internal.resilience.ResilienceStrategy ResilienceStrategy}. - */ -public class StoreAccessTimeoutException extends RuntimeException { - private static final long serialVersionUID = 7824475930240423944L; - - /** - * Creates a new exception wrapping the {@link Throwable cause} passed in. - * - * @param cause the cause of this exception - */ - public StoreAccessTimeoutException(Throwable cause) { - super(cause); - } - - /** - * Creates a new exception wrapping the {@link Throwable cause} passed in and with the provided message. - * - * @param message information about the exception - * @param cause the cause of this exception - */ - public StoreAccessTimeoutException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/core/src/main/java/org/ehcache/core/statistics/TypedValueStatistic.java b/core/src/main/java/org/ehcache/core/statistics/TypedValueStatistic.java deleted file mode 100644 index bcad2eb6d0..0000000000 --- a/core/src/main/java/org/ehcache/core/statistics/TypedValueStatistic.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.statistics; - -import org.terracotta.statistics.ValueStatistic; -import org.terracotta.statistics.extended.StatisticType; - -/** - * Represent a {@code ValueStatistic} that knows its {@code StatisticType}. - */ -public abstract class TypedValueStatistic implements ValueStatistic { - private final StatisticType type; - - /** - * Type of this value statistic. Can be COUNTER or SIZE. - * - * @param type {@code StatisticType} - */ - public TypedValueStatistic(StatisticType type) { - this.type = type; - } - - public StatisticType getType() { - return type; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveValueTest.java b/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveValueTest.java deleted file mode 100644 index 81e657af2e..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveValueTest.java +++ /dev/null @@ -1,222 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core; - -import java.util.Collections; -import java.util.EnumSet; - -import org.ehcache.Status; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; -import org.hamcrest.CoreMatchers; -import org.junit.Test; -import org.slf4j.LoggerFactory; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; - -/** - * @author Abhilash - * - */ -public class EhcacheBasicRemoveValueTest extends EhcacheBasicCrudBase { - - @Test - public void testRemoveNullNull() { - final Ehcache ehcache = this.getEhcache(); - - try { - ehcache.remove(null, null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testRemoveKeyNull() throws Exception { - final Ehcache ehcache = this.getEhcache(); - - try { - ehcache.remove("key", null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testRemoveNullValue() throws Exception { - final Ehcache ehcache = this.getEhcache(); - - try { - ehcache.remove(null, "value"); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - /** - * Tests the effect of a {@link Ehcache#remove(Object, Object)} for - *

- */ - @Test - public void testRemoveValueNoStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final Ehcache ehcache = this.getEhcache(); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).remove(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); - } - - /** - * Tests the effect of a {@link Ehcache#remove(Object, Object)} for - *
    - *
  • key with unequal value in {@code Store}
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final Ehcache ehcache = this.getEhcache(); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).remove(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); - } - - /** - * Tests the effect of a {@link Ehcache#remove(Object, Object)} for - *
    - *
  • key with equal value in {@code Store}
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - - final Ehcache ehcache = this.getEhcache(); - - assertTrue(ehcache.remove("key", "value")); - verify(this.store).remove(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link Ehcache#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.remove} throws
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).remove(eq("key"), eq("value")); - - final Ehcache ehcache = this.getEhcache(); - - ehcache.remove("key", "value"); - verify(this.store).remove(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link Ehcache#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.remove} throws
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).remove(eq("key"), eq("value")); - - final Ehcache ehcache = this.getEhcache(); - - ehcache.remove("key", "value"); - verify(this.store).remove(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link Ehcache#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.remove} throws
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).remove(eq("key"), eq("value")); - - final Ehcache ehcache = this.getEhcache(); - - ehcache.remove("key", "value"); - verify(this.store).remove(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Gets an initialized {@link Ehcache Ehcache} instance - * - * @return a new {@code Ehcache} instance - */ - private Ehcache getEhcache() { - final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory - .getLogger(Ehcache.class + "-" + "EhcacheBasicRemoveValueTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheLoaderWriterTest.java b/core/src/test/java/org/ehcache/core/EhcacheLoaderWriterTest.java deleted file mode 100644 index 05c29dfe12..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheLoaderWriterTest.java +++ /dev/null @@ -1,417 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.core.exceptions.StorePassThroughException; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheLoadingException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.junit.Before; -import org.junit.Test; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; -import org.slf4j.LoggerFactory; - -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; - - -/** - * @author vfunshteyn - */ -public class EhcacheLoaderWriterTest { - private EhcacheWithLoaderWriter cache; - private Store store; - - @SuppressWarnings("unchecked") - @Before - public void setUp() throws Exception { - store = mock(Store.class); - CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); - final CacheConfiguration config = new BaseCacheConfiguration<>(Number.class, String.class, null, - null, null, ResourcePoolsHelper.createHeapOnlyPools()); - CacheEventDispatcher notifier = mock(CacheEventDispatcher.class); - cache = new EhcacheWithLoaderWriter<>( - config, store, loaderWriter, notifier, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheLoaderWriterTest")); - cache.init(); - } - - @Test - public void testGet() throws Exception { - when(store.computeIfAbsent(any(Number.class), anyFunction())).thenAnswer(invocation -> { - Function function = asFunction(invocation); - function.apply((Number)invocation.getArguments()[0]); - return null; - }); - cache.get(1); - verify(cache.getCacheLoaderWriter()).load(1); - } - - @Test - public void testGetThrowsOnCompute() throws Exception { - when(store.computeIfAbsent(any(Number.class), anyFunction())).thenThrow(new StoreAccessException("boom")); - String expected = "foo"; - when((String)cache.getCacheLoaderWriter().load(any(Number.class))).thenReturn(expected); - assertThat(cache.get(1), is(expected)); - verify(store).remove(1); - } - - @Test(expected=CacheLoadingException.class) - public void testGetThrowsOnLoad() throws Exception { - when(store.computeIfAbsent(any(Number.class), anyFunction())).thenAnswer(invocation -> { - Function function = asFunction(invocation); - try { - function.apply((Number)invocation.getArguments()[0]); - } catch (StorePassThroughException e) { - throw e.getCause(); - } - return null; - }); - when(cache.getCacheLoaderWriter().load(any(Number.class))).thenThrow(new Exception()); - cache.get(1); - } - - @Test - public void testPut() throws Exception { - when(store.compute(any(Number.class), anyBiFunction())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - function.apply((Number)invocation.getArguments()[0], null); - return null; - }); - cache.put(1, "one"); - verify(cache.getCacheLoaderWriter()).write(1, "one"); - } - - @Test - public void testPutThrowsOnCompute() throws Exception { - when(store.compute(any(Number.class), anyBiFunction())).thenThrow(new StoreAccessException("boom")); - cache.put(1, "one"); - verify(store).remove(1); - verify(cache.getCacheLoaderWriter()).write(1, "one"); - } - - @Test(expected=CacheWritingException.class) - public void testPutThrowsOnWrite() throws Exception { - when(store.compute(any(Number.class), anyBiFunction())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - try { - function.apply((Number)invocation.getArguments()[0], null); - } catch (StorePassThroughException e) { - throw e.getCause(); - } - return null; - }); - doThrow(new Exception()).when(cache.getCacheLoaderWriter()).write(any(Number.class), anyString()); - cache.put(1, "one"); - } - - @Test - public void testRemove() throws Exception { - when(store.compute(any(Number.class), anyBiFunction())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - function.apply((Number)invocation.getArguments()[0], null); - return null; - }); - cache.remove(1); - verify(cache.getCacheLoaderWriter()).delete(1); - } - - @Test - public void testRemoveThrowsOnCompute() throws Exception { - when(store.compute(any(Number.class), anyBiFunction())).thenThrow(new StoreAccessException("boom")); - cache.remove(1); - verify(store).remove(1); - verify(cache.getCacheLoaderWriter()).delete(1); - } - - @Test(expected=CacheWritingException.class) - public void testRemoveThrowsOnWrite() throws Exception { - when(store.compute(any(Number.class), anyBiFunction())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - try { - function.apply((Number)invocation.getArguments()[0], null); - } catch (StorePassThroughException e) { - throw e.getCause(); - } - return null; - }); - doThrow(new Exception()).when(cache.getCacheLoaderWriter()).delete(any(Number.class)); - cache.remove(1); - } - - @Test - public void testPutIfAbsent_present() throws Exception { - when(store.computeIfAbsent(any(Number.class), anyFunction())).thenAnswer(invocation -> { - Function function = asFunction(invocation); - Number key = (Number) invocation.getArguments()[0]; - if (!key.equals(1)) { - function.apply(key); - } - return null; - }); - - cache.putIfAbsent(1, "foo"); - verifyZeroInteractions(cache.getCacheLoaderWriter()); - } - - @Test - public void testPutIfAbsent_absent() throws Exception { - when(store.computeIfAbsent(any(Number.class), anyFunction())).thenAnswer(invocation -> { - Function function = asFunction(invocation); - Number key = (Number) invocation.getArguments()[0]; - function.apply(key); - return null; - }); - - cache.putIfAbsent(1, "foo"); - verify(cache.getCacheLoaderWriter()).load(1); - verify(cache.getCacheLoaderWriter()).write(1, "foo"); - } - - @Test - public void testPutIfAbsentThrowsOnCompute() throws Exception { - when(store.computeIfAbsent(any(Number.class), anyFunction())).thenThrow(new StoreAccessException("boom")); - cache.putIfAbsent(1, "one"); - verify(cache.getCacheLoaderWriter()).write(1, "one"); - verify(store).remove(1); - } - - @Test(expected=CacheWritingException.class) - public void testPutIfAbsentThrowsOnWrite() throws Exception { - when(store.computeIfAbsent(any(Number.class), anyFunction())).thenAnswer(invocation -> { - Function function = asFunction(invocation); - try { - function.apply((Number)invocation.getArguments()[0]); - } catch (StorePassThroughException e) { - throw e.getCause(); - } - return null; - }); - doThrow(new Exception()).when(cache.getCacheLoaderWriter()).write(any(Number.class), anyString()); - cache.putIfAbsent(1, "one"); - } - - @Test - public void testTwoArgRemoveMatch() throws Exception { - final String cachedValue = "cached"; - when(store.compute(any(Number.class), anyBiFunction(), anySupplier())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - function.apply((Number)invocation.getArguments()[0], cachedValue); - return null; - }); - assertThat(cache.remove(1, cachedValue), is(true)); - verify(cache.getCacheLoaderWriter()).delete(1); - } - - @Test - public void testTwoArgRemoveKeyNotInCache() throws Exception { - when(store.compute(any(Number.class), anyBiFunction())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - function.apply((Number)invocation.getArguments()[0], null); - return null; - }); - String toRemove = "foo"; - assertThat(cache.remove(1, toRemove), is(false)); - verify(cache.getCacheLoaderWriter(), never()).delete(1); - } - - @Test - public void testTwoArgRemoveWriteUnsuccessful() throws Exception { - when(store.compute(any(Number.class), anyBiFunction(), anySupplier())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - function.apply((Number)invocation.getArguments()[0], null); - return null; - }); - String toRemove = "foo"; - assertThat(cache.remove(1, toRemove), is(false)); - verify(cache.getCacheLoaderWriter(), never()).delete(1); - - } - - @Test - public void testTwoArgRemoveThrowsOnCompute() throws Exception { - String toRemove = "foo"; - when(store.compute(any(Number.class), anyBiFunction(), anySupplier())).thenThrow(new StoreAccessException("boom")); - assertThat(cache.remove(1, toRemove), is(false)); - verify(cache.getCacheLoaderWriter(), never()).delete(1); - verify(store).remove(1); - } - - @Test(expected=CacheWritingException.class) - public void testTwoArgRemoveThrowsOnWrite() throws Exception { - final String expected = "foo"; - when(store.compute(any(Number.class), anyBiFunction(), anySupplier())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - try { - function.apply((Number)invocation.getArguments()[0], expected); - } catch (StorePassThroughException e) { - throw e.getCause(); - } - return null; - }); - doThrow(new Exception()).when(cache.getCacheLoaderWriter()).delete(any(Number.class)); - cache.remove(1, expected); - } - - @Test - public void testReplace() throws Exception { - final String oldValue = "foo"; - final String newValue = "bar"; - when(store.compute(any(Number.class), anyBiFunction())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - function.apply((Number)invocation.getArguments()[0], oldValue); - return null; - }); - when((String)cache.getCacheLoaderWriter().load(any(Number.class))).thenReturn(oldValue); - - assertThat(cache.replace(1, newValue), is(oldValue)); - verify(cache.getCacheLoaderWriter()).write(1, newValue); - } - - @Test - public void testReplaceThrowsOnCompute() throws Exception { - when(store.compute(any(Number.class), anyBiFunction())).thenThrow(new StoreAccessException("boom")); - String value = "foo"; - assertThat(cache.replace(1, value), nullValue()); - verify(cache.getCacheLoaderWriter()).load(1); - verify(store).remove(1); - } - - @Test(expected=CacheWritingException.class) - public void testReplaceThrowsOnWrite() throws Exception { - final String expected = "old"; - when(store.compute(any(Number.class), anyBiFunction())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - try { - function.apply((Number)invocation.getArguments()[0], expected); - } catch (StorePassThroughException e) { - throw e.getCause(); - } - return null; - }); - when((String)cache.getCacheLoaderWriter().load(any(Number.class))).thenReturn(expected); - doThrow(new Exception()).when(cache.getCacheLoaderWriter()).write(any(Number.class), anyString()); - cache.replace(1, "bar"); - } - - @Test - public void testThreeArgReplaceMatch() throws Exception { - final String cachedValue = "cached"; - final String newValue = "toReplace"; - - when(store.compute(any(Number.class), anyBiFunction(), anySupplier())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - function.apply((Number)invocation.getArguments()[0], cachedValue); - return null; - }); - - assertThat(cache.replace(1, cachedValue, newValue), is(true)); - verify(cache.getCacheLoaderWriter()).write(1, newValue); - } - - @Test - public void testThreeArgReplaceKeyNotInCache() throws Exception { - final String oldValue = "cached"; - final String newValue = "toReplace"; - - when(store.compute(any(Number.class), anyBiFunction())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - function.apply((Number)invocation.getArguments()[0], null); - return null; - }); - - assertThat(cache.replace(1, oldValue, newValue), is(false)); - verify(cache.getCacheLoaderWriter(), never()).write(1, newValue); - } - - @Test - public void testThreeArgReplaceThrowsOnCompute() throws Exception { - final String oldValue = "cached"; - final String newValue = "toReplace"; - when(store.compute(any(Number.class), anyBiFunction(), anySupplier())).thenThrow(new StoreAccessException("boom")); - - assertThat(cache.replace(1, oldValue, newValue), is(false)); - verify(cache.getCacheLoaderWriter(), never()).write(1, newValue); - verify(store).remove(1); - } - - @Test(expected=CacheWritingException.class) - public void testThreeArgReplaceThrowsOnWrite() throws Exception { - when(store.compute(any(Number.class), anyBiFunction(), anySupplier())).thenAnswer(invocation -> { - BiFunction function = asBiFunction(invocation); - final String applied; - try { - applied = function.apply((Number)invocation.getArguments()[0], "old"); - } catch (StorePassThroughException e) { - throw e.getCause(); - } - - @SuppressWarnings("unchecked") - final Store.ValueHolder mock = mock(Store.ValueHolder.class); - - when(mock.value()).thenReturn(applied); - return mock; - }); - doThrow(new Exception()).when(cache.getCacheLoaderWriter()).write(any(Number.class), anyString()); - cache.replace(1, "old", "new"); - } - - @SuppressWarnings("unchecked") - private static BiFunction anyBiFunction() { - return any(BiFunction.class); - } - - @SuppressWarnings("unchecked") - private Function anyFunction() { - return any(Function.class); - } - - @SuppressWarnings("unchecked") - private Supplier anySupplier() { - return any(Supplier.class); - } - - @SuppressWarnings("unchecked") - private static BiFunction asBiFunction(InvocationOnMock in) { - return (BiFunction)in.getArguments()[1]; - } - - @SuppressWarnings("unchecked") - private static Function asFunction(InvocationOnMock in) { - return (Function)in.getArguments()[1]; - } - -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheTest.java b/core/src/test/java/org/ehcache/core/EhcacheTest.java deleted file mode 100644 index dc693431f4..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheTest.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core; - -import static org.mockito.Mockito.mock; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.core.spi.store.Store; -import org.slf4j.LoggerFactory; - -/** - * @author Abhilash - * - */ -public class EhcacheTest extends CacheTest { - - @Override - protected InternalCache getCache(Store store) { - final CacheConfiguration config = new BaseCacheConfiguration<>(Object.class, Object.class, null, - null, null, ResourcePoolsHelper.createHeapOnlyPools()); - @SuppressWarnings("unchecked") - CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); - return new Ehcache<>(config, store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheTest")); - } - -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicClearTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicClearTest.java deleted file mode 100644 index d286fb9799..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicClearTest.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; -import org.hamcrest.Matchers; -import org.junit.Test; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; - -/** - * Provides testing of basic CLEAR operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicClearTest extends EhcacheBasicCrudBase { - - @Mock - private CacheLoaderWriter cacheLoaderWriter; - - /** - * Tests {@link EhcacheWithLoaderWriter#clear()} over an empty cache. - */ - @Test - public void testClearEmpty() throws Exception { - final FakeStore realStore = new FakeStore(Collections.emptyMap()); - this.store = spy(realStore); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - ehcache.clear(); - verifyZeroInteractions(this.cacheLoaderWriter); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(realStore.getEntryMap().isEmpty(), is(true)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#clear()} over an empty cache where - * {@link Store#clear() Store.clear} throws a - * {@link StoreAccessException StoreAccessException}. - */ - @Test - public void testClearEmptyStoreAccessException() throws Exception { - final FakeStore realStore = new FakeStore(Collections.emptyMap()); - this.store = spy(realStore); - doThrow(new StoreAccessException("")).when(this.store).clear(); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - ehcache.clear(); - verifyZeroInteractions(this.cacheLoaderWriter); - verify(this.spiedResilienceStrategy).clearFailure(any(StoreAccessException.class)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#clear()} over a non-empty cache. - */ - @Test - public void testClearNonEmpty() throws Exception { - final FakeStore realStore = new FakeStore(this.getTestStoreEntries()); - this.store = spy(realStore); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - assertThat(realStore.getEntryMap().isEmpty(), is(false)); - - ehcache.clear(); - verifyZeroInteractions(this.cacheLoaderWriter); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(realStore.getEntryMap().isEmpty(), is(true)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#clear()} over a non-empty cache where - * {@link Store#clear() Store.clear} throws a - * {@link StoreAccessException StoreAccessException}. - */ - @Test - public void testClearNonEmptyStoreAccessException() throws Exception { - final FakeStore realStore = new FakeStore(this.getTestStoreEntries()); - this.store = spy(realStore); - doThrow(new StoreAccessException("")).when(this.store).clear(); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - assertThat(realStore.getEntryMap().isEmpty(), is(false)); - - ehcache.clear(); - verifyZeroInteractions(this.cacheLoaderWriter); - verify(this.spiedResilienceStrategy).clearFailure(any(StoreAccessException.class)); - // Not testing ResilienceStrategy implementation here - } - - private Map getTestStoreEntries() { - final Map storeEntries = new HashMap<>(); - storeEntries.put("key1", "value1"); - storeEntries.put("keyA", "valueA"); - storeEntries.put("key2", "value2"); - storeEntries.put("keyB", "valueB"); - return storeEntries; - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using {@link #cacheLoaderWriter}. - * - * @return a new {@code EhcacheWithLoaderWriter} instance - */ - private EhcacheWithLoaderWriter getEhcache() - throws Exception { - final EhcacheWithLoaderWriter ehcache = - new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, this.cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicClearTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicContainsKeyTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicContainsKeyTest.java deleted file mode 100644 index 71f5c518f5..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicContainsKeyTest.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; -import org.hamcrest.Matchers; -import org.junit.Test; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; - -/** - * Provides testing of basic CONTAINS_KEY operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicContainsKeyTest extends EhcacheBasicCrudBase { - - @Mock - private CacheLoaderWriter cacheLoaderWriter; - - /** - * Tests {@link EhcacheWithLoaderWriter#containsKey(Object) EhcacheWithLoaderWriter.containsKey} with a {@code null} key. - */ - @Test - public void testContainsKeyNull() throws Exception { - final FakeStore realStore = new FakeStore(Collections.emptyMap()); - this.store = spy(realStore); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - try { - ehcache.containsKey(null); - fail(); - } catch (NullPointerException e) { - // Expected - } - verifyZeroInteractions(this.cacheLoaderWriter); - verifyZeroInteractions(this.spiedResilienceStrategy); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#containsKey(Object) EhcacheWithLoaderWriter.containsKey} over an empty cache. - */ - @Test - public void testContainsKeyEmpty() throws Exception { - final FakeStore realStore = new FakeStore(Collections.emptyMap()); - this.store = spy(realStore); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - assertFalse(ehcache.containsKey("key")); - verifyZeroInteractions(this.cacheLoaderWriter); - verifyZeroInteractions(this.spiedResilienceStrategy); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#containsKey(Object) EhcacheWithLoaderWriter.containsKey} over an empty cache - * where {@link Store#containsKey(Object) Store.containsKey} throws a - * {@link StoreAccessException StoreAccessException}. - */ - @Test - public void testContainsKeyEmptyStoreAccessException() throws Exception { - final FakeStore realStore = new FakeStore(Collections.emptyMap()); - this.store = spy(realStore); - doThrow(new StoreAccessException("")).when(this.store).containsKey("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - ehcache.containsKey("key"); - verifyZeroInteractions(this.cacheLoaderWriter); - verify(this.spiedResilienceStrategy).containsKeyFailure(eq("key"), any(StoreAccessException.class)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#containsKey(Object) EhcacheWithLoaderWriter.containsKey} over a cache holding - * the target key. - */ - @Test - public void testContainsKeyContains() throws Exception { - final FakeStore realStore = new FakeStore(this.getTestStoreEntries()); - this.store = spy(realStore); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - assertTrue(ehcache.containsKey("keyA")); - verifyZeroInteractions(this.cacheLoaderWriter); - verifyZeroInteractions(this.spiedResilienceStrategy); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#containsKey(Object) EhcacheWithLoaderWriter.containsKey} over a cache holding - * the target key where {@link Store#containsKey(Object) Store.containsKey} - * throws a {@link StoreAccessException StoreAccessException}. - */ - @Test - public void testContainsKeyContainsStoreAccessException() throws Exception { - final FakeStore realStore = new FakeStore(this.getTestStoreEntries()); - this.store = spy(realStore); - doThrow(new StoreAccessException("")).when(this.store).containsKey("keyA"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - ehcache.containsKey("keyA"); - verifyZeroInteractions(this.cacheLoaderWriter); - verify(this.spiedResilienceStrategy).containsKeyFailure(eq("keyA"), any(StoreAccessException.class)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#containsKey(Object) EhcacheWithLoaderWriter.containsKey} over a non-empty cache - * not holding the target key. - */ - @Test - public void testContainsKeyMissing() throws Exception { - final FakeStore realStore = new FakeStore(this.getTestStoreEntries()); - this.store = spy(realStore); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - assertFalse(ehcache.containsKey("missingKey")); - verifyZeroInteractions(this.cacheLoaderWriter); - verifyZeroInteractions(this.spiedResilienceStrategy); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#containsKey(Object) EhcacheWithLoaderWriter.containsKey} over a non-empty cache - * not holding the target key where {@link Store#containsKey(Object) Store.containsKey} - * throws a {@link StoreAccessException StoreAccessException}. - */ - @Test - public void testContainsKeyMissingStoreAccessException() throws Exception { - final FakeStore realStore = new FakeStore(this.getTestStoreEntries()); - this.store = spy(realStore); - doThrow(new StoreAccessException("")).when(this.store).containsKey("missingKey"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(); - - ehcache.containsKey("missingKey"); - verifyZeroInteractions(this.cacheLoaderWriter); - verify(this.spiedResilienceStrategy).containsKeyFailure(eq("missingKey"), any(StoreAccessException.class)); - } - - private Map getTestStoreEntries() { - final Map storeEntries = new HashMap<>(); - storeEntries.put("key1", "value1"); - storeEntries.put("keyA", "valueA"); - storeEntries.put("key2", "value2"); - storeEntries.put("keyB", "valueB"); - return storeEntries; - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using {@link #cacheLoaderWriter}. - * - * @return a new {@code EhcacheWithLoaderWriter} instance - */ - private EhcacheWithLoaderWriter getEhcache() - throws Exception { - final EhcacheWithLoaderWriter ehcache = - new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, this.cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicContainsKeyTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetAllTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetAllTest.java deleted file mode 100644 index 4a1fdf6504..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetAllTest.java +++ /dev/null @@ -1,3459 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core; - -import java.util.Collections; -import java.util.EnumSet; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; - -import org.ehcache.Status; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.hamcrest.Matchers; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.InOrder; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_A; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_B; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_C; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_D; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_E; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_F; -import static org.ehcache.core.EhcacheBasicBulkUtil.TEST_ENTRIES; -import static org.ehcache.core.EhcacheBasicBulkUtil.copyOnly; -import static org.ehcache.core.EhcacheBasicBulkUtil.copyUntil; -import static org.ehcache.core.EhcacheBasicBulkUtil.copyWithout; -import static org.ehcache.core.EhcacheBasicBulkUtil.fanIn; -import static org.ehcache.core.EhcacheBasicBulkUtil.getEntryMap; -import static org.ehcache.core.EhcacheBasicBulkUtil.getNullEntryMap; -import static org.ehcache.core.EhcacheBasicBulkUtil.union; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.ehcache.core.EhcacheBasicGetAllTest.getAnyStringSet; -import static org.ehcache.core.EhcacheBasicGetAllTest.getAnyIterableFunction; -import static org.ehcache.core.EhcacheBasicGetAllTest.validateBulkCounters; - -/** - * Provides testing of basic GET_ALL operations on an {@code EhcacheWithLoaderWriter}. - *

Note

- * The current implementation of {@link EhcacheWithLoaderWriter#getAll(java.util.Set) Ehcache.getAll} - * does not produce partial results while handling a - * {@link StoreAccessException StoreAccessException}; all keys presented - * to {@code getAll} succeed or fail based on the recovery call to - * {@link CacheLoaderWriter#loadAll(Iterable)}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicGetAllTest extends EhcacheBasicCrudBase { - - @Mock - private CacheLoaderWriter loaderWriter; - - /** - * A Mockito {@code ArgumentCaptor} for the {@code Set} argument to the - * {@link CacheLoaderWriter#loadAll(Iterable)} - * method. - */ - @Captor - private ArgumentCaptor> loadAllCaptor; - - /** - * A Mockito {@code ArgumentCaptor} for the {@code Set} argument to the - * {@link org.ehcache.core.internal.resilience.ResilienceStrategy#getAllFailure(Iterable, Map, StoreAccessException) - * ResilienceStrategy.getAllFailure(Iterable, Map, StoreAccessException)} method. - */ - @Captor - private ArgumentCaptor> getAllFailureMapCaptor; - - /** - * A Mockito {@code ArgumentCaptor} for the - * {@link BulkCacheLoadingException BulkCacheLoadingException} - * provided to the - * {@link org.ehcache.core.internal.resilience.ResilienceStrategy#getAllFailure(Iterable, StoreAccessException, BulkCacheLoadingException) - * ResilienceStrategy.getAllFailure(Iterable, StoreAccessException, BulkCacheLoadingException)} method. - */ - @Captor - private ArgumentCaptor bulkExceptionCaptor; - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • empty request key set
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
- */ - @Test - public void testGetAllEmptyRequestWithLoader() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Map actual = ehcache.getAll(Collections.emptySet()); - final Map expected = Collections.emptyMap(); - assertThat(actual, equalTo(expected)); - - verify(this.store, never()).bulkComputeIfAbsent(eq(Collections.emptySet()), getAnyIterableFunction()); - verify(this.spiedResilienceStrategy, never()).getAllFailure(eq(Collections.emptySet()), any(StoreAccessException.class)); - verify(this.loaderWriter, never()).loadAll(eq(Collections.emptySet())); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - try { - ehcache.getAll(KEY_SET_A); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreNoMatchLoaderAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - try { - ehcache.getAll(KEY_SET_A); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreNoMatchLoaderSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_C)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionBeforeLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - try { - ehcache.getAll(KEY_SET_A); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(KEY_SET_A), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionBeforeLoaderAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - try { - ehcache.getAll(KEY_SET_A); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(KEY_SET_A), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), Collections. emptySet(), KEY_SET_A); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionBeforeLoaderSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_C)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_C, KEY_SET_A); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionAfterLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - try { - ehcache.getAll(KEY_SET_A); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(KEY_SET_A), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fails with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionAfterLoaderAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - try { - ehcache.getAll(KEY_SET_A); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(KEY_SET_A), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), Collections. emptySet(), KEY_SET_A); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fails with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionAfterLoaderSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_C)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_C, KEY_SET_A); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchLoaderNoMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Map actual = ehcache.getAll(KEY_SET_A); - assertThat(actual, equalTo(getNullEntryMap(KEY_SET_A))); - - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, 0, KEY_SET_A.size()); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(java.util.Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • no {@link org.ehcache.spi.loaderwriter.CacheLoaderWriter} entries match
  • - *
  • no {@link org.ehcache.spi.loaderwriter.CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionBeforeLoaderNoMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Map actual = ehcache.getAll(KEY_SET_A); - final Map expected = getNullEntryMap(KEY_SET_A); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(KEY_SET_A), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionAfterLoaderNoMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Map actual = ehcache.getAll(KEY_SET_A); - final Map expected = getNullEntryMap(KEY_SET_A); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_A)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(KEY_SET_A), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C), KEY_SET_F); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_F); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(getNullEntryMap(KEY_SET_A))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_F)); - } - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionBeforeLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C), KEY_SET_F); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_F); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionAfterLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C), KEY_SET_F); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_F); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - final Set successKeys = copyWithout(copyUntil(fetchKeys, "keyA3"), KEY_SET_F); - final Set failKeys = copyWithout(fetchKeys, successKeys); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), - Matchers.>equalTo(getNullEntryMap(successKeys))); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(failKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_B, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(union(getNullEntryMap(KEY_SET_A), getEntryMap(KEY_SET_C)))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B, KEY_SET_C))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, KEY_SET_C.size(), KEY_SET_A.size()); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionBeforeLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_B, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = union(getNullEntryMap(KEY_SET_A), getEntryMap(KEY_SET_C)); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionAfterLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_B, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = union(getNullEntryMap(KEY_SET_A), getEntryMap(KEY_SET_C)); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C), KEY_SET_B); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(union(getNullEntryMap(KEY_SET_A), getEntryMap(KEY_SET_C)))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B, KEY_SET_C))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, KEY_SET_C.size(), KEY_SET_A.size()); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionBeforeLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C), KEY_SET_B); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = union(getNullEntryMap(KEY_SET_A), getEntryMap(KEY_SET_C)); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionAfterLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C), KEY_SET_B); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = union(getNullEntryMap(KEY_SET_A), getEntryMap(KEY_SET_C)); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getEntryMap(fetchKeys))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, KEY_SET_A.size() + KEY_SET_C.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionBeforeLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = getEntryMap(fetchKeys); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • no {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreNoMatchStoreAccessExceptionAfterLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = getEntryMap(fetchKeys); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - - - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(getEntryMap(KEY_SET_A))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_C)); - } - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_C)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, KEY_SET_A.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fails with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_C, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(getEntryMap(KEY_SET_A))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_C)); - } - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_C)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, KEY_SET_A.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fails with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_C, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(getEntryMap(KEY_SET_A, KEY_SET_D))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_C)); - } - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(union(KEY_SET_C, KEY_SET_D))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, KEY_SET_A.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, union(KEY_SET_A, KEY_SET_C), true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), Collections. emptySet(), fetchKeys); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderLSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, union(KEY_SET_A, KEY_SET_C), true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_D)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(union(KEY_SET_A, KEY_SET_C))); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_D, union(KEY_SET_A, KEY_SET_C)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().isEmpty(), is(true)); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, fetchKeys, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), Collections. emptySet(), fetchKeys); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_C)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_C, KEY_SET_A); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderNoMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_F)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(union(getEntryMap(KEY_SET_A), getNullEntryMap(KEY_SET_C)))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(KEY_SET_C)); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, KEY_SET_A.size(), KEY_SET_C.size()); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderNoMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_F)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = getNullEntryMap(fetchKeys); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderNoMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(union(getNullEntryMap(KEY_SET_C), getEntryMap(KEY_SET_A)))); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(actual), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_F), KEY_SET_D); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(union(getEntryMap(KEY_SET_A), getNullEntryMap(KEY_SET_C)))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_D)); - } - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(copyWithout(fetchKeys, KEY_SET_A))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, KEY_SET_A.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderNoMatchSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_D, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(union(getEntryMap(KEY_SET_A), getEntryMap(KEY_SET_C)))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_D)); - } - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(copyWithout(fetchKeys, KEY_SET_A))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, KEY_SET_A.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_F), KEY_SET_D); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderNoMatchAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, fetchKeys, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), Collections. emptySet(), fetchKeys); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_B), KEY_SET_D); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - final Set successKeys = copyWithout(copyUntil(fetchKeys, "keyA3"), fanIn(KEY_SET_A, KEY_SET_D)); - final Set failKeys = copyWithout(fetchKeys, successKeys); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), - Matchers.>equalTo(getNullEntryMap(successKeys))); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(failKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C, KEY_SET_F)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(union(getEntryMap(KEY_SET_A, KEY_SET_C), getNullEntryMap(KEY_SET_D)))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(copyWithout(fetchKeys, KEY_SET_A))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, KEY_SET_A.size() + KEY_SET_C.size(), KEY_SET_D.size()); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C, KEY_SET_F)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = union(getNullEntryMap(KEY_SET_A, KEY_SET_D), getEntryMap(KEY_SET_C)); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_F)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(union(getNullEntryMap(KEY_SET_D), getEntryMap(KEY_SET_A, KEY_SET_C)))); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(actual), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C, KEY_SET_E), KEY_SET_F); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D, KEY_SET_F); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), - Matchers.>equalTo(union(getEntryMap(KEY_SET_A, KEY_SET_C), getNullEntryMap(KEY_SET_D)))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_F)); - } - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(copyWithout(fetchKeys, KEY_SET_A))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, KEY_SET_A.size() + KEY_SET_C.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fails with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderSomeMatchDisjointFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, union(KEY_SET_D, KEY_SET_F), true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D, KEY_SET_F); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(),Matchers.>equalTo(getEntryMap(KEY_SET_A, KEY_SET_C))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(union(KEY_SET_D, KEY_SET_F))); - } - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(copyWithout(fetchKeys, KEY_SET_A))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, KEY_SET_A.size(), 0); - } - - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C, KEY_SET_E), KEY_SET_F); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D, KEY_SET_F); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderSomeMatchDisjointFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, union(KEY_SET_A, KEY_SET_C), true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_E); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(getEntryMap(KEY_SET_E))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(union(KEY_SET_A, KEY_SET_C))); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_E, union(KEY_SET_A, KEY_SET_C)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_E), KEY_SET_F); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D, KEY_SET_F); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - final Set successKeys = copyWithout(copyUntil(fetchKeys, "keyA3"), fanIn(KEY_SET_A, KEY_SET_F)); - final Set failKeys = copyWithout(fetchKeys, successKeys); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), - Matchers.>equalTo(union(getEntryMap(copyOnly(KEY_SET_C, successKeys)), - getNullEntryMap(copyOnly(KEY_SET_D, successKeys))))); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(failKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderSomeMatchDisjointFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, union(KEY_SET_D, KEY_SET_F), true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D, KEY_SET_F); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(union(getEntryMap(KEY_SET_A), getEntryMap(KEY_SET_C)))); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(union(KEY_SET_D, KEY_SET_F))); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getEntryMap(fetchKeys))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D))); - verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(copyWithout(fetchKeys, KEY_SET_A))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, fetchKeys.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = getEntryMap(fetchKeys); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • some {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreSomeMatchStoreAccessExceptionAfterLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = getEntryMap(fetchKeys); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - - - - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getEntryMap(fetchKeys))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, never()).loadAll(getAnyStringSet()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, fetchKeys.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, fetchKeys, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(union(KEY_SET_A, KEY_SET_B))); - - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), Collections. emptySet(), fetchKeys); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_B)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_B, KEY_SET_A); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionAfterLoaderAllFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES); - this.loaderWriter = spy(fakeLoader); - doThrow(new Exception("loadAll failed")).when(this.loaderWriter).loadAll(getAnyStringSet()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • all {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionAfterLoaderAllFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, fetchKeys, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), Collections. emptySet(), fetchKeys); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • with a {@code CacheLoaderWriter} (loader-provided entries not relevant)
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionAfterLoaderSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_B)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_B, KEY_SET_A); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchLoaderNoMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getEntryMap(fetchKeys))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, never()).loadAll(getAnyStringSet()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, fetchKeys.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderNoMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = getNullEntryMap(fetchKeys); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C, KEY_SET_F), KEY_SET_B); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getEntryMap(fetchKeys))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, never()).loadAll(getAnyStringSet()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, fetchKeys.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_C, KEY_SET_F), KEY_SET_B); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderNoMatchSomeFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, fetchKeys, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), Collections. emptySet(), fetchKeys); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • no {@link CacheLoaderWriter} entries match
  • - *
  • some {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionAfterLoaderNoMatchSomeFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_C, KEY_SET_F), KEY_SET_B); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_B, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getEntryMap(fetchKeys))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, never()).loadAll(getAnyStringSet()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, fetchKeys.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_B, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = union(getNullEntryMap(KEY_SET_A), getEntryMap(KEY_SET_B)); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(fetchKeys); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionAfterLoaderSomeMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = getEntryMap(KEY_SET_A, KEY_SET_B); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(fetchKeys); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_B, KEY_SET_C), KEY_SET_A); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - final Map expected = getEntryMap(KEY_SET_A, KEY_SET_B); - assertThat(actual, equalTo(expected)); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, never()).loadAll(getAnyStringSet()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, fetchKeys.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_B, KEY_SET_C), KEY_SET_A); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(Collections.emptyMap())); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderSomeMatchDisjointFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_B)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_B, KEY_SET_A); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionAfterLoaderSomeMatchDisjointFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_B, KEY_SET_C), KEY_SET_A); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(Collections.emptyMap())); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(fetchKeys)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • some {@link CacheLoaderWriter} entries match
  • - *
  • non-matching {@link CacheLoaderWriter#loadAll(Iterable)} calls fail with {@link BulkCacheLoadingException}
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionAfterLoaderSomeMatchDisjointFailWithBulkCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(TEST_ENTRIES, KEY_SET_A, true); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - try { - ehcache.getAll(fetchKeys); - fail(); - } catch (BulkCacheLoadingException e) { - // Expected - assertThat(e.getSuccesses().keySet(), Matchers.>equalTo(KEY_SET_B)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(KEY_SET_A)); - } - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - verifyBulkLoadingException(this.bulkExceptionCaptor.getValue(), KEY_SET_B, KEY_SET_A); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getEntryMap(fetchKeys))); - - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verify(this.loaderWriter, never()).loadAll(getAnyStringSet()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); - validateBulkCounters(ehcache, fetchKeys.size(), 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws before accessing loader
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionBeforeLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkComputeIfAbsent(getAnyStringSet(), getAnyIterableFunction()); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - - final Map expected = getEntryMap(fetchKeys); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for - *
    - *
  • non-empty request key set
  • - *
  • all {@link Store} entries match
  • - *
  • {@link Store#bulkComputeIfAbsent} throws after accessing loader
  • - *
  • all {@link CacheLoaderWriter} entries match
  • - *
  • no {@link CacheLoaderWriter#loadAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testGetAllStoreAllMatchStoreAccessExceptionAfterLoaderAllMatchNoneFail() throws Exception { - final FakeStore fakeStore = new FakeStore(getEntryMap(KEY_SET_A, KEY_SET_B), Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoader = new FakeCacheLoaderWriter(getEntryMap(KEY_SET_A, KEY_SET_B)); - this.loaderWriter = spy(fakeLoader); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.loaderWriter); - - final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - - final Map expected = getEntryMap(fetchKeys); - assertThat(actual, equalTo(expected)); - - final InOrder ordered = inOrder(this.loaderWriter, this.spiedResilienceStrategy); - verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); - // ResilienceStrategy invoked: no assertion for Store content - ordered.verify(this.loaderWriter, atLeast(1)).loadAll(this.loadAllCaptor.capture()); - assertThat(this.getLoadAllArgs(), equalTo(fetchKeys)); - ordered.verify(this.spiedResilienceStrategy) - .getAllFailure(eq(fetchKeys), eq(expected), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); - validateBulkCounters(ehcache, 0, 0); - } - - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicGetAllTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } - - private void verifyBulkLoadingException(BulkCacheLoadingException e, Set successKeys, Set failureKeys) { - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses().keySet(), Matchers.> equalTo(successKeys)); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.> equalTo(failureKeys)); - } - - /** - * Collects all arguments captured by {@link #loadAllCaptor}. - * - * @return the argument values collected by {@link #loadAllCaptor}; the - * {@code Iterator} over the resulting {@code Set} returns the values - * in the order observed by the captor. - */ - private Set getLoadAllArgs() { - final Set loadAllArgs = new LinkedHashSet<>(); - for (final Set set : this.loadAllCaptor.getAllValues()) { - loadAllArgs.addAll(set); - } - return loadAllArgs; - } - -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java deleted file mode 100644 index 7bcf910e23..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java +++ /dev/null @@ -1,437 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheLoadingException; -import org.ehcache.core.exceptions.ExceptionFactory; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.hamcrest.CoreMatchers; -import org.junit.Test; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.EnumSet; - -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; - -/** - * Provides testing of basic GET operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicGetTest extends EhcacheBasicCrudBase { - - @Mock - protected CacheLoaderWriter cacheLoaderWriter; - - @Test - public void testGetNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.get(null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
- */ - @Test - public void testGetNoStoreEntry() throws Exception { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.get("key"), is(nullValue())); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key not available via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testGetNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.get("key"), is(nullValue())); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key available via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testGetNoStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - when(this.cacheLoaderWriter.load("key")).thenReturn("value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.get("key"), is("value")); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.load} throws
  • - *
- */ - @Test - public void testGetNoStoreEntryCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - when(this.cacheLoaderWriter.load("key")).thenThrow(new Exception()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.get("key"); - fail(); - } catch (CacheLoadingException e) { - // Expected - } - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • key not available via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testGetNoStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.get("key"); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verify(this.spiedResilienceStrategy).getFailure(eq("key"), (String) isNull(), any(StoreAccessException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • key available via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testGetNoStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - when(this.cacheLoaderWriter.load("key")).thenReturn("value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.get("key"); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verify(this.spiedResilienceStrategy).getFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • {@code CacheLoaderWriter.load} throws
  • - *
- */ - @Test - public void testGetNoStoreEntryStoreAccessExceptionCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - when(this.cacheLoaderWriter.load("key")).thenThrow(new Exception()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.get("key"); - fail(); - } catch (CacheLoadingException e) { - // Expected - } - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verify(this.spiedResilienceStrategy).getFailure(eq("key"), any(StoreAccessException.class), any(CacheLoadingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
- */ - @Test - public void testGetHasStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.get("key"), equalTo("value")); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key not available via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testGetHasStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.get("key"), equalTo("value")); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter, never()).load(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key available via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testGetHasStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - - when(this.cacheLoaderWriter.load("key")).thenReturn("value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.get("key"), equalTo("value")); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter, never()).load(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.load} throws
  • - *
- */ - @Test - public void testGetHasStoreEntryCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - - when(this.cacheLoaderWriter.load("key")).thenThrow(ExceptionFactory.newCacheLoadingException(new Exception())); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.get("key"), equalTo("value")); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter, never()).load(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • key not available via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testGetHasStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.get("key"); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verify(this.spiedResilienceStrategy).getFailure(eq("key"), (String) isNull(), any(StoreAccessException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • key available via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testGetHasStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - when(this.cacheLoaderWriter.load("key")).thenReturn("value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.get("key"); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verify(this.spiedResilienceStrategy).getFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • {@code CacheLoaderWriter.load} throws
  • - *
- */ - @Test - public void testGetHasStoreEntryStoreAccessExceptionCacheLoadingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - when(this.cacheLoaderWriter.load("key")).thenThrow(new Exception()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.get("key"); - fail(); - } catch (CacheLoadingException e) { - // Expected - } - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verify(this.cacheLoaderWriter).load(eq("key")); - verify(this.spiedResilienceStrategy).getFailure(eq("key"), any(StoreAccessException.class), any(CacheLoadingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.FAILURE)); - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using the - * {@link CacheLoaderWriter} provided. - * - * @param cacheLoaderWriter - * the {@code CacheLoaderWriter} to use; may be {@code null} - * - * @return a new {@code EhcacheWithLoaderWriter} instance - */ - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicGetTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicIteratorTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicIteratorTest.java deleted file mode 100644 index 63a1e83a15..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicIteratorTest.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core; - -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -import org.ehcache.Cache; -import org.ehcache.Status; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.hamcrest.Matchers; -import org.junit.Test; -import org.slf4j.LoggerFactory; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; - -/** - * @author Abhilash - * - */ -public class EhcacheWithLoaderWriterBasicIteratorTest extends EhcacheBasicIteratorTest { - - /** - * Tests {@link java.util.Iterator#remove()} from {@link EhcacheWithLoaderWriter#iterator()} on a non-empty cache. - */ - @Test - public void testIteratorNonEmptyRemoveOne() throws Exception { - final Map testStoreEntries = this.getTestStoreEntries(); - final FakeStore fakeStore = new FakeStore(testStoreEntries); - this.store = fakeStore; - - // Set CacheLoaderWriter & Store to have the same entries initially - final FakeCacheLoaderWriter fakeWriterWriter = new FakeCacheLoaderWriter(testStoreEntries); - final InternalCache ehcache = this.getEhcache(fakeWriterWriter); - - final Iterator> iterator = ehcache.iterator(); - while (iterator.hasNext()) { - final Cache.Entry entry = iterator.next(); - if (entry.getKey().equals("keyA")) { - iterator.remove(); - } - } - - testStoreEntries.remove("keyA"); - final Map storeEntries = new HashMap<>(fakeStore.getEntryMap()); - for (Map.Entry expectedEntry : testStoreEntries.entrySet()) { - final String expectedEntryKey = expectedEntry.getKey(); - assertThat(storeEntries, hasEntry(equalTo(expectedEntryKey), equalTo(expectedEntry.getValue()))); - storeEntries.remove(expectedEntryKey); - } - assertThat("Iterator.remove removed incorrect Store entry", storeEntries.isEmpty(), is(true)); - - final Map writerEntries = new HashMap<>(fakeWriterWriter.getEntryMap()); - for (Map.Entry expectedEntry : testStoreEntries.entrySet()) { - final String expectedEntryKey = expectedEntry.getKey(); - assertThat(writerEntries, hasEntry(equalTo(expectedEntryKey), equalTo(expectedEntry.getValue()))); - writerEntries.remove(expectedEntryKey); - } - assertThat("Iterator.remove removed incorrect Writer entry", writerEntries.isEmpty(), is(true)); - } - - /** - * Tests removal of all entries from {@link EhcacheWithLoaderWriter#iterator()} on a non-empty cache. - */ - @Test - public void testIteratorNonEmptyRemoveAll() throws Exception { - final Map testStoreEntries = this.getTestStoreEntries(); - final FakeStore fakeStore = new FakeStore(testStoreEntries); - this.store = fakeStore; - - // Set CacheLoaderWriter & Store to have the same entries initially - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(testStoreEntries); - final InternalCache ehcache = this.getEhcache(fakeLoaderWriter); - - final Iterator> iterator = ehcache.iterator(); - while (iterator.hasNext()) { - iterator.next(); - iterator.remove(); - } - assertThat("Failed to remove all entries from Store", fakeStore.getEntryMap().isEmpty(), is(true)); - assertThat("Failed to remove all entries via CacheLoaderWriter", fakeLoaderWriter.getEntryMap().isEmpty(), is(true)); - } - - /** - * Tests {@link java.util.Iterator#remove()} twice on the same entry returned from the {@code Iterator} - * returned from {@link EhcacheWithLoaderWriter#iterator()} on a non-empty cache. - */ - @Test - public void testIteratorNonEmptyRemoveTwice() throws Exception { - final Map testStoreEntries = this.getTestStoreEntries(); - this.store = new FakeStore(testStoreEntries); - - // Set CacheLoaderWriter & Store to have the same entries initially - final FakeCacheLoaderWriter fakeWriterWriter = new FakeCacheLoaderWriter(testStoreEntries); - final InternalCache ehcache = this.getEhcache(fakeWriterWriter); - - final Iterator> iterator = ehcache.iterator(); - while (iterator.hasNext()) { - final Cache.Entry entry = iterator.next(); - if (entry.getKey().equals("keyA")) { - iterator.remove(); - try { - iterator.remove(); - fail(); - } catch (IllegalStateException e) { - // Expected - } - break; - } - } - } - - /** - * Tests {@link java.util.Iterator#remove()} after removing the last entry returned from the {@code Iterator} - * returned from {@link EhcacheWithLoaderWriter#iterator()} on a non-empty cache. - */ - @Test - public void testIteratorNonEmptyRemoveAfterLast() throws Exception { - final Map testStoreEntries = this.getTestStoreEntries(); - this.store = new FakeStore(testStoreEntries); - - // Set CacheLoaderWriter & Store to have the same entries initially - final FakeCacheLoaderWriter fakeWriterWriter = new FakeCacheLoaderWriter(testStoreEntries); - final InternalCache ehcache = this.getEhcache(fakeWriterWriter); - - final Iterator> iterator = ehcache.iterator(); - while (iterator.hasNext()) { - iterator.next(); - } - - iterator.remove(); // Expected to remove last observed entry - try { - iterator.remove(); - fail(); - } catch (IllegalStateException e) { - // Expected - } - } - - @Override - protected InternalCache getEhcache() throws Exception { - - @SuppressWarnings("unchecked") - final CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - doThrow(new UnsupportedOperationException()).when(cacheLoaderWriter).delete(anyString()); - doThrow(new UnsupportedOperationException()).when(cacheLoaderWriter).deleteAll(getAnyStringIterable()); - doThrow(new UnsupportedOperationException()).when(cacheLoaderWriter).write(anyString(), anyString()); - doThrow(new UnsupportedOperationException()).when(cacheLoaderWriter).writeAll(getAnyMapEntryIterable()); - doThrow(new UnsupportedOperationException()).when(cacheLoaderWriter).load(anyString()); - doThrow(new UnsupportedOperationException()).when(cacheLoaderWriter).loadAll(getAnyStringIterable()); - - return this.getEhcache(cacheLoaderWriter); - } - - /** - * creates an instance {@code EhcacheWithLoaderWriter} - * - * @param cacheLoaderWriter - * @return instance {@link EhcacheWithLoaderWriter} - * @throws Exception - */ - private InternalCache getEhcache(CacheLoaderWriter cacheLoaderWriter) throws Exception { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicIteratorTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java deleted file mode 100644 index 4d45741a26..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java +++ /dev/null @@ -1,1963 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core; - -import java.util.Collections; -import java.util.EnumSet; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; - -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.statistics.BulkOps; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.hamcrest.Matchers; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.InOrder; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_A; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_B; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_C; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_D; -import static org.ehcache.core.EhcacheBasicBulkUtil.copyWithout; -import static org.ehcache.core.EhcacheBasicBulkUtil.fanIn; -import static org.ehcache.core.EhcacheBasicBulkUtil.getAltEntryMap; -import static org.ehcache.core.EhcacheBasicBulkUtil.getEntryMap; -import static org.ehcache.core.EhcacheBasicBulkUtil.union; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.everyItem; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.isIn; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; -import static org.ehcache.core.EhcacheBasicPutAllTest.getAnyEntryIterable; -import static org.ehcache.core.EhcacheBasicPutAllTest.getAnyEntryIterableFunction; -import static org.ehcache.core.EhcacheBasicPutAllTest.getAnyStringSet; - -/** - * Provides testing of basic PUT_ALL operations on an {@code EhcacheWithLoaderWriter}. - *

- * In an effort compromise, this class intentionally omits test cases in which - * the {@code Store} is pre-populated with no entries, pre-populated only with - * entries having keys not in the {@code putAll} request map, and pre-populated - * with entries for all keys in the {@code putAll} request map. This reduces - * the potential test cases by about 70% without, hopefully, compromising code - * coverage. - *

- * Since the processing in {@link EhcacheWithLoaderWriter#putAll} relies on non-deterministically ordered Maps in several stages - * of processing, the result of {@code putAll} when handling failures is *not* deterministic -- changes in - * iteration order of the {@code putAll} request map can change the results of the {@code putAll} operation under - * error scenarios. The test assertions attempt to confirm results in aggregate over successes and failures and - * do not specify specific success and failures for each test. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicPutAllTest extends EhcacheBasicCrudBase { - - @Mock - private CacheLoaderWriter cacheLoaderWriter; - - /** - * A Mockito {@code ArgumentCaptor} for the {@code Set} argument to the - * {@link Store#bulkCompute(Set, java.util.function.Function, java.util.function.Supplier) - * Store.bulkCompute(Set, Function, NullaryFunction} method. - */ - @Captor - private ArgumentCaptor> bulkComputeSetCaptor; - - /** - * A Mockito {@code ArgumentCaptor} for the - * {@link BulkCacheWritingException BulkCacheWritingException} - * provided to the - * {@link org.ehcache.core.internal.resilience.ResilienceStrategy#putAllFailure(Map, StoreAccessException, BulkCacheWritingException)} - * ResilienceStrategy.putAllFailure(Iterable, StoreAccessException, BulkCacheWritingException)} method. - */ - @Captor - private ArgumentCaptor bulkExceptionCaptor; - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *

    - *
  • empty request map
  • - *
  • populated {@code Store} (keys not relevant)
  • - *
  • populated {@code CacheLoaderWriter} (keys not relevant)
  • - *
- */ - @Test - public void testPutAllEmptyRequestWithWriter() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalStoreContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - ehcache.putAll(Collections.emptyMap()); - - verify(this.store, never()).bulkCompute(eq(Collections.emptySet()), getAnyEntryIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(originalStoreContent)); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(originalStoreContent)); - verify(this.spiedResilienceStrategy, never()).putAllFailure(eq(Collections.emptyMap()), any(StoreAccessException.class)); - verify(this.cacheLoaderWriter, never()).writeAll(eq(Collections.>emptyList())); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterNoOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - ehcache.putAll(contentUpdates); - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates.keySet())); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, contentUpdates))); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(contentUpdates.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterNoOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - ehcache.putAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterNoOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - ehcache.putAll(contentUpdates); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterNoOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - final Set expectedFailures = KEY_SET_C; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates.keySet())); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, expectedSuccesses))); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(KEY_SET_A.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterNoOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - final Set expectedFailures = KEY_SET_C; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterNoOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - final Set expectedFailures = KEY_SET_C; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterNoOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - final Set expectedFailures = KEY_SET_C; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(expectedSuccesses.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - final Set expectedFailures = contentUpdates.keySet(); - - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(bcweSuccesses.isEmpty(), is(true)); - assertThat(bcweFailures.keySet(), equalTo(expectedFailures)); - assertThatAllStoreEntriesWithoutFailuresMatchWriterState(fakeStore, fakeLoaderWriter, bcweFailures); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - final Set expectedFailures = KEY_SET_C; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates.keySet())); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThatAllStoreEntriesWithoutFailuresMatchWriterState(fakeStore, fakeLoaderWriter, bcweFailures); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterNoOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(contentUpdates.keySet())); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, contentUpdates.keySet()))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterNoOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), - getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), - Matchers.>equalTo(contentUpdates.keySet())); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterNoOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), - getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), - Matchers.>equalTo(contentUpdates.keySet())); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterSomeOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - ehcache.putAll(contentUpdates); - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates.keySet())); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, contentUpdates))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(contentUpdates.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterSomeOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - ehcache.putAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterSomeOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - ehcache.putAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterSomeOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = KEY_SET_D; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(expectedSuccesses.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterSomeOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = KEY_SET_D; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterSomeOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = KEY_SET_D; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterSomeOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(expectedSuccesses.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterSomeOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates.keySet())); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThatAllStoreEntriesWithoutFailuresMatchWriterState(fakeStore, fakeLoaderWriter, bcweFailures); - - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterSomeOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates.keySet())); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThatAllStoreEntriesWithoutFailuresMatchWriterState(fakeStore, fakeLoaderWriter, bcweFailures); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterSomeOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(contentUpdates.keySet())); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterSomeOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), - Matchers.>equalTo(contentUpdates.keySet())); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterSomeOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), - Matchers.>equalTo(contentUpdates.keySet())); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterFullOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C)); - ehcache.putAll(contentUpdates); - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates.keySet())); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, contentUpdates))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(contentUpdates.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterFullOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C)); - ehcache.putAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • no {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterFullOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyB3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C)); - ehcache.putAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterFullOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = KEY_SET_D; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(KEY_SET_B.size() + KEY_SET_C.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterFullOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = KEY_SET_D; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterFullOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyB3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = KEY_SET_D; - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterFullOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses.keySet())); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(expectedSuccesses.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterFullOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates.keySet())); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThatAllStoreEntriesWithoutFailuresMatchWriterState(fakeStore, fakeLoaderWriter, bcweFailures); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#writeAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterFullOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyB3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Map expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates.keySet())); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThatAllStoreEntriesWithoutFailuresMatchWriterState(fakeStore, fakeLoaderWriter, bcweFailures); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapWriterFullOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(contentUpdates.keySet())); - } - - verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeWriterFullOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(originalWriterContent)); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), - Matchers.>equalTo(contentUpdates.keySet())); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for - *
    - *
  • non-empty request map
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • all {@link CacheLoaderWriter#writeAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterWriterFullOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyB3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("writeAll failed")).when(this.cacheLoaderWriter).writeAll(getAnyEntryIterable()); - - final InternalCache ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D)); - try { - ehcache.putAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(originalWriterContent)); - ordered.verify(this.spiedResilienceStrategy) - .putAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), - Matchers.>equalTo(contentUpdates.keySet())); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); - } - - @Test - public void testPutAllPartialIntersectionsImmediatelyExpiredCreatedEntries() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - @SuppressWarnings("unchecked") - final Expiry expiry = mock(Expiry.class); - when(expiry.getExpiryForCreation(any(String.class), any(String.class))).thenReturn(Duration.ZERO); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(cacheLoaderWriter, expiry); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - ehcache.putAll(contentUpdates); - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates.keySet())); - assertThat(fakeStore.getEntryMap(), equalTo(union(getAltEntryMap("new_", KEY_SET_A), getEntryMap(KEY_SET_B)))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(KEY_SET_A.size())); - } - - @Test - public void testPutAllPartialIntersectionsImmediatelyExpiredUpdatedEntries() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_A, KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - @SuppressWarnings("unchecked") - final Expiry expiry = mock(Expiry.class); - when(expiry.getExpiryForUpdate(any(String.class), argThat(org.ehcache.core.util.Matchers.holding(instanceOf(String.class))), any(String.class))).thenReturn(Duration.ZERO); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(cacheLoaderWriter, expiry); - - final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D)); - ehcache.putAll(contentUpdates); - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates.keySet())); - assertThat(fakeStore.getEntryMap(), equalTo(union(getEntryMap(KEY_SET_B), getAltEntryMap("new_", union(KEY_SET_C, KEY_SET_D))))); - verify(this.cacheLoaderWriter, atLeast(1)).writeAll(getAnyEntryIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(union(originalWriterContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(KEY_SET_C.size() + KEY_SET_D.size())); - } - - private void assertThatAllStoreEntriesWithoutFailuresMatchWriterState(FakeStore fakeStore, FakeCacheLoaderWriter fakeLoaderWriter, Map bcweFailures) { - assertThat(copyWithout(fakeStore.getEntryMap(), bcweFailures.keySet()).entrySet(), everyItem(isIn(fakeLoaderWriter.getEntryMap() - .entrySet()))); - } - - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - return getEhcache(cacheLoaderWriter, CACHE_CONFIGURATION); - } - - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter, Expiry expiry) { - CacheConfiguration config = new BaseCacheConfiguration<>(String.class, String.class, null, null, - expiry, ResourcePoolsHelper.createHeapOnlyPools()); - return getEhcache(cacheLoaderWriter, config); - } - - private EhcacheWithLoaderWriter getEhcache(CacheLoaderWriter cacheLoaderWriter, CacheConfiguration config) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(config, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheBasicPutAllTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } - - /** - * Collects all arguments captured by {@link #bulkComputeSetCaptor}. - * - * @return the argument values collected by {@link #bulkComputeSetCaptor}; the - * {@code Iterator} over the resulting {@code Set} returns the values - * in the order observed by the captor. - */ - private Set getBulkComputeArgs() { - final Set bulkComputeArgs = new LinkedHashSet<>(); - for (final Set set : this.bulkComputeSetCaptor.getAllValues()) { - bulkComputeArgs.addAll(set); - } - return bulkComputeArgs; - } - -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutIfAbsentTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutIfAbsentTest.java deleted file mode 100644 index 0ffdb1ad18..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutIfAbsentTest.java +++ /dev/null @@ -1,492 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.hamcrest.Matchers; -import org.junit.Test; -import org.mockito.InOrder; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.EnumSet; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; - -/** - * Provides testing of basic PUT_IF_ABSENT operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicPutIfAbsentTest extends EhcacheBasicCrudBase { - - @Mock - protected CacheLoaderWriter cacheLoaderWriter; - @Mock - private CacheEventDispatcher cacheEventDispatcher; - - @Test - public void testPutIfAbsentNullNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.putIfAbsent(null, null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testPutIfAbsentKeyNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.putIfAbsent("key", null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testPutIfAbsentNullValue() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.putIfAbsent(null, "value"); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
- */ - @Test - public void testPutIfAbsentNoStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.putIfAbsent("key", "value"), is(nullValue())); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
- */ - @Test - public void testPutIfAbsentHasStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.putIfAbsent("key", "value"), is(equalTo("oldValue"))); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("oldValue")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutIfAbsentNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeLoaderWriter); - - assertThat(ehcache.putIfAbsent("key", "value"), is(nullValue())); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutIfAbsentHasStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeLoaderWriter); - - assertThat(ehcache.putIfAbsent("key", "value"), is(equalTo("oldValue"))); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("oldValue")); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutIfAbsentNoStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - assertThat(ehcache.putIfAbsent("key", "value"), nullValue()); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy) - .putIfAbsentFailure(eq("key"), eq("value"), eq("value"), any(StoreAccessException.class), eq(true)); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutIfAbsentHasStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - assertThat(ehcache.putIfAbsent("key", "value"), nullValue()); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy) - .putIfAbsentFailure(eq("key"), eq("value"), eq("value"), any(StoreAccessException.class), eq(true)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutIfAbsentNoStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeLoaderWriter); - - assertThat(ehcache.putIfAbsent("key", "value"), is("oldValue")); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("oldValue")); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), equalTo("oldValue")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutIfAbsentHasStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeLoaderWriter); - - assertThat(ehcache.putIfAbsent("key", "value"), is(equalTo("oldValue"))); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("oldValue")); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), equalTo("oldValue")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.computeIfAbsent} throws
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutIfAbsentNoStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.putIfAbsent("key", "value"); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - ordered.verify(this.cacheLoaderWriter).load(eq("key")); - ordered.verify(this.spiedResilienceStrategy) - .putIfAbsentFailure(eq("key"), eq("value"), eq("oldValue"), any(StoreAccessException.class), eq(false)); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), equalTo("oldValue")); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.computeIfPresent} throws
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutIfAbsentHasStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - assertThat(ehcache.putIfAbsent("key", "value"), is("oldValue")); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - ordered.verify(this.cacheLoaderWriter).load(eq("key")); - ordered.verify(this.spiedResilienceStrategy) - .putIfAbsentFailure(eq("key"), eq("value"), eq("oldValue"), any(StoreAccessException.class), eq(false)); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), equalTo("oldValue")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testPutIfAbsentNoStoreEntryCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.putIfAbsent("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutIfAbsentOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testPutIfAbsentHasStoreEntryCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.putIfAbsent("key", "value"), is(equalTo("oldValue"))); - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("oldValue")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.computeIfPresent} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testPutIfAbsentNoStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - try { - ehcache.putIfAbsent("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy) - .putIfAbsentFailure(eq("key"), eq("value"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#putIfAbsent(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.computeIfPresent} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testPutIfAbsentHasStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - try { - ehcache.putIfAbsent("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy) - .putIfAbsentFailure(eq("key"), eq("value"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.FAILURE)); - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using the - * {@link org.ehcache.spi.loaderwriter.CacheLoaderWriter CacheLoaderWriter} provided. - * - * @param cacheLoaderWriter - * the {@code CacheLoaderWriter} to use; may be {@code null} - * - * @return a new {@code Ehcache} instance - */ - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - return getEhcache(cacheLoaderWriter, Expirations.noExpiration()); - } - - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter, Expiry expiry) { - CacheConfiguration config = new BaseCacheConfiguration<>(String.class, String.class, null, null, - expiry, ResourcePoolsHelper.createHeapOnlyPools()); - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(config, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicPutIfAbsentTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutTest.java deleted file mode 100644 index 2ee2a09f92..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutTest.java +++ /dev/null @@ -1,524 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.hamcrest.CoreMatchers; -import org.junit.Test; -import org.mockito.InOrder; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.EnumSet; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; - -/** - * Provides testing of basic PUT operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicPutTest extends EhcacheBasicCrudBase { - - @Mock - protected CacheLoaderWriter cacheLoaderWriter; - - @Test - public void testPutNullNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.put(null, null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testPutKeyNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.put("key", null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testPutNullValue() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.put(null, "value"); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
- */ - @Test - public void testPutNoStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutNoStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testPutNoStoreEntryCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.put("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
- */ - @Test - public void testPutNoStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verify(this.spiedResilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutNoStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutNoStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testPutNoStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - try { - ehcache.put("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy) - .putFailure(eq("key"), eq("value"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
- */ - @Test - public void testPutHasStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutHasStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutHasStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testPutHasStoreEntryCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.put("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
- */ - @Test - public void testPutHasStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verify(this.spiedResilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutHasStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testPutHasStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.put("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#put(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testPutHasStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - try { - ehcache.put("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), eq("value")); - ordered.verify(this.spiedResilienceStrategy) - .putFailure(eq("key"), eq("value"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using the - * {@link org.ehcache.spi.loaderwriter.CacheLoaderWriter CacheLoaderWriter} provided. - * - * @param cacheLoaderWriter - * the {@code CacheLoaderWriter} to use; may be {@code null} - * - * @return a new {@code EhcacheWithLoaderWriter} instance - */ - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - return getEhcache(cacheLoaderWriter, CACHE_CONFIGURATION); - } - - private EhcacheWithLoaderWriter getEhcache(CacheLoaderWriter cacheLoaderWriter, CacheConfiguration config) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(config, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicPutTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveAllTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveAllTest.java deleted file mode 100644 index 4fdfb936d4..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveAllTest.java +++ /dev/null @@ -1,1879 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core; - -import java.util.Collections; -import java.util.EnumSet; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; - -import org.ehcache.Status; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.core.statistics.BulkOps; -import org.hamcrest.Matchers; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.InOrder; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_A; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_B; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_C; -import static org.ehcache.core.EhcacheBasicBulkUtil.KEY_SET_D; -import static org.ehcache.core.EhcacheBasicBulkUtil.copyOnly; -import static org.ehcache.core.EhcacheBasicBulkUtil.copyWithout; -import static org.ehcache.core.EhcacheBasicBulkUtil.fanIn; -import static org.ehcache.core.EhcacheBasicBulkUtil.getEntryMap; -import static org.ehcache.core.EhcacheBasicBulkUtil.union; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.everyItem; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.isIn; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.ehcache.core.EhcacheBasicRemoveAllTest.getAnyStringSet; -import static org.ehcache.core.EhcacheBasicRemoveAllTest.getAnyEntryIterableFunction; -import static org.ehcache.core.EhcacheBasicRemoveAllTest.getAnyStringIterable; - -/** - * @author Abhilash - * - */ -public class EhcacheWithLoaderWriterBasicRemoveAllTest extends EhcacheBasicCrudBase { - - @Mock - protected CacheLoaderWriter cacheLoaderWriter; - - /** - * A Mockito {@code ArgumentCaptor} for the {@code Set} argument to the - * {@link Store#bulkCompute(Set, java.util.function.Function, java.util.function.Supplier) - * Store.bulkCompute(Set, Function, NullaryFunction} method. - */ - @Captor - protected ArgumentCaptor> bulkComputeSetCaptor; - - /** - * A Mockito {@code ArgumentCaptor} for the - * {@link BulkCacheWritingException BulkCacheWritingException} - * provided to the - * {@link org.ehcache.core.internal.resilience.ResilienceStrategy#removeAllFailure(Iterable, StoreAccessException, BulkCacheWritingException)} - * ResilienceStrategy.putAllFailure(Iterable, StoreAccessException, BulkCacheWritingException)} method. - */ - @Captor - private ArgumentCaptor bulkExceptionCaptor; - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • empty request set
  • - *
  • populated {@code Store} (keys not relevant)
  • - *
  • populated {@code CacheLoaderWriter} (keys not relevant)
  • - *
- */ - @Test - public void testRemoveAllEmptyRequestWithWriter() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalStoreContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - ehcache.removeAll(Collections.emptySet()); - - verify(this.store, never()).bulkCompute(eq(Collections.emptySet()), getAnyEntryIterableFunction()); - assertThat(fakeStore.getEntryMap(), equalTo(originalStoreContent)); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(originalStoreContent)); - verify(this.cacheLoaderWriter, never()).deleteAll(eq(Collections.emptySet())); - verify(this.spiedResilienceStrategy, never()).removeAllFailure(eq(Collections.emptySet()), any(StoreAccessException.class)); - - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterNoOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - ehcache.removeAll(contentUpdates); - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates)); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, contentUpdates))); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_A.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterNoOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - ehcache.removeAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterNoOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - ehcache.removeAll(contentUpdates); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterNoOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - final Set expectedFailures = KEY_SET_C; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates)); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, expectedSuccesses))); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_A.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterNoOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - final Set expectedFailures = KEY_SET_C; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterNoOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - final Set expectedFailures = KEY_SET_C; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterNoOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - final Set expectedFailures = KEY_SET_C; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_A.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterNoOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - final Set expectedFailures = KEY_SET_C; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates)); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThat(bcweSuccesses, everyItem(isIn(expectedSuccesses))); - assertThat(expectedFailures, everyItem(isIn(bcweFailures.keySet()))); - assertThat(copyWithout(fakeLoaderWriter.getEntryMap(), bcweFailures.keySet()), - equalTo(copyWithout(copyWithout(originalWriterContent, copyOnly(contentUpdates, bcweSuccesses)), bcweFailures.keySet()))); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterNoOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_C); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - final Set expectedFailures = KEY_SET_C; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates)); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThat(bcweSuccesses, everyItem(isIn(expectedSuccesses))); - assertThat(expectedFailures, everyItem(isIn(bcweFailures.keySet()))); - assertThat(copyWithout(fakeLoaderWriter.getEntryMap(), bcweFailures.keySet()), - equalTo(copyWithout(copyWithout(originalWriterContent, copyOnly(contentUpdates, bcweSuccesses)), bcweFailures.keySet()))); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterNoOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, contentUpdates))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterNoOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), - getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - no keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterNoOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), - getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterSomeOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - ehcache.removeAll(contentUpdates); - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates)); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, contentUpdates))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_A.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterSomeOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - ehcache.removeAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterSomeOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - ehcache.removeAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterSomeOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = KEY_SET_D; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_A.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterSomeOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = KEY_SET_D; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterSomeOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = KEY_SET_D; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterSomeOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_A.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterSomeOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates)); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThat(bcweSuccesses, everyItem(isIn(expectedSuccesses))); - assertThat(expectedFailures, everyItem(isIn(bcweFailures.keySet()))); - assertThat(copyWithout(fakeLoaderWriter.getEntryMap(), bcweFailures.keySet()), - equalTo(copyWithout(copyWithout(originalWriterContent, copyOnly(contentUpdates, bcweSuccesses)), - bcweFailures.keySet()))); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterSomeOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates)); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThat(bcweSuccesses, everyItem(isIn(expectedSuccesses))); - assertThat(expectedFailures, everyItem(isIn(bcweFailures.keySet()))); - assertThat(copyWithout(fakeLoaderWriter.getEntryMap(), bcweFailures.keySet()), - equalTo(copyWithout(copyWithout(originalWriterContent, copyOnly(contentUpdates, bcweSuccesses)), - bcweFailures.keySet()))); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterSomeOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, contentUpdates))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterSomeOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - some keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterSomeOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyA3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C, KEY_SET_D); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterFullOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C); - ehcache.removeAll(contentUpdates); - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates)); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, contentUpdates))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.SUCCESS)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_B.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterFullOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C); - ehcache.removeAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • no {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterFullOverlapNoneFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyB3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C); - ehcache.removeAll(contentUpdates); - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, contentUpdates))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterFullOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = KEY_SET_D; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_B.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterFullOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = KEY_SET_D; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterFullOverlapSomeFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyB3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = KEY_SET_D; - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterFullOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), Matchers.>equalTo(expectedSuccesses)); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(expectedFailures)); - } - - verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, expectedSuccesses))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(copyWithout(originalWriterContent, expectedSuccesses))); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(KEY_SET_B.size())); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterFullOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates)); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThat(bcweSuccesses, everyItem(isIn(expectedSuccesses))); - assertThat(expectedFailures, everyItem(isIn(bcweFailures.keySet()))); - assertThat(copyWithout(fakeLoaderWriter.getEntryMap(), bcweFailures.keySet()), - equalTo(copyWithout(copyWithout(originalWriterContent, copyOnly(contentUpdates, bcweSuccesses)), - bcweFailures.keySet()))); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • some {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
  • at least one {@link CacheLoaderWriter#deleteAll(Iterable)} call aborts
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterFullOverlapSomeFailWithAbort() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyB3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent, KEY_SET_D); - fakeLoaderWriter.setCompleteFailureKey("keyC4"); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final Set expectedFailures = union(KEY_SET_D, Collections.singleton("keyC4")); - final Set expectedSuccesses = copyWithout(contentUpdates, expectedFailures); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - @SuppressWarnings("unchecked") - final Set bcweSuccesses = (Set)this.bulkExceptionCaptor.getValue().getSuccesses(); - @SuppressWarnings("unchecked") - final Map bcweFailures = (Map)this.bulkExceptionCaptor.getValue().getFailures(); - - assertThat(union(bcweSuccesses, bcweFailures.keySet()), equalTo(contentUpdates)); - assertThat(Collections.disjoint(bcweSuccesses, bcweFailures.keySet()), is(true)); - assertThat(bcweSuccesses, everyItem(isIn(expectedSuccesses))); - assertThat(expectedFailures, everyItem(isIn(bcweFailures.keySet()))); - assertThat(copyWithout(fakeLoaderWriter.getEntryMap(), bcweFailures.keySet()), - equalTo(copyWithout(copyWithout(originalWriterContent, copyOnly(contentUpdates, bcweSuccesses)), - bcweFailures.keySet()))); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapWriterFullOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - assertThat(e.getSuccesses(), empty()); - assertThat(e.getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - } - - verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, contentUpdates))); - verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - verifyZeroInteractions(this.spiedResilienceStrategy); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws before accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeWriterFullOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store) - .bulkCompute(getAnyStringSet(), getAnyEntryIterableFunction()); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(originalWriterContent)); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for - *
    - *
  • non-empty request set
  • - *
  • populated {@code Store} - some keys overlap request
  • - *
  • {@link Store#bulkCompute} throws after accessing writer
  • - *
  • populated {@code CacheLoaderWriter} - all keys overlap
  • - *
  • all {@link CacheLoaderWriter#deleteAll(Iterable)} calls fail
  • - *
- */ - @Test - public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterWriterFullOverlapAllFail() throws Exception { - final Map originalStoreContent = getEntryMap(KEY_SET_A, KEY_SET_B); - final FakeStore fakeStore = new FakeStore(originalStoreContent, Collections.singleton("keyB3")); - this.store = spy(fakeStore); - - final Map originalWriterContent = getEntryMap(KEY_SET_B, KEY_SET_C, KEY_SET_D); - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception("deleteAll failed")).when(this.cacheLoaderWriter).deleteAll(getAnyStringIterable()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final Set contentUpdates = fanIn(KEY_SET_B, KEY_SET_C, KEY_SET_D); - try { - ehcache.removeAll(contentUpdates); - fail(); - } catch (BulkCacheWritingException e) { - // Expected - } - - final InOrder ordered = inOrder(this.store, this.cacheLoaderWriter, this.spiedResilienceStrategy); - ordered.verify(this.store, atLeast(1)) - .bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); - assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); - // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.cacheLoaderWriter, atLeast(1)).deleteAll(getAnyStringIterable()); - assertThat(fakeLoaderWriter.getEntryMap(), equalTo(originalWriterContent)); - ordered.verify(this.spiedResilienceStrategy) - .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class), this.bulkExceptionCaptor.capture()); - - assertThat(this.bulkExceptionCaptor.getValue().getSuccesses(), empty()); - assertThat(this.bulkExceptionCaptor.getValue().getFailures().keySet(), Matchers.>equalTo(contentUpdates)); - - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.FAILURE)); - assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); - } - - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicRemoveAllTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } - - /** - * Collects all arguments captured by {@link #bulkComputeSetCaptor}. - * - * @return the argument values collected by {@link #bulkComputeSetCaptor}; the - * {@code Iterator} over the resulting {@code Set} returns the values - * in the order observed by the captor. - */ - private Set getBulkComputeArgs() { - final Set bulkComputeArgs = new LinkedHashSet<>(); - for (final Set set : this.bulkComputeSetCaptor.getAllValues()) { - bulkComputeArgs.addAll(set); - } - return bulkComputeArgs; - } - -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveTest.java deleted file mode 100644 index 48014f3d05..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveTest.java +++ /dev/null @@ -1,493 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.hamcrest.CoreMatchers; -import org.junit.Test; -import org.mockito.InOrder; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.EnumSet; - -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; - -/** - * Provides testing of basic REMOVE(key) operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicRemoveTest extends EhcacheBasicCrudBase { - - @Mock - protected CacheLoaderWriter cacheLoaderWriter; - - @Test - public void testRemoveNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove(null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
- */ - @Test - public void testRemoveNoStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.NOOP)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.NOOP)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveNoStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.NOOP)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveNoStoreEntryCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove("key"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
- */ - @Test - public void testRemoveNoStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verify(this.spiedResilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveNoStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).delete(eq("key")); - ordered.verify(this.spiedResilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveNoStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).delete(eq("key")); - ordered.verify(this.spiedResilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveNoStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - try { - ehcache.remove("key"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).delete(eq("key")); - ordered.verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
- */ - @Test - public void testRemoveHasStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveHasStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveHasStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveHasStoreEntryCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove("key"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
- */ - @Test - public void testRemoveHasStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verify(this.spiedResilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveHasStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).delete(eq("key")); - ordered.verify(this.spiedResilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveHasStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.remove("key"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).delete(eq("key")); - ordered.verify(this.spiedResilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveHasStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - try { - ehcache.remove("key"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).delete(eq("key")); - ordered.verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using the - * {@link CacheLoaderWriter} provided. - * - * @param cacheLoaderWriter the {@code CacheLoaderWriter} to use; may be {@code null} - * - * @return a new {@code EhcacheWithLoaderWriter} instance - */ - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicRemoveTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveValueTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveValueTest.java deleted file mode 100644 index 7449062be6..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicRemoveValueTest.java +++ /dev/null @@ -1,843 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.hamcrest.CoreMatchers; -import org.junit.Test; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.EnumSet; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; - -/** - * Provides testing of basic REMOVE(key, value) operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicRemoveValueTest extends EhcacheBasicCrudBase { - - @Mock - protected CacheLoaderWriter cacheLoaderWriter; - - @Test - public void testRemoveNullNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove(null, null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testRemoveKeyNull() throws Exception { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove("key", null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testRemoveNullValue() throws Exception { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove(null, "value"); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value in {@code Store}
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value in {@code Store}
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertTrue(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertTrue(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is("unequalValue")); - assertThat(fakeWriter.getEntryMap().get("key"), is("unequalValue")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertTrue(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryStoreAccessExceptionUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryStoreAccessExceptionUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.remove("key", "value"), is(false)); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryStoreAccessExceptionUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertThat(ehcache.remove("key", "value"), is(true)); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertTrue(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryStoreAccessExceptionEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.remove("key", "value"), is(true)); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(true)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryStoreAccessExceptionEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.remove("key", "value"), is(true)); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(true)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryStoreAccessExceptionEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.remove("key", "value"), is(true)); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(true)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove("key", "value"); - fail(); - } catch (CacheWritingException e) { - // expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.ConditionalRemoveOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertFalse(ehcache.remove("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveValueNoStoreEntryStoreAccessExceptionCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove("key", "value"); - fail(); - } catch (CacheWritingException e) { - // expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveValueUnequalStoreEntryStoreAccessExceptionCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.remove("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#remove(Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.delete} throws
  • - *
- */ - @Test - public void testRemoveValueEqualStoreEntryStoreAccessExceptionCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "value")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).delete("key"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.remove("key", "value"); - fail(); - } catch (CacheWritingException e) { - // expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .removeFailure(eq("key"), eq("value"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using the - * {@link CacheLoaderWriter} provided. - * - * @param cacheLoaderWriter - * the {@code CacheLoaderWriter} to use; may be {@code null} - * - * @return a new {@code EhcacheWithLoaderWriter} instance - */ - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicRemoveValueTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java deleted file mode 100644 index e1f349a283..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java +++ /dev/null @@ -1,559 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.hamcrest.CoreMatchers; -import org.junit.Test; -import org.mockito.InOrder; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.EnumSet; - -import static org.ehcache.core.util.Matchers.holding; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -/** - * Provides testing of basic REPLACE(key, value) operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicReplaceTest extends EhcacheBasicCrudBase { - - @Mock - protected CacheLoaderWriter cacheLoaderWriter; - - @Test - public void testReplaceNullNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace(null, null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testReplaceKeyNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testReplaceNullValue() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace(null, "value"); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
- */ - @Test - public void testReplaceNoStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertNull(ehcache.replace("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeLoaderWriter); - - assertNull(ehcache.replace("key", "value")); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeLoaderWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceNoStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeLoaderWriter); - - assertThat(ehcache.replace("key", "value"), is("oldValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(true)); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), is(equalTo("value"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceNoStoreEntryCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(cacheLoaderWriter); - - try { - ehcache.replace("key", "value"); - fail(); - } catch (CacheWritingException e) { - e.printStackTrace(); - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.ReplaceOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
- */ - @Test - public void testReplaceNoStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verify(this.spiedResilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceNoStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.replace("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).load("key"); - ordered.verify(this.spiedResilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeLoaderWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceNoStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.replace("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), any(String.class)); - ordered.verify(this.spiedResilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), is(equalTo("value"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceNoStoreEntryStoreAccessExceptionCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - try { - ehcache.replace("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), any(String.class)); - ordered.verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
- */ - @Test - public void testReplaceHasStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertThat(ehcache.replace("key", "value"), is(equalTo("oldValue"))); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("value"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceHasStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeLoaderWriter); - - assertThat(ehcache.replace("key", "value"), is(equalTo("oldValue"))); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("value"))); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceHasStoreEntryHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeLoaderWriter); - - assertThat(ehcache.replace("key", "value"), is(equalTo("oldValue"))); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("value"))); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceHasStoreEntryCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.ReplaceOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
- */ - @Test - public void testReplaceHasStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verify(this.spiedResilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceHasStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.replace("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verify(this.store, times(1)).remove("key"); - ordered.verify(this.cacheLoaderWriter).load(eq("key")); - ordered.verify(this.spiedResilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeLoaderWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • key present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceHasStoreEntryStoreAccessExceptionHasCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - ehcache.replace("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), any(String.class)); - ordered.verify(this.spiedResilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeLoaderWriter.getEntryMap().get("key"), is(equalTo("value"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object)} for - *
    - *
  • key present in {@code Store}
  • - *
  • {@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceHasStoreEntryStoreAccessExceptionCacheWritingException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction()); - - final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeLoaderWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "value"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - final InOrder ordered = inOrder(this.cacheLoaderWriter, this.spiedResilienceStrategy); - - try { - ehcache.replace("key", "value"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction()); - ordered.verify(this.cacheLoaderWriter).write(eq("key"), any(String.class)); - ordered.verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - @Test - public void testReplaceWithImmediatelyExpiredEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "old-value")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "old-value")); - - @SuppressWarnings("unchecked") - final Expiry expiry = mock(Expiry.class); - when(expiry.getExpiryForUpdate(eq("key"), argThat(holding("old-value")), eq("value"))).thenReturn(Duration.ZERO); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter, expiry); - - ehcache.replace("key", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), nullValue()); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using the - * {@link CacheLoaderWriter} provided. - * - * @param cacheLoaderWriter the {@code CacheLoaderWriter} to use; may be {@code null} - * - * @return a new {@code EhcacheWithLoaderWriter} instance - */ - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - return getEhcache(cacheLoaderWriter, Expirations.noExpiration()); - } - - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter, Expiry expiry) { - CacheConfiguration config = new BaseCacheConfiguration<>(String.class, String.class, null, null, - expiry, ResourcePoolsHelper.createHeapOnlyPools()); - final EhcacheWithLoaderWriter ehcache - = new EhcacheWithLoaderWriter<>(config, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicReplaceTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java deleted file mode 100644 index 57b10a8bfb..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java +++ /dev/null @@ -1,935 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.hamcrest.CoreMatchers; -import org.junit.Test; -import org.mockito.Mock; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.EnumSet; - -import static org.ehcache.core.util.Matchers.holding; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -/** - * Provides testing of basic REPLACE(key, newValue, oldValue) operations on an {@code EhcacheWithLoaderWriter}. - * - * @author Clifford W. Johnson - */ -public class EhcacheWithLoaderWriterBasicReplaceValueTest extends EhcacheBasicCrudBase { - - @Mock - protected CacheLoaderWriter cacheLoaderWriter; - - - @Test - public void testReplaceValueNullNullNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace(null, null, null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testReplaceKeyNullNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", null, null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testReplaceKeyValueNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", "oldValue", null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testReplaceKeyNullValue() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", null, "newValue"); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testReplaceNullValueNull() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace(null, "oldValue", null); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testReplaceNullValueValue() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace(null, "oldValue", "newValue"); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - @Test - public void testReplaceNullNullValue() { - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace(null, null, "newValue"); - fail(); - } catch (NullPointerException e) { - // expected - } - } - - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertFalse(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value in {@code Store}
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertFalse(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value in {@code Store}
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertTrue(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("newValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertTrue(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("newValue"))); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - assertThat(fakeWriter.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key not present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryStoreAccessExceptionNoCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.emptyMap()); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is("unequalValue")); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertTrue(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("newValue"))); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryStoreAccessExceptionUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryStoreAccessExceptionUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("unequalValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with unequal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryStoreAccessExceptionUnequalCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "unequalValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertThat(ehcache.replace("key", "oldValue", "newValue"), is(true)); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is("newValue")); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("newValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertFalse(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter); - - assertTrue(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("newValue"))); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("newValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryStoreAccessExceptionEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(true)); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("newValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryStoreAccessExceptionEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(true)); - // Broken initial state: CacheLoaderWriter check omitted - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • key with equal value present via {@code CacheLoaderWriter}
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryStoreAccessExceptionEqualCacheLoaderWriterEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - ehcache.replace("key", "oldValue", "newValue"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(true)); - assertThat(fakeWriter.getEntryMap().get("key"), is(equalTo("newValue"))); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "newValue"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", "oldValue", "newValue"); - fail(); - } catch (CacheWritingException e) { - // expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.ReplaceOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "newValue"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - assertFalse(ehcache.replace("key", "oldValue", "newValue")); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "newValue"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", "oldValue", "newValue"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.ReplaceOutcome.class)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key not present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceValueNoStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "newValue"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", "oldValue", "newValue"); - fail(); - } catch (CacheWritingException e) { - // expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with unequal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceValueUnequalStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "newValue"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", "oldValue", "newValue"); - fail(); - } catch (CacheWritingException e) { - // expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#replace(Object, Object, Object)} for - *
    - *
  • key with equal value present in {@code Store}
  • - *
  • >{@code Store.compute} throws
  • - *
  • {@code CacheLoaderWriter.write} throws
  • - *
- */ - @Test - public void testReplaceValueEqualStoreEntryStoreAccessExceptionCacheLoaderWriterException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); - this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "oldValue")); - this.cacheLoaderWriter = spy(fakeWriter); - doThrow(new Exception()).when(this.cacheLoaderWriter).write("key", "newValue"); - final EhcacheWithLoaderWriter ehcache = this.getEhcache(this.cacheLoaderWriter); - - try { - ehcache.replace("key", "oldValue", "newValue"); - fail(); - } catch (CacheWritingException e) { - // Expected - } - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), any(CacheWritingException.class)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); - } - - @Test - public void testReplaceWithImmediatelyExpiredEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "old-value")); - this.store = spy(fakeStore); - - final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "old-value")); - - @SuppressWarnings("unchecked") - final Expiry expiry = mock(Expiry.class); - when(expiry.getExpiryForUpdate(eq("key"), argThat(holding("old-value")), eq("value"))).thenReturn(Duration.ZERO); - - final EhcacheWithLoaderWriter ehcache = this.getEhcache(fakeWriter, expiry); - - ehcache.replace("key", "old-value", "value"); - verify(this.store).compute(eq("key"), getAnyBiFunction(), getBooleanSupplier()); - verifyZeroInteractions(this.spiedResilienceStrategy); - assertThat(fakeStore.getEntryMap().get("key"), nullValue()); - assertThat(fakeWriter.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); - } - - /** - * Gets an initialized {@link EhcacheWithLoaderWriter Ehcache} instance using the - * {@link org.ehcache.spi.loaderwriter.CacheLoaderWriter CacheLoaderWriter} provided. - * - * @param cacheLoaderWriter - * the {@code CacheLoaderWriter} to use; may be {@code null} - * - * @return a new {@code EhcacheWithLoaderWriter} instance - */ - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter) { - return getEhcache(cacheLoaderWriter, Expirations.noExpiration()); - } - - private EhcacheWithLoaderWriter getEhcache(final CacheLoaderWriter cacheLoaderWriter, Expiry expiry) { - CacheConfiguration config = new BaseCacheConfiguration<>(String.class, String.class, null, null, - expiry, ResourcePoolsHelper.createHeapOnlyPools()); - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(config, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBasicReplaceValueTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBulkMethodsTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBulkMethodsTest.java deleted file mode 100644 index 433df5006f..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBulkMethodsTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core; - -import java.util.*; -import java.util.function.Function; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.Store.ValueHolder; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.junit.Test; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; -import org.slf4j.LoggerFactory; - -import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.core.IsCollectionContaining.hasItems; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; -import static org.ehcache.core.EhcacheBulkMethodsTest.entry; -import static org.ehcache.core.EhcacheBulkMethodsTest.valueHolder; - -/** - * @author Abhilash - * - */ -@SuppressWarnings("unchecked") -public class EhcacheWithLoaderWriterBulkMethodsTest { - - @Test - public void testPutAllWithWriter() throws Exception { - Store store = mock(Store.class); - when(store.bulkCompute((Set) argThat(hasItems(1, 2, 3)), any(Function.class))).thenAnswer(invocation -> { - Function>, Object> function = - (Function>, Object>)invocation.getArguments()[1]; - function.apply(Arrays.asList(entry(1, "one"), entry(2, "two"), entry(3, "three"))); - return null; - }); - CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - - InternalCache ehcache = getCache(store, cacheLoaderWriter); - ehcache.init(); - - ehcache.putAll(new LinkedHashMap() {{ - put(1, "one"); - put(2, "two"); - put(3, "three"); - }}); - - verify(store).bulkCompute((Set) argThat(hasItems(1, 2, 3)), any(Function.class)); - verify(cacheLoaderWriter).writeAll(argThat(hasItems(entry(1, "one"), entry(2, "two"), entry(3, "three")))); - } - - @Test - public void testGetAllWithLoader() throws Exception { - Store store = mock(Store.class); - - when(store.bulkComputeIfAbsent((Set)argThat(hasItems(1, 2, 3)), any(Function.class))).thenAnswer(invocation -> { - Function function = (Function)invocation.getArguments()[1]; - function.apply(invocation.getArguments()[0]); - - final Map>loaderValues = new LinkedHashMap<>(); - loaderValues.put(1, valueHolder((CharSequence)"one")); - loaderValues.put(2, valueHolder((CharSequence)"two")); - loaderValues.put(3, null); - return loaderValues; - }); - - CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - - InternalCache ehcache = getCache(store, cacheLoaderWriter); - ehcache.init(); - Map result = ehcache.getAll(new HashSet(Arrays.asList(1, 2, 3))); - - assertThat(result, hasEntry((Number)1, (CharSequence) "one")); - assertThat(result, hasEntry((Number)2, (CharSequence) "two")); - assertThat(result, hasEntry((Number)3, (CharSequence) null)); - verify(store).bulkComputeIfAbsent((Set)argThat(hasItems(1, 2, 3)), any(Function.class)); - verify(cacheLoaderWriter).loadAll(argThat(hasItems(1, 2, 3))); - } - - @Test - public void testRemoveAllWithWriter() throws Exception { - Store store = mock(Store.class); - when(store.bulkCompute((Set) argThat(hasItems(1, 2, 3)), any(Function.class))).thenAnswer(invocation -> { - Function function = (Function)invocation.getArguments()[1]; - function.apply(Arrays.asList(entry(1, "one"), entry(2, "two"), entry(3, "three"))); - return null; - }); - CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - - InternalCache ehcache = getCache(store, cacheLoaderWriter); - ehcache.init(); - ehcache.removeAll(new LinkedHashSet(Arrays.asList(1, 2, 3))); - - verify(store).bulkCompute((Set) argThat(hasItems(1, 2, 3)), any(Function.class)); - verify(cacheLoaderWriter).deleteAll(argThat(hasItems(1, 2, 3))); - } - - protected InternalCache getCache(Store store, CacheLoaderWriter cacheLoaderWriter) { - CacheConfiguration cacheConfig = mock(CacheConfiguration.class); - when(cacheConfig.getExpiry()).thenReturn(mock(Expiry.class)); - CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); - return new EhcacheWithLoaderWriter(cacheConfig, store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterBulkMethodsTest")); - } - -} diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java deleted file mode 100644 index cb4273c67d..0000000000 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import org.ehcache.Cache; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.events.StoreEventSource; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.junit.Test; -import org.slf4j.LoggerFactory; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.sameInstance; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; - -/** - * @author Abhilash - * - */ -public class EhcacheWithLoaderWriterTest extends CacheTest { - - @Override - protected InternalCache getCache(Store store) { - final CacheConfiguration config = new BaseCacheConfiguration<>(Object.class, Object.class, null, - null, null, ResourcePoolsHelper.createHeapOnlyPools()); - @SuppressWarnings("unchecked") - CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); - @SuppressWarnings("unchecked") - CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - return new EhcacheWithLoaderWriter<>(config, store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterTest")); - } - - @Test - public void testIgnoresKeysReturnedFromCacheLoaderLoadAll() { - LoadAllVerifyStore store = new LoadAllVerifyStore(); - KeyFumblingCacheLoaderWriter loader = new KeyFumblingCacheLoaderWriter(); - @SuppressWarnings("unchecked") - CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); - CacheConfiguration config = new BaseCacheConfiguration<>(String.class, String.class, null, - null, null, ResourcePoolsHelper.createHeapOnlyPools()); - EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(config, store, loader, cacheEventDispatcher, LoggerFactory - .getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheTest6")); - ehcache.init(); - - HashSet keys = new HashSet<>(); - keys.add("key1"); - keys.add("key2"); - keys.add("key3"); - keys.add("key4"); - - ehcache.getAll(keys); - assertTrue("validation performed inline by LoadAllVerifyStore", true); - } - - private static class LoadAllVerifyStore implements Store { - - @Override - public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { - Iterable> result = mappingFunction.apply(keys); - ArrayList functionReturnedKeys = new ArrayList<>(); - for (Map.Entry entry : result) { - functionReturnedKeys.add(entry.getKey()); - } - assertThat(functionReturnedKeys.size(), is(keys.size())); - - ArrayList paramKeys = new ArrayList<>(keys); - Collections.sort(paramKeys); - Collections.sort(functionReturnedKeys); - - for (int i = 0; i < functionReturnedKeys.size(); i++) { - assertThat(functionReturnedKeys.get(i), sameInstance(paramKeys.get(i))); - } - - return Collections.emptyMap(); - } - - @Override - public List getConfigurationChangeListeners() { - return new ArrayList<>(); - } - - @Override - public ValueHolder get(String key) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public boolean containsKey(String key) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public PutStatus put(String key, String value) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public ValueHolder putIfAbsent(String key, String value) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public boolean remove(String key) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public RemoveStatus remove(String key, String value) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public ValueHolder replace(String key, String value) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public ReplaceStatus replace(String key, String oldValue, String newValue) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public void clear() throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public StoreEventSource getStoreEventSource() { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public Iterator>> iterator() { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public ValueHolder compute(String key, BiFunction mappingFunction) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public ValueHolder compute(String key, BiFunction mappingFunction, Supplier replaceEqual) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public ValueHolder computeIfAbsent(String key, Function mappingFunction) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - } - - private static class KeyFumblingCacheLoaderWriter implements CacheLoaderWriter { - @Override - public Map loadAll(Iterable keys) throws Exception { - HashMap result = new HashMap<>(); - for (String key : keys) { - result.put(new String(key), "valueFor" + key); - } - return result; - } - - @Override - public void write(String key, String value) throws Exception { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public void delete(String key) throws Exception { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public String load(String key) throws Exception { - throw new UnsupportedOperationException("TODO Implement me!"); - } - } - -} diff --git a/core/src/test/java/org/ehcache/core/UserManagedCacheTest.java b/core/src/test/java/org/ehcache/core/UserManagedCacheTest.java deleted file mode 100644 index e86f1470ca..0000000000 --- a/core/src/test/java/org/ehcache/core/UserManagedCacheTest.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core; - -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.core.spi.store.Store; -import org.ehcache.StateTransitionException; -import org.ehcache.core.spi.LifeCycled; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.hamcrest.CoreMatchers; -import org.junit.Test; -import org.slf4j.LoggerFactory; - -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.verify; - -@SuppressWarnings({ "unchecked", "rawtypes" }) -public class UserManagedCacheTest { - - @Test - public void testUserManagedCacheDelegatesLifecycleCallsToStore() throws Exception { - final Store store = mock(Store.class); - CacheConfiguration config = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, - null, ResourcePoolsHelper.createHeapOnlyPools()); - Ehcache ehcache = new Ehcache(config, store, mock(CacheEventDispatcher.class), LoggerFactory.getLogger(Ehcache.class + "testUserManagedCacheDelegatesLifecycleCallsToStore")); - assertCacheDelegatesLifecycleCallsToStore(ehcache); - - EhcacheWithLoaderWriter ehcacheWithLoaderWriter = new EhcacheWithLoaderWriter(config, store, - mock(CacheLoaderWriter.class), mock(CacheEventDispatcher.class), LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "testUserManagedCacheDelegatesLifecycleCallsToStore")); - assertCacheDelegatesLifecycleCallsToStore(ehcacheWithLoaderWriter); - } - - private void assertCacheDelegatesLifecycleCallsToStore(InternalCache cache) throws Exception { - final LifeCycled mock = mock(LifeCycled.class); - cache.addHook(mock); - cache.init(); - verify(mock).init(); - cache.close(); - verify(mock).close(); - } - - @Test - public void testUserManagedEhcacheFailingTransitionGoesToLowestStatus() throws Exception { - final Store store = mock(Store.class); - CacheConfiguration config = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); - Ehcache ehcache = new Ehcache(config, store, mock(CacheEventDispatcher.class), LoggerFactory.getLogger(Ehcache.class + "testUserManagedEhcacheFailingTransitionGoesToLowestStatus")); - assertFailingTransitionGoesToLowestStatus(ehcache); - EhcacheWithLoaderWriter ehcacheWithLoaderWriter = new EhcacheWithLoaderWriter(config, store, - mock(CacheLoaderWriter.class), mock(CacheEventDispatcher.class), LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "testUserManagedCacheDelegatesLifecycleCallsToStore")); - assertFailingTransitionGoesToLowestStatus(ehcacheWithLoaderWriter); - } - - private void assertFailingTransitionGoesToLowestStatus(InternalCache cache) throws Exception { - final LifeCycled mock = mock(LifeCycled.class); - cache.addHook(mock); - doThrow(new Exception()).when(mock).init(); - try { - cache.init(); - fail(); - } catch (StateTransitionException e) { - assertThat(cache.getStatus(), CoreMatchers.is(Status.UNINITIALIZED)); - } - - reset(mock); - cache.init(); - assertThat(cache.getStatus(), is(Status.AVAILABLE)); - doThrow(new Exception()).when(mock).close(); - try { - cache.close(); - fail(); - } catch (StateTransitionException e) { - assertThat(cache.getStatus(), is(Status.UNINITIALIZED)); - } - - } - -} diff --git a/core/src/test/java/org/ehcache/core/config/BaseCacheConfigurationTest.java b/core/src/test/java/org/ehcache/core/config/BaseCacheConfigurationTest.java deleted file mode 100644 index 8039cb69f8..0000000000 --- a/core/src/test/java/org/ehcache/core/config/BaseCacheConfigurationTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.config; - -import org.ehcache.config.ResourcePools; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import static org.mockito.Mockito.mock; - -/** - * BaseCacheConfigurationTest - */ -public class BaseCacheConfigurationTest { - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testThrowsWithNullKeyType() { - expectedException.expect(NullPointerException.class); - expectedException.expectMessage("keyType"); - - new BaseCacheConfiguration<>(null, String.class, null, - null, null, mock(ResourcePools.class)); - } - - @Test - public void testThrowsWithNullValueType() { - expectedException.expect(NullPointerException.class); - expectedException.expectMessage("valueType"); - - new BaseCacheConfiguration<>(Long.class, null, null, - null, null, mock(ResourcePools.class)); - } - - @Test - public void testThrowsWithNullResourcePools() { - expectedException.expect(NullPointerException.class); - expectedException.expectMessage("resourcePools"); - - new BaseCacheConfiguration<>(Long.class, String.class, null, - null, null, null); - } - -} diff --git a/core/src/test/java/org/ehcache/core/config/ResourcePoolsHelper.java b/core/src/test/java/org/ehcache/core/config/ResourcePoolsHelper.java deleted file mode 100644 index c6f1751f74..0000000000 --- a/core/src/test/java/org/ehcache/core/config/ResourcePoolsHelper.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core.config; - -import org.ehcache.config.ResourcePool; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.ResourceType; -import org.ehcache.config.ResourceUnit; -import org.ehcache.config.SizedResourcePool; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; - -import java.util.HashMap; -import java.util.Map; - -/** - * @author Ludovic Orban - */ -public class ResourcePoolsHelper { - - public static ResourcePools createHeapOnlyPools() { - return createHeapOnlyPools(Long.MAX_VALUE); - } - - public static ResourcePools createHeapOnlyPools(long heapSize) { - Map, ResourcePool> poolsMap = new HashMap<>(); - poolsMap.put(ResourceType.Core.HEAP, new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, heapSize, EntryUnit.ENTRIES, false)); - return new ResourcePoolsImpl(poolsMap); - } - - public static ResourcePools createHeapOnlyPools(long heapSize, ResourceUnit resourceUnit) { - Map, ResourcePool> poolsMap = new HashMap<>(); - poolsMap.put(ResourceType.Core.HEAP, new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, heapSize, resourceUnit, false)); - return new ResourcePoolsImpl(poolsMap); - } - - public static ResourcePools createOffheapOnlyPools(long offheapSizeInMb) { - Map, ResourcePool> poolsMap = new HashMap<>(); - poolsMap.put(ResourceType.Core.OFFHEAP, new SizedResourcePoolImpl<>(ResourceType.Core.OFFHEAP, offheapSizeInMb, MemoryUnit.MB, false)); - return new ResourcePoolsImpl(poolsMap); - } - - public static ResourcePools createDiskOnlyPools(long diskSize, ResourceUnit resourceUnit) { - Map, ResourcePool> poolsMap = new HashMap<>(); - poolsMap.put(ResourceType.Core.DISK, new SizedResourcePoolImpl<>(ResourceType.Core.DISK, diskSize, resourceUnit, false)); - return new ResourcePoolsImpl(poolsMap); - } - - public static ResourcePools createHeapDiskPools(long heapSize, long diskSizeInMb) { - Map, ResourcePool> poolsMap = new HashMap<>(); - poolsMap.put(ResourceType.Core.HEAP, new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, heapSize, EntryUnit.ENTRIES, false)); - poolsMap.put(ResourceType.Core.DISK, new SizedResourcePoolImpl<>(ResourceType.Core.DISK, diskSizeInMb, MemoryUnit.MB, false)); - return new ResourcePoolsImpl(poolsMap); - } - - public static ResourcePools createHeapDiskPools(long heapSize, ResourceUnit heapResourceUnit, long diskSizeInMb) { - Map, ResourcePool> poolsMap = new HashMap<>(); - poolsMap.put(ResourceType.Core.HEAP, new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, heapSize, heapResourceUnit, false)); - poolsMap.put(ResourceType.Core.DISK, new SizedResourcePoolImpl<>(ResourceType.Core.DISK, diskSizeInMb, MemoryUnit.MB, false)); - return new ResourcePoolsImpl(poolsMap); - } - -} diff --git a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java deleted file mode 100644 index 96ee3164b5..0000000000 --- a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java +++ /dev/null @@ -1,416 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.service; - -import java.io.IOException; -import java.net.URL; -import java.util.Collection; -import java.util.Enumeration; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; - -import org.ehcache.core.EhcacheWithLoaderWriter; -import org.ehcache.core.spi.store.CacheProvider; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.core.spi.services.DefaultTestProvidedService; -import org.ehcache.core.spi.services.DefaultTestService; -import org.ehcache.core.spi.services.FancyCacheProvider; -import org.ehcache.core.spi.services.TestProvidedService; -import org.ehcache.core.spi.services.TestService; -import org.hamcrest.CoreMatchers; -import org.junit.Ignore; -import org.junit.Test; - -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.CoreMatchers.sameInstance; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.withSettings; - -/** - * Tests for {@link ServiceLocator}. - */ -public class ServiceLocatorTest { - - @Test - public void testClassHierarchies() { - ServiceLocator.DependencySet dependencySet = dependencySet(); - final Service service = new ChildTestService(); - dependencySet.with(service); - assertThat(dependencySet.providerOf(FooProvider.class), sameInstance(service)); - final Service fancyCacheProvider = new FancyCacheProvider(); - dependencySet.with(fancyCacheProvider); - - final Collection servicesOfType = dependencySet.providersOf(CacheProvider.class); - assertThat(servicesOfType, is(not(empty()))); - assertThat(servicesOfType.iterator().next(), sameInstance(fancyCacheProvider)); - } - - @Test - public void testDoesNotUseTCCL() { - Thread.currentThread().setContextClassLoader(new ClassLoader() { - @Override - public Enumeration getResources(String name) throws IOException { - throw new AssertionError(); - } - }); - - dependencySet().with(TestService.class).build().getService(TestService.class); - } - - @Test - public void testAttemptsToStopStartedServicesOnInitFailure() { - Service s1 = new ParentTestService(); - FancyCacheProvider s2 = new FancyCacheProvider(); - - ServiceLocator locator = dependencySet().with(s1).with(s2).build(); - try { - locator.startAllServices(); - fail(); - } catch (Exception e) { - // see org.ehcache.spi.ParentTestService.start() - assertThat(e, instanceOf(RuntimeException.class)); - assertThat(e.getMessage(), is("Implement me!")); - } - assertThat(s2.startStopCounter, is(0)); - } - - @Test - public void testAttemptsToStopAllServicesOnCloseFailure() { - Service s1 = mock(CacheProvider.class); - Service s2 = mock(FooProvider.class); - Service s3 = mock(CacheLoaderWriterProvider.class); - - ServiceLocator locator = dependencySet().with(s1).with(s2).with(s3).build(); - try { - locator.startAllServices(); - } catch (Exception e) { - fail(); - } - final RuntimeException thrown = new RuntimeException(); - doThrow(thrown).when(s1).stop(); - - try { - locator.stopAllServices(); - fail(); - } catch (Exception e) { - assertThat(e, CoreMatchers.sameInstance(thrown)); - } - verify(s1).stop(); - verify(s2).stop(); - verify(s3).stop(); - } - - @Test - public void testStopAllServicesOnlyStopsEachServiceOnce() throws Exception { - Service s1 = mock(CacheProvider.class, withSettings().extraInterfaces(CacheLoaderWriterProvider.class)); - - ServiceLocator locator = dependencySet().with(s1).build(); - try { - locator.startAllServices(); - } catch (Exception e) { - fail(); - } - - locator.stopAllServices(); - verify(s1, times(1)).stop(); - } - - @Test - public void testCanOverrideDefaultServiceFromServiceLoader() { - ServiceLocator locator = dependencySet().with(new ExtendedTestService()).build(); - TestService testService = locator.getService(TestService.class); - assertThat(testService, instanceOf(ExtendedTestService.class)); - } - - @Test - public void testCanOverrideServiceDependencyWithoutOrderingProblem() throws Exception { - final AtomicBoolean started = new AtomicBoolean(false); - ServiceLocator serviceLocator = dependencySet().with(new TestServiceConsumerService()) - .with(new TestService() { - @Override - public void start(ServiceProvider serviceProvider) { - started.set(true); - } - - @Override - public void stop() { - // no-op - } - }).build(); - serviceLocator.startAllServices(); - assertThat(started.get(), is(true)); - } - - @Test - public void testServicesInstanciatedOnceAndStartedOnce() throws Exception { - - @ServiceDependencies(TestProvidedService.class) - class Consumer1 implements Service { - @Override - public void start(ServiceProvider serviceProvider) { - } - - @Override - public void stop() { - - } - } - - @ServiceDependencies(TestProvidedService.class) - class Consumer2 implements Service { - TestProvidedService testProvidedService; - @Override - public void start(ServiceProvider serviceProvider) { - testProvidedService = serviceProvider.getService(TestProvidedService.class); - } - - @Override - public void stop() { - - } - } - - Consumer1 consumer1 = spy(new Consumer1()); - Consumer2 consumer2 = new Consumer2(); - ServiceLocator.DependencySet dependencySet = dependencySet(); - - // add some services - dependencySet.with(consumer1); - dependencySet.with(consumer2); - dependencySet.with(new TestService() { - @Override - public void start(ServiceProvider serviceProvider) { - } - - @Override - public void stop() { - // no-op - } - }); - - // simulate what is done in ehcachemanager - dependencySet.with(TestService.class); - ServiceLocator serviceLocator = dependencySet.build(); - serviceLocator.startAllServices(); - - serviceLocator.stopAllServices(); - - verify(consumer1, times(1)).start(serviceLocator); - verify(consumer1, times(1)).stop(); - - assertThat(consumer2.testProvidedService.ctors(), greaterThanOrEqualTo(1)); - assertThat(consumer2.testProvidedService.stops(), equalTo(1)); - assertThat(consumer2.testProvidedService.starts(), equalTo(1)); - } - - @Test - public void testRedefineDefaultServiceWhileDependingOnIt() throws Exception { - ServiceLocator serviceLocator = dependencySet().with(new YetAnotherCacheProvider()).build(); - - serviceLocator.startAllServices(); - } - - @Test - @Ignore - public void testCircularDeps() throws Exception { - - final class StartStopCounter { - final AtomicInteger startCounter = new AtomicInteger(0); - final AtomicReference> startServiceProvider = new AtomicReference<>(); - final AtomicInteger stopCounter = new AtomicInteger(0); - public void countStart(ServiceProvider serviceProvider) { - startCounter.incrementAndGet(); - startServiceProvider.set(serviceProvider); - } - public void countStop() { - stopCounter.incrementAndGet(); - } - } - - @ServiceDependencies(TestProvidedService.class) - class Consumer1 implements Service { - final StartStopCounter startStopCounter = new StartStopCounter(); - @Override - public void start(ServiceProvider serviceProvider) { - assertThat(serviceProvider.getService(TestProvidedService.class), is(notNullValue())); - startStopCounter.countStart(serviceProvider); - } - @Override - public void stop() { - startStopCounter.countStop(); - } - } - - @ServiceDependencies(Consumer1.class) - class Consumer2 implements Service { - final StartStopCounter startStopCounter = new StartStopCounter(); - @Override - public void start(ServiceProvider serviceProvider) { - assertThat(serviceProvider.getService(Consumer1.class), is(notNullValue())); - startStopCounter.countStart(serviceProvider); - } - @Override - public void stop() { - startStopCounter.countStop(); - } - } - - @ServiceDependencies(Consumer2.class) - class MyTestProvidedService extends DefaultTestProvidedService { - final StartStopCounter startStopCounter = new StartStopCounter(); - @Override - public void start(ServiceProvider serviceProvider) { - assertThat(serviceProvider.getService(Consumer2.class), is(notNullValue())); - startStopCounter.countStart(serviceProvider); - super.start(serviceProvider); - } - @Override - public void stop() { - startStopCounter.countStop(); - super.stop(); - } - } - - @ServiceDependencies(DependsOnMe.class) - class DependsOnMe implements Service { - final StartStopCounter startStopCounter = new StartStopCounter(); - @Override - public void start(ServiceProvider serviceProvider) { - assertThat(serviceProvider.getService(DependsOnMe.class), sameInstance(this)); - startStopCounter.countStart(serviceProvider); - } - @Override - public void stop() { - startStopCounter.countStop(); - } - } - - ServiceLocator.DependencySet dependencySet = dependencySet(); - - Consumer1 consumer1 = new Consumer1(); - Consumer2 consumer2 = new Consumer2(); - MyTestProvidedService myTestProvidedService = new MyTestProvidedService(); - DependsOnMe dependsOnMe = new DependsOnMe(); - - // add some services - dependencySet.with(consumer1); - dependencySet.with(consumer2); - dependencySet.with(myTestProvidedService); - dependencySet.with(dependsOnMe); - - ServiceLocator serviceLocator = dependencySet.build(); - // simulate what is done in ehcachemanager - serviceLocator.startAllServices(); - - serviceLocator.stopAllServices(); - - assertThat(consumer1.startStopCounter.startCounter.get(), is(1)); - assertThat(consumer1.startStopCounter.startServiceProvider.get(), CoreMatchers.>is(serviceLocator)); - assertThat(consumer2.startStopCounter.startCounter.get(), is(1)); - assertThat(consumer2.startStopCounter.startServiceProvider.get(), CoreMatchers.>is(serviceLocator)); - assertThat(myTestProvidedService.startStopCounter.startCounter.get(), is(1)); - assertThat(myTestProvidedService.startStopCounter.startServiceProvider.get(), CoreMatchers.>is(serviceLocator)); - assertThat(dependsOnMe.startStopCounter.startCounter.get(), is(1)); - assertThat(dependsOnMe.startStopCounter.startServiceProvider.get(), CoreMatchers.>is(serviceLocator)); - - assertThat(consumer1.startStopCounter.stopCounter.get(), is(1)); - assertThat(consumer2.startStopCounter.stopCounter.get(), is(1)); - assertThat(myTestProvidedService.startStopCounter.stopCounter.get(), is(1)); - assertThat(dependsOnMe.startStopCounter.stopCounter.get(), is(1)); - } -} - -@ServiceDependencies(FancyCacheProvider.class) -class YetAnotherCacheProvider implements CacheProvider { - - @Override - public EhcacheWithLoaderWriter createCache(Class keyClazz, Class valueClazz, ServiceConfiguration... config) { - return null; - } - - @Override - public void releaseCache(EhcacheWithLoaderWriter resource) { - // no-op - } - - @Override - public void start(ServiceProvider serviceProvider) { - // no-op - } - - @Override - public void stop() { - // no-op - } -} - -class ExtendedTestService extends DefaultTestService { - -} - -interface FooProvider extends Service { - -} - -@ServiceDependencies(TestService.class) -class TestServiceConsumerService implements Service { - - @Override - public void start(ServiceProvider serviceProvider) { - assertThat(serviceProvider.getService(TestService.class), notNullValue()); - } - - @Override - public void stop() { - // no-op - } -} - -class ParentTestService implements FooProvider { - - @Override - public void start(final ServiceProvider serviceProvider) { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public void stop() { } - -} - -class ChildTestService extends ParentTestService { - - @Override - public void start(final ServiceProvider serviceProvider) { - throw new UnsupportedOperationException("Implement me!"); - } -} diff --git a/core/src/test/java/org/ehcache/core/internal/util/ClassLoadingTest.java b/core/src/test/java/org/ehcache/core/internal/util/ClassLoadingTest.java deleted file mode 100644 index 90d7495d2b..0000000000 --- a/core/src/test/java/org/ehcache/core/internal/util/ClassLoadingTest.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.internal.util; - -import static org.junit.Assert.*; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.util.Enumeration; -import java.util.Vector; - -import org.ehcache.core.internal.util.ClassLoading; -import org.junit.Test; - -public class ClassLoadingTest { - - @Test - public void testDefaultClassLoader() throws Exception { - String resource = getClass().getName().replace('.', '/').concat(".class"); - ClassLoader thisLoader = getClass().getClassLoader(); - ClassLoader defaultClassLoader = ClassLoading.getDefaultClassLoader(); - - Thread.currentThread().setContextClassLoader(null); - assertSame(thisLoader.loadClass(getClass().getName()), defaultClassLoader.loadClass(getClass().getName())); - assertEquals(thisLoader.getResource(resource), defaultClassLoader.getResource(resource)); - assertEqualEnumeration(thisLoader.getResources(resource), defaultClassLoader.getResources(resource)); - - Thread.currentThread().setContextClassLoader(new FindNothingLoader()); - assertSame(thisLoader.loadClass(getClass().getName()), defaultClassLoader.loadClass(getClass().getName())); - assertEquals(thisLoader.getResource(resource), defaultClassLoader.getResource(resource)); - assertEqualEnumeration(thisLoader.getResources(resource), defaultClassLoader.getResources(resource)); - - URL url = new URL("file:///tmp"); - ClassLoader tc = new TestClassLoader(url); - Thread.currentThread().setContextClassLoader(tc); - Class c = defaultClassLoader.loadClass(getClass().getName()); - assertNotSame(getClass(), c); - assertSame(tc, c.getClassLoader()); - assertEquals(url, defaultClassLoader.getResource(resource)); - assertEqualEnumeration(enumerationOf(url), defaultClassLoader.getResources(resource)); - } - - @SafeVarargs - private static Enumeration enumerationOf(T... values) { - Vector v = new Vector<>(); - for (T t : values) { - v.add(t); - } - return v.elements(); - } - - private static class TestClassLoader extends ClassLoader { - private final URL url; - - TestClassLoader(URL url) { - super(null); - this.url = url; - } - - @Override - public Class findClass(String name) throws ClassNotFoundException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - byte[] buf = new byte[1024]; - - try { - InputStream is = getClass().getClassLoader().getResourceAsStream(name.replace('.', '/').concat(".class")); - int read; - while ((read = is.read(buf)) >= 0) { - baos.write(buf, 0, read); - } - } catch (IOException ioe) { - throw new ClassNotFoundException(); - } - - byte[] data = baos.toByteArray(); - return defineClass(name, data, 0, data.length); - } - - @Override - public URL getResource(String name) { - return url; - } - - @Override - public Enumeration getResources(String name) throws IOException { - return enumerationOf(url); - } - - } - - private static class FindNothingLoader extends ClassLoader { - @Override - public Class loadClass(String name) throws ClassNotFoundException { - throw new ClassNotFoundException(); - } - - @Override - public URL getResource(String name) { - return null; - } - - @Override - public Enumeration getResources(String name) throws IOException { - return new Vector().elements(); - } - } - - private void assertEqualEnumeration(Enumeration e1, Enumeration e2) { - while (e1.hasMoreElements()) { - if (!e2.hasMoreElements()) { - throw new AssertionError(); - } - - Object o1 = e1.nextElement(); - Object o2 = e2.nextElement(); - - if (o1 == null || o2 == null) { - throw new AssertionError(); - } - - if ((!o1.equals(o2)) || (!o2.equals(o1))) { - throw new AssertionError(); - } - } - - if (e2.hasMoreElements()) { - throw new AssertionError(); - } - } - -} diff --git a/core/src/test/java/org/ehcache/core/spi/store/AbstractValueHolderTest.java b/core/src/test/java/org/ehcache/core/spi/store/AbstractValueHolderTest.java deleted file mode 100644 index 0722c96d9a..0000000000 --- a/core/src/test/java/org/ehcache/core/spi/store/AbstractValueHolderTest.java +++ /dev/null @@ -1,283 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.spi.store; - -import org.ehcache.expiry.Duration; -import org.ehcache.core.spi.time.TimeSource; -import org.junit.Test; - -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -/** - * @author Ludovic Orban - */ -public class AbstractValueHolderTest { - - @Test - public void testCreationTime() throws Exception { - AbstractValueHolder valueHolder = newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L); - - assertThat(valueHolder.creationTime(TimeUnit.SECONDS), is(1L)); - assertThat(valueHolder.creationTime(TimeUnit.MILLISECONDS), is(1000L)); - assertThat(valueHolder.creationTime(TimeUnit.MICROSECONDS), is(1000000L)); - } - - @Test - public void testExpirationTime() throws Exception { - AbstractValueHolder valueHolder = newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L, 1000L); - - assertThat(valueHolder.expirationTime(TimeUnit.SECONDS), is(1L)); - assertThat(valueHolder.expirationTime(TimeUnit.MILLISECONDS), is(1000L)); - assertThat(valueHolder.expirationTime(TimeUnit.MICROSECONDS), is(1000000L)); - } - - - @Test - public void testLastAccessTime() throws Exception { - // last access time defaults to create time - AbstractValueHolder valueHolder = newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L); - - assertThat(valueHolder.lastAccessTime(TimeUnit.SECONDS), is(1L)); - assertThat(valueHolder.lastAccessTime(TimeUnit.MILLISECONDS), is(1000L)); - assertThat(valueHolder.lastAccessTime(TimeUnit.MICROSECONDS), is(1000000L)); - - valueHolder = newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L, 0L, 2000L); - - assertThat(valueHolder.lastAccessTime(TimeUnit.SECONDS), is(2L)); - assertThat(valueHolder.lastAccessTime(TimeUnit.MILLISECONDS), is(2000L)); - assertThat(valueHolder.lastAccessTime(TimeUnit.MICROSECONDS), is(2000000L)); - } - - - @Test - public void testIsExpired() throws Exception { - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L).isExpired(1L, TimeUnit.SECONDS), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L).isExpired(1000L, TimeUnit.MILLISECONDS), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L).isExpired(1000000L, TimeUnit.MICROSECONDS), is(false)); - - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L, 1001L).isExpired(1L, TimeUnit.SECONDS), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L, 1001L).isExpired(1000L, TimeUnit.MILLISECONDS), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L, 1001L).isExpired(1000000L, TimeUnit.MICROSECONDS), is(false)); - - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L, 1000L).isExpired(1L, TimeUnit.SECONDS), is(true)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L, 1000L).isExpired(1000L, TimeUnit.MILLISECONDS), is(true)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L, 1000L).isExpired(1000000L, TimeUnit.MICROSECONDS), is(true)); - } - - @Test - public void testEquals() throws Exception { - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L)), is(true)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L)), is(false)); - - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 2L, 0L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 2L, 0L)), is(true)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 2L, 0L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 2L, 1L)), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 2L, 0L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 3L, 0L)), is(false)); - - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L, 2L, 1L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L, 2L, 1L)), is(true)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1L, 2L, 1L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L, 2L, 1L)), is(false)); - - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L, 3L, 1L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L, 2L, 1L)), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L, 2L, 3L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 0L, 2L, 1L)), is(false)); - - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1L).equals(newAbstractValueHolder(TimeUnit.SECONDS, 1L)), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.NANOSECONDS, 1L).equals(newAbstractValueHolder(TimeUnit.SECONDS, 0L)), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.SECONDS, 0L).equals(newAbstractValueHolder(TimeUnit.NANOSECONDS, 1L)), is(false)); - assertThat(newAbstractValueHolder(TimeUnit.SECONDS, 1L).equals(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L)), is(true)); - assertThat(newAbstractValueHolder(TimeUnit.MILLISECONDS, 1000L).equals(newAbstractValueHolder(TimeUnit.SECONDS, 1L)), is(true)); - } - - @Test - public void testSubclassEquals() throws Exception { - assertThat(new AbstractValueHolder(-1, 1000L) { - @Override - public String value() { - return "aaa"; - } - - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - - @Override - public int hashCode() { - return super.hashCode() + value().hashCode(); - } - @Override - public boolean equals(Object obj) { - if (obj instanceof AbstractValueHolder) { - AbstractValueHolder other = (AbstractValueHolder) obj; - return super.equals(obj) && value().equals(other.value()); - } - return false; - } - }.equals(new AbstractValueHolder(-1, 1L) { - @Override - public String value() { - return "aaa"; - } - - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.SECONDS; - } - - @Override - public int hashCode() { - return super.hashCode() + value().hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (obj instanceof AbstractValueHolder) { - AbstractValueHolder other = (AbstractValueHolder)obj; - return super.equals(obj) && value().equals(other.value()); - } - return false; - } - }), is(true)); - - assertThat(new AbstractValueHolder(-1, 1000L) { - @Override - public String value() { - return "aaa"; - } - - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MICROSECONDS; - } - - @Override - public int hashCode() { - return super.hashCode() + value().hashCode(); - } - @Override - public boolean equals(Object obj) { - if (obj instanceof AbstractValueHolder) { - AbstractValueHolder other = (AbstractValueHolder) obj; - return super.equals(obj) && value().equals(other.value()); - } - return false; - } - }.equals(new AbstractValueHolder(-1, 1L) { - @Override - public String value() { - return "bbb"; - } - - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - - @Override - public int hashCode() { - return super.hashCode() + value().hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (obj instanceof AbstractValueHolder) { - AbstractValueHolder other = (AbstractValueHolder)obj; - return super.equals(obj) && value().equals(other.value()); - } - return false; - } - }), is(false)); - } - - @Test - public void testAbstractValueHolderHitRate() { - TestTimeSource timeSource = new TestTimeSource(); - timeSource.advanceTime(1); - AbstractValueHolder valueHolder = new AbstractValueHolder(-1, timeSource.getTimeMillis()) { - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - - @Override - public String value() { - return "abc"; - } - }; - valueHolder.accessed((timeSource.getTimeMillis()), new Duration(1L, TimeUnit.MILLISECONDS)); - timeSource.advanceTime(1000); - assertThat(valueHolder.hitRate(timeSource.getTimeMillis(), - TimeUnit.SECONDS), is(1.0f)); - } - - - private AbstractValueHolder newAbstractValueHolder(final TimeUnit timeUnit, long creationTime) { - return new AbstractValueHolder(-1, creationTime) { - @Override - protected TimeUnit nativeTimeUnit() { - return timeUnit; - } - @Override - public String value() { - throw new UnsupportedOperationException(); - } - }; - } - private AbstractValueHolder newAbstractValueHolder(final TimeUnit timeUnit, long creationTime, long expirationTime) { - return new AbstractValueHolder(-1, creationTime, expirationTime) { - @Override - protected TimeUnit nativeTimeUnit() { - return timeUnit; - } - @Override - public String value() { - throw new UnsupportedOperationException(); - } - }; - } - private AbstractValueHolder newAbstractValueHolder(final TimeUnit timeUnit, long creationTime, long expirationTime, long lastAccessTime) { - final AbstractValueHolder abstractValueHolder = new AbstractValueHolder(-1, creationTime, expirationTime) { - @Override - protected TimeUnit nativeTimeUnit() { - return timeUnit; - } - - @Override - public String value() { - throw new UnsupportedOperationException(); - } - }; - abstractValueHolder.setLastAccessTime(lastAccessTime, timeUnit); - return abstractValueHolder; - } - - private static class TestTimeSource implements TimeSource { - - private long time = 0; - - @Override - public long getTimeMillis() { - return time; - } - - public void advanceTime(long step) { - time += step; - } - } - -} diff --git a/core/src/test/java/org/ehcache/core/util/Matchers.java b/core/src/test/java/org/ehcache/core/util/Matchers.java deleted file mode 100644 index 72201329bc..0000000000 --- a/core/src/test/java/org/ehcache/core/util/Matchers.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.core.util; - -import org.ehcache.ValueSupplier; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; - -import static org.hamcrest.Matchers.equalTo; - -/** - * Matchers - */ -public class Matchers { - - public static Matcher> holding(final V value) { - return holding(equalTo(value)); - } - - public static Matcher> holding(final Matcher matcher) { - return new TypeSafeMatcher>() { - @Override - protected boolean matchesSafely(ValueSupplier item) { - return matcher.matches(item.value()); - } - - @Override - public void describeTo(Description description) { - description.appendText("holder containing value ").appendDescriptionOf(matcher); - } - }; - } -} diff --git a/core/src/test/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/core/src/test/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory deleted file mode 100644 index 37a16ecef8..0000000000 --- a/core/src/test/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -org.ehcache.core.spi.services.TestServiceFactory -org.ehcache.core.spi.services.TestProvidedServiceFactory -org.ehcache.core.spi.services.FancyCacheProviderFactory \ No newline at end of file diff --git a/demos/00-NoCache/gradle.properties b/demos/00-NoCache/gradle.properties deleted file mode 100755 index 83967b0dd6..0000000000 --- a/demos/00-NoCache/gradle.properties +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 no cache demo module -subPomDesc = The no cache demo module of Ehcache 3 diff --git a/demos/00-NoCache/src/main/java/org/ehcache/demos/peeper/PeeperServlet.java b/demos/00-NoCache/src/main/java/org/ehcache/demos/peeper/PeeperServlet.java index 222696ed4c..c23530b94a 100755 --- a/demos/00-NoCache/src/main/java/org/ehcache/demos/peeper/PeeperServlet.java +++ b/demos/00-NoCache/src/main/java/org/ehcache/demos/peeper/PeeperServlet.java @@ -16,6 +16,7 @@ package org.ehcache.demos.peeper; import javax.servlet.ServletException; +import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -26,8 +27,14 @@ /** * @author Ludovic Orban */ +@WebServlet( + name = "PeeperServlet", + urlPatterns = {"/*"} +) public class PeeperServlet extends HttpServlet { + private static final long serialVersionUID = 1L; + @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { resp.setContentType("text/html"); @@ -61,9 +68,8 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws Se out.println(""); } - @Override - protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { + protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException { String peepText = req.getParameter("peep"); try { diff --git a/demos/00-NoCache/src/main/java/org/ehcache/demos/peeper/PeeperServletContextListener.java b/demos/00-NoCache/src/main/java/org/ehcache/demos/peeper/PeeperServletContextListener.java index cb018da806..9b19b3ce98 100755 --- a/demos/00-NoCache/src/main/java/org/ehcache/demos/peeper/PeeperServletContextListener.java +++ b/demos/00-NoCache/src/main/java/org/ehcache/demos/peeper/PeeperServletContextListener.java @@ -17,10 +17,12 @@ import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; +import javax.servlet.annotation.WebListener; /** * @author Ludovic Orban */ +@WebListener public class PeeperServletContextListener implements ServletContextListener { public static final DataStore DATA_STORE = new DataStore(); diff --git a/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml b/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml deleted file mode 100755 index f9df833ddb..0000000000 --- a/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - org.ehcache.demos.peeper.PeeperServletContextListener - - - - PeeperServlet - org.ehcache.demos.peeper.PeeperServlet - 1 - - - - PeeperServlet - /* - - - diff --git a/demos/01-CacheAside/gradle.properties b/demos/01-CacheAside/gradle.properties deleted file mode 100755 index 2f155acc0b..0000000000 --- a/demos/01-CacheAside/gradle.properties +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 cache aside demo module -subPomDesc = The cache aside demo module of Ehcache 3 diff --git a/demos/01-CacheAside/src/main/java/org/ehcache/demos/peeper/PeeperServlet.java b/demos/01-CacheAside/src/main/java/org/ehcache/demos/peeper/PeeperServlet.java index 222696ed4c..5d854a6bc3 100755 --- a/demos/01-CacheAside/src/main/java/org/ehcache/demos/peeper/PeeperServlet.java +++ b/demos/01-CacheAside/src/main/java/org/ehcache/demos/peeper/PeeperServlet.java @@ -16,6 +16,7 @@ package org.ehcache.demos.peeper; import javax.servlet.ServletException; +import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -26,8 +27,14 @@ /** * @author Ludovic Orban */ +@WebServlet( + name = "PeeperServlet", + urlPatterns = {"/*"} +) public class PeeperServlet extends HttpServlet { + private static final long serialVersionUID = 1L; + @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { resp.setContentType("text/html"); @@ -63,7 +70,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws Se @Override - protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { + protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException { String peepText = req.getParameter("peep"); try { diff --git a/demos/01-CacheAside/src/main/java/org/ehcache/demos/peeper/PeeperServletContextListener.java b/demos/01-CacheAside/src/main/java/org/ehcache/demos/peeper/PeeperServletContextListener.java index cb018da806..9b19b3ce98 100755 --- a/demos/01-CacheAside/src/main/java/org/ehcache/demos/peeper/PeeperServletContextListener.java +++ b/demos/01-CacheAside/src/main/java/org/ehcache/demos/peeper/PeeperServletContextListener.java @@ -17,10 +17,12 @@ import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; +import javax.servlet.annotation.WebListener; /** * @author Ludovic Orban */ +@WebListener public class PeeperServletContextListener implements ServletContextListener { public static final DataStore DATA_STORE = new DataStore(); diff --git a/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml b/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml deleted file mode 100755 index 2993f41a64..0000000000 --- a/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - org.ehcache.demos.peeper.PeeperServletContextListener - - - - PeeperServlet - org.ehcache.demos.peeper.PeeperServlet - 1 - - - - PeeperServlet - /* - - - diff --git a/demos/build.gradle b/demos/build.gradle index 600578055b..fbf81c9647 100644 --- a/demos/build.gradle +++ b/demos/build.gradle @@ -1,26 +1,26 @@ plugins { - id 'org.akhikhl.gretty' version '1.4.0' + id 'org.ehcache.build.conventions.war' apply false + id 'org.gretty' apply false } subprojects { - configurations.all { - resolutionStrategy { - // It seems jetty has some internal conflict and so those need to be forced - force 'org.ow2.asm:asm:5.0.3', 'org.ow2.asm:asm-commons:5.0.3', 'org.glassfish:javax.el:3.0.1-b08' - } - } + apply plugin: 'org.ehcache.build.conventions.war' + apply plugin: 'org.gretty' - apply plugin: 'war' - apply plugin: 'org.akhikhl.gretty' + repositories { + jcenter() + } gretty { - port = 8080 + httpPort = 8080 contextPath = '/' servletContainer = 'jetty9' } dependencies { - compile 'javax.servlet:servlet-api:2.5', project(':impl') - runtime 'ch.qos.logback:logback-classic:1.2.3', 'com.h2database:h2:1.4.192' + implementation project(':ehcache-impl') + implementation 'javax.servlet:javax.servlet-api:3.1.0' + runtimeOnly 'ch.qos.logback:logback-classic:1.2.3' + runtimeOnly 'com.h2database:h2:1.4.196' } } diff --git a/deploy.sh b/deploy.sh new file mode 100755 index 0000000000..6e15781ff5 --- /dev/null +++ b/deploy.sh @@ -0,0 +1,388 @@ +#!/usr/bin/env bash + +### +# Call this script to perform a release of Ehcache (Maven central, website, kit and all). +# +# See https://github.com/ehcache/ehcache3/wiki/dev.release for details. +# +# Set a dryRun variable if you want to skip commits and pushes +### + +# to exit in case of error +set -e +# to see what's going on +#set -v + +function pause { + echo + read -p "Press [enter] to continue" +} + +echo 'Welcome to the Ehcache release wizard' +echo +echo 'This wizard will guide you through an Ehcache release. Some steps will be performed automatically, some will require your help' +echo + +if [ -z "$git_origin" ]; then + git_origin=git@github.com:ehcache/ehcache3.git +fi +current_branch=$(git branch | grep '^\*' | cut -d ' ' -f 2) + +read -e -p "You want to deploy from the git branch named ${current_branch}, is that right? (Y/n): " YN +[[ $YN != "y" && $YN != "Y" && $YN != "" ]] && (echo "Please checkout the correct branch and restart this script" && exit 1) + +echo +echo 'We will now make sure you are up-to-date with the origin' +echo + +git pull $git_origin $current_branch + +if [ ! -z "$(git status --porcelain)" ]; then + echo 'You have local changes. Please remove them and relaunch this script' + exit 1 +fi + +echo +echo 'All good' +echo + +read -e -p "Which version do you want to release? " version + +# A major release will end with 0. e.g. 3.7.0, 3.8.0 +if [ "$(echo $version | cut -d '.' -f 3)" == "0" ]; then + is_major=1 +else + is_major=0 +fi + +# A latest version will always be deployed from master. Bugfix of ealier versions will be from a release/x.y branch +if [ "$current_branch" == "master" ]; then + is_latest_version=1 +else + is_latest_version=0 +fi + +read -e -p "You want to deploy version ${version} from branch ${current_branch}. Is that correct? (Y/n)" YN +[[ $YN != "y" && $YN != "Y" && $YN != "" ]] && (echo "Aborting due to wrong input" && exit 1) + +major_version="$(echo $version | cut -d'.' -f 1).$(echo $version | cut -d'.' -f 2)" +short_major_version=${major_version//[.]/} + +echo +echo 'We will start by configuring GitHub correctly' +echo "First make sure you have a milestone for version ${version} at https://github.com/ehcache/ehcache3/milestones" +echo "If you don't, create it. Name it ${version}" + +read -e -p "What is the milestone number? (look at the URL) " milestone + +echo +echo 'Now attach any closed issues and PR since the last version' +read -e -p 'What was the previous version? ' previous_version +echo 'A helpful git log will now be printed' +echo +git --no-pager log v${previous_version}..HEAD +pause + +echo "Now, let's create an issue for the release" +echo "It contains checkboxes that you will check along the release" +echo "Open https://github.com/ehcache/ehcache3/issues" +echo "Press 'New Issue'" +echo "Set the title to 'Release ${version}'" +echo "Attach the issue to the milestone ${version}" +echo "Assign the issue to you" +echo "Set the description to the content of dist/templates/github-release-issue.md" +echo "Create the issue" +pause + +echo "We will now" +echo "1- Create a local release branch named release-${version}." +echo "2- Commit the final version and tag it." +echo "Only the tag will be pushed to origin (not the branch)." +echo + +sed -i '' "s/%VERSION%/${version}/g" dist/templates/github-release.md +sed -i '' "s/%MILESTONE%/${milestone}/g" dist/templates/github-release.md +sed -i '' "s/%MAJORVERSION%/${major_version}/g" dist/templates/github-release.md +echo "Please add a little description for the release in dist/templates/github-release.md" +pause + +sed -i '' "s/ehcacheVersion = .*/ehcacheVersion = ${version}/" gradle.properties +if [ -z "$dryRun" ]; then + git checkout -b release-${version} + git add gradle.properties dist/templates/github-release.md + git commit -m "Version ${version}" + git tag -m ":ship: Release ${version}" -v${version} + git push $git_origin v${version} +else + echo git checkout -b release-${version} + echo git add gradle.properties dist/templates/github-release.md + echo git commit -m "Version ${version}" + echo git tag -m ":ship: Release ${version}" v${version} + echo git push $git_origin v${version} +fi + +echo +echo 'Now launch the release to Maven central' +echo '1- Open http://jenkins.terracotta.eur.ad.sag:8080/view/All%20Pipelines/job/publishers-10.2.0/job/ehcache-releaser-3.x/' +echo '2- Press "Build with parameters' +echo "3- Enter v${version} in the git_tag field" +echo "4- Come back here when it's done" +pause + +echo +echo "We will now create a GitHub release" +echo "Open https://github.com/ehcache/ehcache3/tags" +echo "On our new tag v${version}, you will see three dots at the far right" +echo "Click on it and select 'Create Release'" +echo "Set the Tag version to v${version}" +echo "Set the Release title to 'Ehcache ${version}'" +echo "Set the following in the description" +release_description=$(< dist/templates/github-release.md) +echo "$release_description" +pause + +echo "Add the binaries from Maven central to the release" +echo "They will be downloaded in dist/build/binaries" +mkdir -p dist/build/binaries +pushd dist/build/binaries +wget https://repo1.maven.org/maven2/org/ehcache/ehcache/${version}/ehcache-${version}-javadoc.jar +wget https://repo1.maven.org/maven2/org/ehcache/ehcache/${version}/ehcache-${version}-spi-javadoc.jar +wget https://repo1.maven.org/maven2/org/ehcache/ehcache/${version}/ehcache-${version}.jar +wget https://repo1.maven.org/maven2/org/ehcache/ehcache-clustered/${version}/ehcache-clustered-${version}-javadoc.jar +wget https://repo1.maven.org/maven2/org/ehcache/ehcache-clustered/${version}/ehcache-clustered-${version}-kit.tgz +wget https://repo1.maven.org/maven2/org/ehcache/ehcache-clustered/${version}/ehcache-clustered-${version}-kit.zip +wget https://repo1.maven.org/maven2/org/ehcache/ehcache-clustered/${version}/ehcache-clustered-${version}.jar +wget https://repo1.maven.org/maven2/org/ehcache/ehcache-transactions/${version}/ehcache-transactions-${version}-javadoc.jar +wget https://repo1.maven.org/maven2/org/ehcache/ehcache-transactions/${version}/ehcache-transactions-${version}.jar +popd +pause + +echo "Create the release" +pause + +echo "We are doing good. Now let's attack the website" +read -e -p "Where is the ehcache.org-site clone located? (default:../ehcache.org-site): " site_dir + +if [ "$site_dir" == "" ]; then + site_dir='../ehcache.org-site' +fi + +if [ $is_major ]; then + echo "Adding XSDs since this is a major version" + cp ehcache-xml/src/main/schema/ehcache-core.xsd $site_dir/schema/ehcache-core.xsd + cp ehcache-xml/src/main/schema/ehcache-core.xsd $site_dir/schema/ehcache-core-${major_version}.xsd + cp ehcache-107/src/main/resources/ehcache-107-ext.xsd $site_dir/schema/ehcache-107-ext.xsd + cp ehcache-107/src/main/resources/ehcache-107-ext.xsd $site_dir/schema/ehcache-107-ext-${major_version}.xsd + cp clustered/ehcache-client/src/main/resources/ehcache-clustered-ext.xsd $site_dir/schema/ehcache-clustered-ext.xsd + cp clustered/ehcache-client/src/main/resources/ehcache-clustered-ext.xsd $site_dir/schema/ehcache-clustered-ext-${major_version}.xsd + cp ehcache-transactions/src/main/resources/ehcache-tx-ext.xsd $site_dir/schema/ehcache-tx-ext.xsd + cp ehcache-transactions/src/main/resources/ehcache-tx-ext.xsd $site_dir/schema/ehcache-tx-ext-${major_version}.xsd +fi + +echo "Copy the javadoc from Maven central" +unzip "dist/build/binaries/ehcache-${version}-javadoc.jar" -d "${site_dir}/apidocs/${version}" +unzip "dist/build/binaries/ehcache-clustered-${version}-javadoc.jar" -d "${site_dir}/apidocs/${version}/clustered" +unzip "dist/build/binaries/ehcache-transactions-${version}-javadoc.jar" -d "${site_dir}/apidocs/${version}/transactions" + +echo "Remove Manifests" +rm -rf "${site_dir}/apidocs/${version}/META-INF" "${site_dir}/apidocs/${version}/clustered/META-INF" "${site_dir}/apidocs/${version}/transactions/META-INF" + +pushd $site_dir +if [ -z "$dryRun" ]; then + git checkout master + git pull origin master + git checkout -b "ehcache${version}" +else + echo git checkout master + echo git pull origin master + echo git checkout -b "ehcache${version}" +fi + +if [ $is_major ]; then + echo "Update _config.yml" + echo " -" >> _config.yml + echo " scope:" >> _config.yml + echo " path: \"documentation/${major_version}\"" >> _config.yml + echo " type: \"pages\"" >> _config.yml + echo " values:" >> _config.yml + echo " layout: \"docs35_page\"" >> _config.yml + echo " ehc_version: \"${major_version}\"" >> _config.yml + echo " ehc_javadoc_version: \"${version}\"" >> _config.yml + echo " ehc_checkout_dir_var: \"sourcedir39\"" >> _config.yml + + sed -i '' "s/#needle\_for\_sourcedir/ - sourcedir${short_major_version}=\/_eh${short_major_version}\\ +#needle_for_sourcedir/" _config.yml + sed -i '' "s/current: \"[0-9]\.[0-9]\"/current: \"${major_version}\"/" _config.yml + read -e -p "What is the future version? " future_version + sed -i '' "s/future: \"[0-9]\.[0-9]\"/future: \"${future_version}\"/" _config.yml + + echo "Update home_announcement.html" + sed -i '' "s/Ehcache [0-9]\.[0-9] is now available/Ehcache ${major_version} is now available/" _includes/home_announcement.html + + echo "Update documentation/index.md" + echo "Please add the following line in the current documentation section and move the existing one to history" + echo "|[Ehcache ${major_version} User Guide](/documentation/${major_version}/) |[Core JavaDoc](/apidocs/${version}/index.html){:target=\"_blank\"}
[Clustered Module JavaDoc](/apidocs/${version}/clustered/index.html){:target=\"_blank\"}
[Transactions Module JavaDoc](/apidocs/${version}/transactions/index.html){:target=\"_blank\"}|" + + echo "Update schema/index.md" + sed -i '' "s/\[\/\/\]: # (needle_core)/ * [ehcache-core-${major_version}.xsd](\/schema\/ehcache-core-${major_version}.xsd)\\ +[\/\/]: # (needle_core)/" schema/index.md + sed -i '' "s/\[\/\/\]: # (needle_107)/ * [ehcache-107-ext-${major_version}.xsd](\/schema\/ehcache-107-ext-${major_version}.xsd)\\ +[\/\/]: # (needle_107)/" schema/index.md + sed -i '' "s/\[\/\/\]: # (needle_tx)/ * [ehcache-tx-ext-${major_version}.xsd](\/schema\/ehcache-tx-ext-${major_version}.xsd)\\ +[\/\/]: # (needle_tx)/" schema/index.md + sed -i '' "s/\[\/\/\]: # (needle_clustered)/ * [ehcache-clustered-ext-${major_version}.xsd](\/schema\/ehcache-clustered-ext-${major_version}.xsd)\\ +[\/\/]: # (needle_clustered)/" schema/index.md + +else + echo "Update _config.yml" + sed -i '' "s/ehc_javadoc_version: \"${major_version}\.[0-9]\"/ehc_javadoc_version: \"${version}\"/" _config.yml + + echo "Update documentation/index.md" + echo "Update with the following line in the current documentation section" + echo "|[Ehcache ${major_version} User Guide](/documentation/${major_version}/) |[Core JavaDoc](/apidocs/${version}/index.html){:target=\"_blank\"}
[Clustered Module JavaDoc](/apidocs/${version}/clustered/index.html){:target=\"_blank\"}
[Transactions Module JavaDoc](/apidocs/${version}/transactions/index.html){:target=\"_blank\"}|" +fi + +if [ $is_latest_version ]; then + echo "Update ehc3_quickstart.html" + sed -i '' "s/version>[0-9]\.[0-9]\.[0-9]<\/version/version\>${version}\<\/version/" _includes/ehc3_quickstart.html +fi + +echo "Please make sur the docs35_page layout in _config.yml is still valid" +pause + +echo "Check that README.md table about version, version_dir and branch is still accurate" +pause + +read -e -p "What is the upstream repository name? " samples_upstream + +if [ -z "$dryRun" ]; then + git add . + git commit -m "Release ${version}" + git push --set-upstream ${samples_upstream} "ehcache${version}" +else + echo git add . + echo git commit -m "Release ${version}" + echo git push --set-upstream ${samples_upstream} "ehcache${version}" +fi +popd + +echo "Now please open a PR over branch ehcache${version} https://github.com/ehcache/ehcache3.org-site/pulls" +pause +popd + +echo "Website deployment is done every 15 minutes" +echo "If you want to start it manually: http://jenkins.terracotta.eur.ad.sag:8080/job/websites/job/ehcache.org-site-publisher/" + +echo +echo "Now please update the current and next release version in README.adoc" +echo +pause + +echo +echo "Finally, close the GitHub issue and the milestone" +echo +pause + +echo +echo "We now need to deploy the docker images" +echo +read -e -p "What is the terracotta platform version to deploy?" terracotta_version +read -e -p "What is the Terracotta-OSS docker clone located (default:../docker)?" docker_dir +read -e -p "Which previous image do you want to base your image on?" template_image + +escaped_template_image=${template_image//./\\.} + +echo "You now need to create the appropriate triggers on Docker hub" + +echo "Open https://hub.docker.com/r/terracotta/sample-ehcache-client/~/settings/automated-builds/" +echo "Change the Dockerfile location of the latest tag to /${terracotta_version}/sample-ehcache-client" +echo "Add a line with tag ${terracotta_version} and Dockerfile location /${terracotta_version}/sample-ehcache-client" + +echo "Open https://hub.docker.com/r/terracotta/terracotta-server-oss/~/settings/automated-builds/" +echo "Change the Dockerfile location of the latest tag to /${terracotta_version}/server" +echo "Add a line with tag ${terracotta_version} and Dockerfile location /${terracotta_version}/server" + +pushd $docker_dir + +cp -r $template_image $terracotta_version +sed -i '' "s/${escaped_template_image}/${terracotta_version}/g" ${terracotta_version}/sample-ehcache-client/README.md +sed -i '' "s/ehcache-clustered-[0-9]\.[0-9]\.[0-9]-kit.tgz/ehcache-clustered-${version}-kit.tgz/g" ${terracotta_version}/sample-ehcache-client/Dockerfile +sed -i '' "s/ehcache-clustered\/[0-9]\.[0-9]\.[0-9]/ehcache-clustered\/${version}/" ${terracotta_version}/sample-ehcache-client/Dockerfile + +sed -i '' "s/${escaped_template_image}/${terracotta_version}/g" ${terracotta_version}/server/README.md +sed -i '' "s/ehcache-clustered-[0-9]\.[0-9]\.[0-9]-kit.tgz/ehcache-clustered-${version}-kit.tgz/g" ${terracotta_version}/server/Dockerfile +sed -i '' "s/ehcache-clustered\/[0-9]\.[0-9]\.[0-9]/ehcache-clustered\/${version}/" ${terracotta_version}/server/Dockerfile + +sed -i '' "s/${escaped_template_image}/${terracotta_version}/g" ${terracotta_version}/README.md +sed -i '' "s/${escaped_template_image}/${terracotta_version}/g" ${terracotta_version}/docker-compose.yml + +sed -i '' "s/ehcache [0-9]\.[0-9]\.[0-9] \/ Terracotta Server OSS ${escaped_template_image}/ehcache ${version} \/ Terracotta Server OSS ${terracotta_version}/" README.md +sed -i '' "s/\[\/\/\]: # (needle_version)/* [${terracotta_version}](\/${terracotta_version}), matches Ehcache ${version}, available from : https:\/\/github.com\/ehcache\/ehcache3\/releases\\ +[\/\/]: # (needle_version)/g" README.md + +if [ -z "$dryRun" ]; then + git add . + git commit -m "Release $terracotta_version using Ehcache $version" + git push origin master +else + echo git add . + echo git commit -m "Release $terracotta_version using Ehcache $version" + echo git push origin master +fi +popd + +echo "Images should appear on Docker Hub in https://hub.docker.com/r/terracotta" +echo "Please check" +pause + +if [ $is_latest_version ]; then + echo + echo "And last but not least, upgrade the samples" + echo + read -e -p "Where is the ehcache3-samples clone located? (default:../ehcache3-samples): " samples_dir + + if [ "$samples_dir" == "" ]; then + samples_dir='../ehcache3-samples' + fi + + pushd $samples_dir + + if [ -z "$dryRun" ]; then + git checkout master + git pull origin master + git checkout -b "ehcache${version}" + else + echo git checkout master + echo git pull origin master + echo git checkout -b "ehcache${version}" + fi + + sed -i '' "s/.*<\/ehcache3\.version>/${version}<\/ehcache3.version>/" pom.xml + + sed -i '' "s/terracotta-server-oss:.*/terracotta-server-oss:${terracotta_version}/g" fullstack/README.md + sed -i '' "s/terracotta-server-oss:.*/terracotta-server-oss:${terracotta_version}/g" fullstack/src/main/docker/terracotta-server-ha.yml + sed -i '' "s/terracotta-server-oss:.*/terracotta-server-oss:${terracotta_version}/g" fullstack/src/main/docker/terracotta-server-single.yml + + echo "Make sure the JCache version hasn't changed. If yes, update $samples_dir/pom.xml" + pause + + git add . + read -e -p "What is the upstream repository name? " samples_upstream + + if [ -z "$dryRun" ]; then + git commit -m "Upgrade to Ehcache ${version}" + git push --set-upstream ${samples_upstream} "ehcache${version}" + else + echo git commit -m "Upgrade to Ehcache ${version}" + echo git push --set-upstream ${samples_upstream} "ehcache${version}" + fi + popd + + echo "Now please open a PR over branch ehcache${version} https://github.com/ehcache/ehcache3-samples/pulls" + pause +fi + +echo "All done!" +echo "If needed, call ./start_next_version.sh to bump the version to the next one" +echo +echo "Have a good day!" diff --git a/dist/.gitignore b/dist/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/dist/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/dist/build.gradle b/dist/build.gradle deleted file mode 100644 index fec683687b..0000000000 --- a/dist/build.gradle +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -group = 'org.ehcache' -archivesBaseName = 'ehcache' - -dependencies { - compileOnly project(':api') - compileOnly project(':core') - compileOnly project(':impl') - compileOnly project(':107') - compileOnly project(':xml') -} - -apply plugin: EhDistribute - -dependencies { - shadowCompile "org.slf4j:slf4j-api:$parent.slf4jVersion" -} diff --git a/dist/gradle.properties b/dist/gradle.properties deleted file mode 100644 index 0c433da83d..0000000000 --- a/dist/gradle.properties +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache -subPomDesc = End-user ehcache3 jar artifact -javadocExclude = **/core/**, **/impl/**, **/xml/**, **/jsr107/**, **/transactions/**, **/management/**, **/tck/** -osgi = {"Import-Package" : ["!org.ehcache.*", "!org.terracotta.*"]} diff --git a/docs/README.adoc b/docs/README.adoc new file mode 100644 index 0000000000..4f6503a88d --- /dev/null +++ b/docs/README.adoc @@ -0,0 +1,31 @@ += How to work on documentation + +The documentation is all in link:http://www.methods.co.nz/asciidoc/[asciidoc]. + +== Build + +The documentation is built with + + ./gradlew :docs:asciidoctor + +You will then find the result in `docs/build/asciidoc/user`. + +== Live reload + +To get live reload while editing the documentation, you need two shells. + +In the first one, do a continuous buid of the documentation. + + ./gradlew -t :docs:asciidoctor + +In the second launch the live reload. + + ./gradlew -t :docs:livereload + +The local port will be printed on the console. The usual URL is link:http://localhost:35729[http://localhost:35729]. + +== Add a section + +1. Create a new adoc file +2. Copy the header used in the other pages to it +3. Reference it in `menu.adoc` and `index.adoc` diff --git a/docs/build.gradle b/docs/build.gradle index a980e66ef1..42563d66f0 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -14,46 +14,49 @@ * limitations under the License. */ -buildscript { - repositories { - jcenter() - } +import org.asciidoctor.gradle.jvm.AsciidoctorJBasePlugin +import org.asciidoctor.gradle.jvm.AsciidoctorTask - dependencies { - classpath 'org.asciidoctor:asciidoctor-gradle-plugin:1.5.2' - } +plugins { + id 'org.ehcache.build.conventions.base' + id 'org.asciidoctor.jvm.base' } -apply plugin: 'org.asciidoctor.convert' - -configurations.asciidoctor.dependencies.matching({it.group == 'org.asciidoctor' && it.name == 'asciidoctorj-groovy-dsl'}).all { - exclude group:'org.asciidoctor', module:'asciidoctorj' -} - -task copyCSS(type: Copy) { - from ('css') { - include '**' +asciidoctorj { + safeMode 'UNSAFE' + attributes 'skip-front-matter': 'true' + fatalWarnings ~/.*/ + modules { + diagram.version '1.5.18' } - into("${buildDir}/asciidoc/user/css") } -task copyImages(type: Copy) { - from('src/docs/asciidoc/user/images') { - include '**' +def createCopyCssTask(def asciidocTask) { + return tasks.register("copy${asciidocTask.name}CSS", Sync) { + from ('css') { + include '**' + } into("${asciidocTask.outputDir}/css") } - into("${buildDir}/asciidoc/user/images") } -asciidoctor.dependsOn copyCSS, copyImages - -asciidoctor { - separateOutputDirs = false; - attributes 'skip-front-matter': 'true' - +tasks.withType(AsciidoctorTask) { + group = AsciidoctorJBasePlugin.TASK_GROUP resources { from('fonts') { include '*' - into('./user/fonts') - } + } into('./fonts') } + dependsOn createCopyCssTask(it) +} + +tasks.register('userDoc', AsciidoctorTask) { + description = 'Generate the user documentation' + sourceDir file('src/docs/asciidoc/user') + outputDir file("$buildDir/asciidoc/user") +} + +tasks.register('developerDoc', AsciidoctorTask) { + description = 'Generate the developer documentation' + sourceDir file('src/docs/asciidoc/developer') + outputDir file("$buildDir/asciidoc/developer") } diff --git a/docs/css/ehcache.css b/docs/css/ehcache.css index 39e0ce70aa..4abd60af06 100644 --- a/docs/css/ehcache.css +++ b/docs/css/ehcache.css @@ -270,8 +270,8 @@ select { #preamble { position: fixed; - top: 0px; - left: 0px; + top: 0; + left: 0; height: 100%; background-color: #5d6e7e; padding: 15px 10px; @@ -643,15 +643,12 @@ p a > code:hover { #header, #content, #footnotes, #footer { width: 100%; - margin-left: auto; - margin-right: auto; - margin-top: 0; - margin-bottom: 0; + margin: 0 auto; max-width: 62.5em; *zoom: 1; position: relative; padding-left: .9375em; - padding-right: .9375em + padding-right: .9375em; background-color: #fff; z-index:10; } @@ -870,9 +867,7 @@ p a > code:hover { } #content #toc { - border-style: solid; - border-width: 1px; - border-color: #e0e0dc; + border: 1px solid #e0e0dc; margin-bottom: 1.25em; padding: 1.25em; background: #f8f8f7; @@ -1005,9 +1000,7 @@ table.tableblock #preamble > .sectionbody > .paragraph:first-of-type p { } .exampleblock > .content { - border-style: solid; - border-width: 1px; - border-color: #e6e6e6; + border: 1px solid #e6e6e6; margin-bottom: 1.25em; padding: 1.25em; background: #fff; @@ -1024,9 +1017,7 @@ table.tableblock #preamble > .sectionbody > .paragraph:first-of-type p { } .sidebarblock { - border-style: solid; - border-width: 1px; - border-color: #e0e0dc; + border: 1px solid #e0e0dc; margin-bottom: 1.25em; padding: 1.25em; background: #f8f8f7; @@ -1237,7 +1228,7 @@ table.pyhltable .linenodiv { } .verseblock pre { - font-family: "Open Sans", "DejaVu Sans", sans; + font-family: "Open Sans", "DejaVu Sans", sans-serif; font-size: 1.15rem; color: rgba(0, 0, 0, .85); font-weight: 300; @@ -1432,9 +1423,7 @@ ul.checklist li > p:first-child > input[type="checkbox"]:first-child { } ul.inline { - margin: 0 auto .625em auto; - margin-left: -1.375em; - margin-right: 0; + margin: 0 0 .625em -1.375em; padding: 0; list-style: none; overflow: hidden diff --git a/docs/gradle.properties b/docs/gradle.properties deleted file mode 100644 index 63ef54e3be..0000000000 --- a/docs/gradle.properties +++ /dev/null @@ -1,2 +0,0 @@ -subPomName = Ehcache 3 Documentation module -subPomDesc = The Documentation module of Ehcache 3 \ No newline at end of file diff --git a/docs/images/design/basics/Faulting.png b/docs/images/design/basics/Faulting.png deleted file mode 100644 index 21adaa36d9..0000000000 Binary files a/docs/images/design/basics/Faulting.png and /dev/null differ diff --git a/docs/images/design/basics/baseTypes.png b/docs/images/design/basics/baseTypes.png deleted file mode 100644 index 41d1511efe..0000000000 Binary files a/docs/images/design/basics/baseTypes.png and /dev/null differ diff --git a/docs/images/design/basics/cacheStore.png b/docs/images/design/basics/cacheStore.png deleted file mode 100644 index dfb4520b35..0000000000 Binary files a/docs/images/design/basics/cacheStore.png and /dev/null differ diff --git a/docs/images/design/basics/config.png b/docs/images/design/basics/config.png deleted file mode 100644 index 9b80a5f59a..0000000000 Binary files a/docs/images/design/basics/config.png and /dev/null differ diff --git a/docs/images/design/basics/persistentStateTransitions.jpg b/docs/images/design/basics/persistentStateTransitions.jpg deleted file mode 100644 index 79d04ad686..0000000000 Binary files a/docs/images/design/basics/persistentStateTransitions.jpg and /dev/null differ diff --git a/docs/images/design/basics/stateTransitions.png b/docs/images/design/basics/stateTransitions.png deleted file mode 100644 index a9626ddaa8..0000000000 Binary files a/docs/images/design/basics/stateTransitions.png and /dev/null differ diff --git a/docs/images/design/basics/userManagedCache.png b/docs/images/design/basics/userManagedCache.png deleted file mode 100644 index 2a1a85e69b..0000000000 Binary files a/docs/images/design/basics/userManagedCache.png and /dev/null differ diff --git a/docs/images/design/clustered/putIfAbsentUml.png b/docs/images/design/clustered/putIfAbsentUml.png deleted file mode 100644 index 6dc878eb0b..0000000000 Binary files a/docs/images/design/clustered/putIfAbsentUml.png and /dev/null differ diff --git a/docs/src/docs/asciidoc/developer/clustered-events.adoc b/docs/src/docs/asciidoc/developer/clustered-events.adoc new file mode 100644 index 0000000000..20ee7f6b72 --- /dev/null +++ b/docs/src/docs/asciidoc/developer/clustered-events.adoc @@ -0,0 +1,82 @@ += Ehcache events from clustered caches + +This document describes the core design of what is required to implement ehcache events from caches backed by a +clustered store. + +== High-level requirements + +* Ehcache supports five types of cache events: on *eviction*, on *expiry*, on *removal*, on *update* and on *creation*. +* When an event is fired, every connected client with a registered listener has to receive it. +* Events must be delivered once and only once as long as the client(s), server(s) and network-in-between are all +healthy. +* What happens when there is a client disconnect, a passive take over, a split brain or any other hazard is yet to be +determined. +* No performance impact when the feature isn't used. + +=== Recommandations + +It must be made clear (documentation?) that the eventing mechanism is going to have a performance impact. + +Some features are undesirable because they are unlikely to be practical: + +* Synchronous events would require waiting for a round-trip to all clients before achieving a cache operation. This +would pretty much make such cache unusably slow. Ordered events aren't impossible to do, but would require a serious +engineering effort to get right as keeping the events in a strict order isn't trivial, so it's been left out. +Such configs throw an exception when attempted. +* Guaranteeing event delivery in all cases would require some form of stable store and a fairly complex and costly +2-phase logic. This would also have an unsustainable performance impact. Instead, clients should have a way to figure +out when such hazard happen to compensate for the possible lack of event delivery. + +== Technical facts + +Because of the current implementation: + +* All events find their source in a server: *creation*, *update*, *removal* and *expiration* are additions to the chain. +*Eviction* happens when the server is running low on resource and are detected and notified by the chain. + +This means a cluster-wide listener mechanism has to be created with the following features: + +* Clients can register and unregister themselves. This is because events have a performance impact when enabled. +* Events can be fired from any server. +* Clients have to interpret the server-send event that actually are chain operations and resolve those into +client-facing events. E.g.: the appending of a `RemoveOperation` translates to a *removal* event. + +== The transport mechanism + +An event delivery mechanism must be built to transport the events from the clients and servers to all clients registered +as listeners. It requires the following: + +* API to (un)register a client as a listener, through all layers, down to the chain on the server +* API for a server to fire an `append` event +* Modify existing API for a server to fire an `eviction` event to include the necessary data to fire the client-side +equivalent event(s). + +== The straightforward bits + +Modify the `ClusterTierActiveEntity` listener mechanism: +`ServerStoreEvictionListener` already contains `void onEviction(long key)`. Add an extra `Chain evictedChain` parameter. +Then add `void onAppend(ByteBuffer appended, Chain beforeAppend)` and finally rename the interface to +`ServerStoreEventListener`. + +Assuming the plumbing for firing the above notifications from the servers to the clients is done, the resolver of the +client needs to be modified so that it can be used to interpret the appended to and/or evicted chains. + +== The complicated bits + +The following cases are going to be more complicated to implement: + +* Expiration can't be fired once and only once with the existing chain `Operation` s. A new one has to be introduced +for this purpose: `TimestampOperation`. This basically is a noop that indicates that an expiration has been detected +by a client. +* `TimestampOperation` cannot be interpreted by older clients. Fortunately, the codepath on which it's going to be +added is robust enough to intercept such failure which will en up calling the `ResilienceStrategy` to evict the +value. This is slightly odd, but still correct and we don't expect lots of cases where old clients will be mixed in. +* The new `onAppend` callback is not cheap to perform. Simple appends eventually transform into getAndAppend at the +very lowest layer of the chain to be able to make this call. This also means materializing an offheap chain onto the +heap and sending kilobytes (maybe dozens of them) over the wire. This means it must only be materialized when at least +one client has an event listener configured and only forwarded to clients that do have a configured event listener. +This is going to require a dynamic enabling/disabling mechanism as well as some carefully placed if-not-null checks. +* The new `Chain evictedChain` parameter of the `onEviction` callback isn't cheap to generate (it needs to be +materialized from off-heap onto the heap) nor to transport (can easily reach hundreds of KB) so null must be passed +when it isn't needed, exactly for the same reasons as for `onAppend` above. + diff --git a/docs/src/docs/asciidoc/developer/clustered.adoc b/docs/src/docs/asciidoc/developer/clustered.adoc new file mode 100644 index 0000000000..977f0c8dfe --- /dev/null +++ b/docs/src/docs/asciidoc/developer/clustered.adoc @@ -0,0 +1,174 @@ += Clustered Ehcache Server Side Data Store and Operation API Proposal + +This document outlines a proposal for storing, retrieving and mutating cache +state stored within the Voltron server. First I will outline the basics of the +proposal, then I will give some examples of its usage to implement a variety of +single key operations on the cache. Finally I will outline some implementation +decisions we may face around performance tradeoffs with the proposal. + +== The Proposal + +Ehcache supports an open type system on its keys. This means users can use any +'serializable' type as a key, this in turn means the server cannot perform +equals comparisons on keys (or values in the case of concurrent operations). +Therefore to support equality comparison on clients without performing CAS loops +or taking exclusive locks across a network this scheme has been proposed. + +== Server Side Data Structure + +The server will store cache data as a multi-map where the key is the cache-key +hash (minimally just the Java hashcode, but potentially something wider than +that), and the value is a sequence of binary payloads: + +["ditaa", "server-data-structure", "png"] +-------------------------------------------------------------------------------- + +----+ +----------------------------------------------+ + | | | +-----------+ +-----------+ +-----------+ | + |hash| => | |binary blob| |binary blob| |binary blob| ...| + | | | +-----------+ +-----------+ +-----------+ | + +----+ +----------------------------------------------+ + + +----+ +----------------------------------------------+ + | | | +-----------+ +-----------+ +-----------+ | + |hash| => | |binary blob| |binary blob| |binary blob| ...| + | | | +-----------+ +-----------+ +-----------+ | + +----+ +----------------------------------------------+ + + +----+ +----------------------------------------------+ + | | | +-----------+ +-----------+ +-----------+ | + |hash| => | |binary blob| |binary blob| |binary blob| ...| + | | | +-----------+ +-----------+ +-----------+ | + +----+ +----------------------------------------------+ +-------------------------------------------------------------------------------- + +The server supports the following operations: + + * `List get(hash)`: returns the list for the given hash + * `void append(hash, binary-blob)`: adds the given binary blob to the end of + the list for the given hash + * `list getAndAppend(hash, binary-blob)`: the same as append but + also returns the list immediately prior to mutation (atomically). + * `void replace(hash, list, list)`: replaces the + first occurrence of the given subsequence with the supplied sequence + +This is the entirety of the functionality and data structure exposed to the +client that is of relevance to single key operations. + +== Client Side Interpretation + +The server side view of the data structure above is fairly primitive. The +client however, understands the structure within the binary blobs, and so has +a much richer picture of what this data structure presents. + +["ditaa", "client-data-structure", "png"] +-------------------------------------------------------------------------------- + +----+ +----------------------------------------------------------+ + | | | +------+ /----------\ +------+ /----------\ /----------\ | + |hash| => | |k1, v1| |k1, f(k,v)| |k2, v1| |k1, g(k,v)| |k2, f(k,v)| | + | | | +------+ \----------/ +------+ \----------/ \----------/ | + +----+ +----------------------------------------------------------+ + | + V + +----------------------------------------------------------+ + | +------+ /----------\ | + | |k2, v1| |k2, f(k,v)| | + | +------+ \----------/ | + +----------------------------------------------------------+ + + + +----------------------------------------------------------+ + | +------+ /----------\ /----------\ | + | |k1, v1| |k1, f(k,v)| |k1, g(k,v)| | + | +------+ \----------/ \----------/ | + +----------------------------------------------------------+ +-------------------------------------------------------------------------------- + +The structure above is interpreted by the client as meaning the current value of +the k1 mapping is `g(f({k1, v1}))`, and that of the k2 mappings is +`f({k2, v1})`. More generally the current value of a mapping is always equal +to a recursive application of the functions in its sequence (treating the +'value' as an initial generator function). + +At this point we define a critical client side operation: + +[source] +---- +value resolve(chain, key) { + resolved = []; + transform = () -> {}; + for (function f : chain)) { + if (f.for(key)) { + transform = transform.andThen(f); + } else { + resolved.add(f); + } + } + value = transform.apply(); + resolved.add(value) + + asynchronously(server.replace(key.hash, chain, resolved)); + + return value; +} +---- + +Invocation of this operation resolves the current value of the key, and +asynchronously updates the servers representation: + +["ditaa", "resolve-operation", "png"] +-------------------------------------------------------------------------------- + +----+ +----------------------------------------------------------+ + | | | +------+ /----------\ +------+ /----------\ /----------\ | + |hash| => | |k1, v1| |k1, f(k,v)| |k2, v1| |k1, g(k,v)| |k2, f(k,v)| | + | | | +------+ \----------/ +------+ \----------/ \----------/ | + +----+ +----------------------------------------------------------+ + | + + resolve(chain, k1) + + | + V + +--------------------------------+ + | +------+ /----------\ +------+ | + | |k2, v1| |k2, f(k,v)| |k1, v2| | + | +------+ \----------/ +------+ | + +--------------------------------+ + | + + resolve(chain, k2) + + | + V + +-------------------+ + | +------+ +------+ | + | |k1, v2| |k2, v2| | + | +------+ +------+ | + +-------------------+ +-------------------------------------------------------------------------------- + +== Example Operations + +[source] +---- +void put(key, value) { + server.append(key.hash, function(put(key, value))); +} + +value get(key) { + return resolve(server.get(key.hash), key); +} + +value putIfAbsent(key, value) { + return resolve(server.getAndAppend(key.hash, operation(putIfAbsent(key, value))), key); +} +---- + +== Something to Ponder + +Mutative operations will have to trigger invalidation of the clients cached +entries for the corresponding hash. In eventual consistency the operation can +proceed and 'complete' without waiting for the other clients invalidations to +finish. When in strong consistency we have to wait for invalidations to +complete before the originating operation can complete. None of this is new, +but the interesting question surrounds when the invalidations should be +triggered. Generally this is a tradeoff between strong mutative operation and +get operation latency. diff --git a/docs/src/docs/asciidoc/developer/design.basics.adoc b/docs/src/docs/asciidoc/developer/design.basics.adoc new file mode 100644 index 0000000000..1721682d14 --- /dev/null +++ b/docs/src/docs/asciidoc/developer/design.basics.adoc @@ -0,0 +1,334 @@ += Ehcache Design Fundamentals + +:toc: + +== `CacheManager`, `Cache` and their dependencies + +As in the 1.x & 2.x line, Ehcache has the notion of a `CacheManager`, who manages `Cache` instances. Managing a `Cache` +means fulfilling a couple of roles: + + - Life cycling it: e.g. `.init()`, `.closing()` the `Cache`; + - Providing it with `Service` instance: A `CacheManager` comes with a set of base abstract services `Cache` can use + and that it will lifecycle too; but the `CacheManager` can lifecycle any amount of additional `Service` types that + gets registered with it. These `Service` can then be looked up, e.g. by `Cache` or other `Service` instances, using the + `ServiceProvider` interface; + - Finally, the `CacheManager` acts as a repository of alias'ed `Cache` instances. Unlike in the previous versions, `Cache` + instances aren't named, but are registered with the `CacheManager` under an _alias_. The `Cache` is never aware of this. + +This diagram tries to summarize the different roles: + +[plantuml] +.... +@startuml +interface CacheManager +interface Cache +interface Service +interface ServiceProvider + +CacheManager ..> Cache : <> +Cache ..> Service : <> +CacheManager ..> Service : <> +CacheManager ..> ServiceProvider : <> +ServiceProvider ..> Service : <> + +hide members +@enduml +.... + +A user will only mostly interact with the `CacheManager` and `Cache` API types... He may need to configure specific +`Service` types for his `Cache` instances to use. See <> + +=== The `CacheManager` + +While the `CacheManager` does act as a repository, it is _not_ possible to add a `Cache` directly to a `CacheManager`. +A `Cache` can be created by a `CacheManager`, which will then keep a reference to it, alias'ed to a user provided name. +To remove that `Cache` from the `CacheManager`, it has to be explicitly removed using `CacheManager.removeCache(String)`. +Upon that method successfully returning, the `Cache` 's status will be `Status.UNINITIALIZED` and as such will not be +usable anymore, see <> section below. + +=== The `Cache` + +A `Cache` is backed by a `Store` where all cached entries (i.e. key to value mappings) are held. The `Cache` doesn't know +what topology this `Store` is using; whether it's storing these entries on the JVM's heap, off the heap, on disk, on a remote +JVM or any combination of the above. + +When a `Cache` is being constructed, e.g. by the `CacheManager` on a `.createCache()` method invoke, the `CacheManager` +will lookup a `Store.Provider` which is one of the bundled `Service` types of Ehcache, asking it to create a `Store` based +on the `CacheConfiguration` used to configure the given `Cache`. That indirection, makes both the `Cache` as well as the +`CacheManager` ignorant of what topology this `Cache` is to use. Ehcache comes with a `DefaultStoreProvider` that will +be loaded by the `ServiceProvider`, should none be explicitly provided. That in turn will resolve the required `Store` +instance to be provided to the `Cache` being created. + +[plantuml] +.... +@startuml +interface Cache +interface Store + +Cache *--> Store +Store <|-- OnHeapStore +Store <|-- OffHeapStore + +package "on createCache()" <> { + interface CacheManager + interface ServiceProvider + interface Service + interface Store.Provider + + CacheManager ..> Cache : <> + CacheManager ..> ServiceProvider : <> + CacheManager ..> Store.Provider : <> + Service "*" <--* ServiceProvider + Service <|-- Store.Provider +} + +hide members +@enduml +.... + +The `Cache` also tries to never _fails_ on operations invoked, e.g. a get shouldn't result in throwing an exception if the +`Store` that backs it up uses serialization and fails to retrieve the mapping. Instead, Ehcache tries to be resilient and +will, by default, try to clear that mapping from its `Store` and return `null` instead to the user. It is the responsibility of the +`Cache` to handle the exceptions a `Store` may throw (the `Store` interface explicitly declares it throws +`CacheAccessException`, which is a checked exception). The `Cache` will delegate failures to the `ResilienceStrategy`, +which in turn is responsible for handling the failure. + +Currently, Ehcache only has a single `ResilienceStrategy`, which is supporting single-JVM deployments, and will try to +_heal_ the `Store` on failure and making the invoking action on a `Cache` a no-op. We'll add more `ResilienceStrategy` +and will make it pluggable, when we move on to distributed topologies. + +=== The new `UserManagedCache` + +The `UserManagedCache` are, as the name implies, managed by the user instead of being managed by a `CacheManager`. While +these instances are meant to be lightweight, short-lived ones, nothing prohibits a user from building a distributed +`UserManagedCache` if so desired. + +As the user manages that instance himself, he needs to provide all `Service` instances required by the `UserManagedCache`. +Also he'll need to invoke lifecycle methods on it (see <>) and finally keep a reference to it, as it +won't available in any `CacheManager`. + +[plantuml] +.... +@startuml +interface Cache +interface UserManagedCache +interface Service + +UserManagedCache --|> Cache +UserManagedCache ..> Service : <> + +hide members +@enduml +.... + +== State transitions + +A lifecycled instance, e.g. a `CacheManager` or a `UserManagedCache`, has three states represented by the +`org.ehcache.Status` enum: + + . `UNINITIALIZED`: The instance can't be used, it probably just got instantiated or got `.close()` invoked on it; + . `MAINTENANCE`: The instance is only usable by the thread that got the _maintenance lease_ for it. Special maintenance + operations can be performed on the instance; + . `AVAILABLE`: The operational state of the instance, all operations can be performed by any amount of threads. + +[plantuml] +.... +@startuml +[*] --> UNINITIALIZED + +UNINITIALIZED --> UNINITIALIZED : transition failure + +UNINITIALIZED --> AVAILABLE : init() +UNINITIALIZED --> MAINTENANCE : toMaintenance() + +AVAILABLE --> UNINITIALIZED : shutdown() +MAINTENANCE --> UNINITIALIZED : shutdown() +hide empty description +@enduml +.... + +State should only be maintained at the _higher_ user-visible API instance, e.g. a concrete `Cache` instance like `Ehcache`. +That means that it is the warrant for blocking operations during state transitions or on an _illegal state_. No need for +the underlying data structure to do so too (e.g. `Store`), as this would come to much higher cost during runtime. + +NOTE: A generic utility class `StatusTransitioner` encapsulate that responsibility and should be reusable across types that +require enforcing lifecycle constraints. + +== Configuration types and builders + +In the most generic sense, configuration types are used to _configure_ a given service, either _while_ it is being constructed or when it is used. +A builder exposes a user-friendly DSL to configure and build _runtime instances_ (e.g. `CacheManager`). Finally runtime +configuration types are configured from configuration types and used at runtime by the actual configured instance, +providing a way for the user to mutate the behavior of that instance at runtime in limited ways. + +=== Configuring stuff + +You don't necessarily ever get exposed to a _configuration_ for a given type being constructed. The builder can hide it +all from you and will create the actual configuration at `.build()` invocation time. Configuration types are always +immutable. Instances of these types are used to configure some part of the system (e.g. `CacheManager`, `Cache`, +`Service`, ...). If a given configured type has a requirement to modify it's configuration, an additional _runtime +configuration_ is introduced, e.g. `RuntimeCacheConfiguration`. That type will expose additional mutative methods for +attributes that are mutable. Internally it will also let consumers of the type register listener for these attributes. + +[plantuml] +.... +@startuml +interface CacheManagerBuilder +interface Configuration +interface CacheConfiguration +interface ServiceConfiguration +interface ServiceCreationConfiguration + +CacheManagerBuilder ..> Configuration : <> +Configuration *--> "*" ServiceCreationConfiguration +Configuration *--> "*" CacheConfiguration +CacheConfiguration *--> "*" ServiceConfiguration +hide members + +package runtime <> { + interface CacheManager + interface Service + interface RuntimeCacheConfiguration + interface Cache + + CacheManager <.. CacheManagerBuilder : <> + CacheManager ..> Service : <> + Cache ..> Service : <> + Cache <.. CacheManager : <> + Cache ..> RuntimeCacheConfiguration : <> +} +@enduml +.... + +==== Services creation, `ServiceCreationConfiguration`, `ServiceProvider` and `ServiceConfiguration` + +A special type of configuration is the `ServiceCreationConfiguration` type. +That configuration type indicates to the system to lookup the `ServiceFactory` to use to create the `Service` that's being configured. +Subclasses of that configuration type are accepted at the outermost level of configuration, `CacheManager` or `UserManagedCacheBuilder`, which is the only place where services will be looked up from a configuration. + +This is what happens underneath that call when the `CacheManager` looks up `Service` instances: + +For each `ServiceCreationConfiguration` + + . The service subsystem looks up whether it already has that `Service` + .. If it does, that instance returned + .. If it doesn't, it looks up all `ServiceFactory` it has for one that creates instances of that `Service` type. + ... If one is found in that `ServiceFactory` repository, it uses that to create the instance with the configuration + ... If none is found, it uses the JDK's `java.util.ServiceLoader` service to load `ServiceFactory` and recheck + .. If nothing could be found, an Exception is thrown + +After this, services are started and can be consummed by the different components. +For this, the `ServiceProvider` is passed to `Service` instances at start point. +Form there, calling into `ServiceProvider.getService(Class serviceType)` will enable to retrieve a defined service. + +NOTE: When `Service.start(ServiceProvider serviceProvider)` is called, the service subsystem is currently starting. +So while all `Service` instances are defined, they are not necessarily started which means your code in `start(...)` needs to limit itself to service lookups and not consumption. + +The `ServiceConfiguration` interface enables to define extra configuration to a `Service` when using it. + +=== Builder design guidelines + + - Copy the instance, apply modification and return the copy. Never modify and return `this` + - Accept other builders as input, instead of just the actual "other thing's" configuration + - Provide names methods for boolean or `Enum` based settings. Apply this while keeping in mind that we do not want + method explosion on the builder as a whole. + - Default values are to be handled inside the configuration classes and not duplicated inside the builder. + +== `javax.cache` API implications + +While we know we don't want to strictly go by the JSR-107 (aka JCache) API contract in the Ehcache3 APIs (e.g. `CacheLoader` & +`CacheWriter` contracts when concurrent methods on the `Cache` are invoked), we still need a way to have our JCache +implementation pass the TCK. It is important to at least read the specification with regards to any feature that's being +implemented and list dissimilarities as well as how they'll be addressed in the 107 module. + +== The `PersistentCacheManager` + +The `PersistentCacheManager` interface adds lifecycle methods to the `CacheManager` type. Those lifecycle methods enable +the user to completely destroy `Cache` instances from a given `CacheManager` (e.g. destroy the clustered state of a `Cache` entirely, +or remove all the data of a `Cache` from disk); as well as go into _maintenance mode_ (see <> section). + +=== `CacheManagerBuilder.with()` 's extension point + +A `CacheManagerBuilder` builds _at least_ a `CacheManager`, but its +`.with(CacheManagerConfiguration): CacheManagerBuilder` let's you build any subtype of `CacheManager` (currently +the supported types are a closed set of defined subtypes, but this could be extended to an open set later). + +[source,java] +---- + +PersistentCacheManager cm = newCacheManagerBuilder() // <1> + .with(new CacheManagerConfiguration()) // <2> + .build(true); // <3> +---- + +<1> the `T` of `CacheManagerBuilder` is still of `CacheManager` +<2> the `CacheManagerConfiguration` passed in to `.with` now narrows `T` down to `PersistentCacheManager` +<3> returns the instance of `T` built + +=== Locally persistent + +When building a `PersistentCacheManager` the `CacheManagerConfiguration` passed to the builder +would let one configure all persistent related aspects of `Cache` instances managed by the `CacheManager`, e.g. root +location for writing cached data to. + +=== Clustered topology + +In a Terracotta clustered scenario, all clustered `Cache` instances are considered persistent (i.e. will survive the +_client_ JVM restart). So the idea is to provide all clustered configuration passing such a +`CacheManagerConfiguration` instance, with all the Terracotta client configuration stuff, to the +`CacheManagerBuilder` at construction time. + +==== Persistence configuration + +Any given persistent `Cache` uses the lifecycle as described above in <>. Yet the data on disk, or +datastructures on disk to store. We think of states of those structures in these terms: + + . Inexistent, nothing there: nothing can be stored until these exist; + . Online: the datastructures are present (with or without any data), referenced by the `Store` and the `Cache` is usable; + . Offline: the datastructures are present (with or without data), not referenced by any `Store` and nothing accesses it. + +[plantuml] +.... +@startuml +state store { +[*] --> UNINITIALIZED + +UNINITIALIZED --> UNINITIALIZED : transition failure + +UNINITIALIZED --> AVAILABLE : init +UNINITIALIZED --> MAINTENANCE : toMaintenance + +AVAILABLE --> UNINITIALIZED : shutdown +MAINTENANCE --> UNINITIALIZED : shutdown + +MAINTENANCE --> MAINTENANCE : destroy +MAINTENANCE --> MAINTENANCE : create +} + +state data { + [*] --> ONLINE : create + ONLINE --> [*] : destroy + + OFFLINE --> ONLINE : init + ONLINE --> OFFLINE : shutdown + OFFLINE --> [*] : destroy + OFFLINE --> OFFLINE : transition failure +} +hide empty description +@enduml +.... + +The user can fallback to the maintenance mode and the `Maintainable` instance returned when transitioning to the +maintenance state. That `Maintainable` can be used to: + + - `Maintainable.create()`, moving from nothing to online; _or_ + - `Maintainable.destroy()`, moving from offline to nothing + +the associated data for a given `Cache` on disk or within the Terracotta Server stripe(s). + +We also want to provide with configuration based _modes_ to automatically: + + - Create the persistent data structures if it doesn't already exit; + - Drop the persistent data structures if it exists, and create it anew; + - Verify the persistent data structures is there, otherwise fail fast; + - Create the persistent data structures expecting them to not be there, otherwise fail fast. diff --git a/docs/src/docs/asciidoc/developer/design.bootstrapping.adoc b/docs/src/docs/asciidoc/developer/design.bootstrapping.adoc new file mode 100644 index 0000000000..a0623020b3 --- /dev/null +++ b/docs/src/docs/asciidoc/developer/design.bootstrapping.adoc @@ -0,0 +1,11 @@ += Bootstrapping design doc + +:toc: + +== Configuration + +=== Extending the config + +== Services + +The +CacheManager+ will create a +org.ehcache.spi.ServiceProvider+. The +ServiceProvider+ will use Java's +java.util.ServiceLoader+ to find all +ServiceFactory+ on the classpath. diff --git a/docs/src/docs/asciidoc/developer/design.resilience.adoc b/docs/src/docs/asciidoc/developer/design.resilience.adoc new file mode 100644 index 0000000000..dc12c8fd90 --- /dev/null +++ b/docs/src/docs/asciidoc/developer/design.resilience.adoc @@ -0,0 +1,182 @@ += Resilience + +:toc: + +== Configuration + +This is a summary of all the possible configurations. It's useful to you if you already have read the rest of the document +and want to see the configuration summarized in one place. If you haven't read the rest of the document yet, you won't +understand a thing. + +Read timeout:: + How long to wait on a read to the cluster. Applies to `get`, `getAll` and `containsKey` +Write timeout:: + How long to wait on a write operation on the cluster. Applies to everything else +Connection timeout:: + How long to wait when first establishing the connection to the server +Lease time:: + Duration of a lease between a client and a server as given by the server +Resilience strategy:: + Interface implementation telling how to answer when a given cache method fails due to a failing tier +Loader-writer resilience strategy:: + A resilience strategy that is aware of the loader-writer and so can take advantage of it in case of failure +Ledger queue length:: + How many mutative elements we keep on the client when waiting for the server to come back +Caching tier behavior in case of failure:: + How should the caching tier behave when the authoritative tier is gone (clear, server when possible, process updates locally) +Caching tier behavior in case of lease expiration and reconnect:: + How should the caching tier behave when the lease expires or when reconnecting to the authoritative tier (clear, reconcile, keep) + +== What can go wrong + +* Server fails to get an answer erratically because of network or something +* Server fails to send messages to the client erratically +* Server goes down +* Server failover +* Loader-writer backend fails + +== Strong vs Eventual + +Strong means: If the client asks for a value that was updated by some other server, it will always get the +latest value. + +In particular, it means that as soon as an update from the server might be missed, we need to clear +the caching tier. It also means that if we fail to set a value to the authoritative tier, we can't use +the cache anymore. On this key at least. + +Eventual means the correct value will eventually be there. So if we lose contact with the server, it is possible to keep +using a possibly stale cache entry. This means that a client with eventual consistency shouldn't need to clear its cache +at all. However, it means two things: + +* We need to be able to update the caching tier in that case +* We need to get updates from the server when the situation resumes + +=== Being too eventual + +We can argue that if a client stays disconnected for too long, it means the caching tier content is now too obsolete. There +are two ways to handle that: + +* Expiration should take care of it. So entries should have an expiration set to their expected time validity. +* We configure "too long" which will clear the cache + +== Clustering + +When using a distributed cache, your Ehcache client will connect to a remote Terracotta server (or multiple in case +of stripping) which act as the authoritative tier. + +This server data can be replicated to other servers called mirrors. How your client will handle being cut from +the cluster is defined in `ClusteringServiceConfiguration`. + +=== Lease + +A client has a lease with the server. And there's a heartbeat making sure nobody died. So if the client gets no heartbeat +from the server, it will decide that it is now on its own. When the heartbeat comes back, it will resume operation. + +From the other side, it also means that the server won't accept any write operation without getting an acknowledgment from +the client. So, until the lease expires, the client is guaranteed to stay consistent with the server, even if the server +is lost. + +=== Timeouts + +If a client doesn't receive an answer in the configured time, it will timeout. This can happen for different reasons +including a network cut, a long full GC or a server down. + +Three different types of timeouts have been defined. + +* Read: Operations reading data from the server +* Write: Operations modifying data on the server +* Connection: Establishing the connection with the server + +By default, all operation timeouts are set to 5 seconds. The default connection timeout is 150 seconds. + +Note that there is no lifecyle timeout. It doesn't feel useful. For a cache creation, not being able to create the cache +means you can't do anything anyway. For destruction, well... we rarely destroy. The leasing will take care of the timeout. + +Internally, while waiting for the timeout, the client might decide to retry, sleep between retries or do whatever it feels +useful to provide an answer. + +=== In case of failure + +A store can fail in two ways. + +First, it failed by itself. In that case, it will launch a `StoreAccessException` that will be caught by the resilience strategy. + +Second, something underneath failed. For instance, a call to a loader-writer failed. In that case, the original exception +is wrapped in a `StorePassThroughException`. This will let it pass right thought the store and be unwrapped and thrown +by the cache to the caller. + +=== After a failure + +We will now consider a server with no mirror. + +A failure can just be a hiccup. If that's the case, the call will timeout and resilience strategy will take care of the rest. + +If we lost connection, the client won't be able to renew the lease. It will go in resilience mode. It means + +* clearing the caching tier +* answering everything with the resilience strategy +* try to renew the lease in background + +**Unimplemented yet:** +Instead of clearing the caching tiers, more flexible strategies can be implemented. Here is a beginning of discussion. + +It is important to notice that the client won't fallback to the resilience strategy when the caching tier answers. +It also means that the caching tier might not receive updates from the server and become out of sync. + +This is independent of the consistency configured and configurable. You can pick the following strategies: + +* Rely on tier forever, so even if the server is officially lost +* Rely on tier on hiccups. The server will keep the caching tier until it declares the server lost for good +* Don't rely on tier. + +When the lease is renewed, a reconciliation could occur with the server to sync the caching tier. + +=== Active and mirror setup + +**Note:** Actual behavior to be tested + +It works the same as with a single server. Except that during a failover, the client will behaves. like if the underlying +server is having hiccups. Or is down if the failover takes too long (longer than the lease). + +The new active server will notify the client when ready. + +=== Reconnection + +When a server goes down and back again. On the same URL. The client will silently reconnect to it. + +== Loader writer + +A failing loader-writer throws exception to caller. A given implementation could implement it's own resilience strategy. + +Also, a resilience strategy can use the loader-write to answer. This is what the default Ehcache resilience strategy does +in presence of a loader-writer. + +=== Write behind + +The loader-writer should make sure, the write behind case is covered is using it. + +== Interruptions + +**Note:** To be tested + +When waiting on a call to a store, an interruption should allow to get out. It will then probably rely on the resilience +strategy (I'm not sure about that) or throw an exception right away. + +== Resilience Strategy + +The default resilience strategy (`RobustResilienceStrategy`) will behaves like a store that expires everything it receives +right away. + +* Return null on a read +* Write nothing +* Remove the failing key from all tiers + +In presence of a loader-writer, we use the `RobustLoaderWriterResilienceStrategy`. It behaves like if there was no +cache at all. + +* Load the value from the backend on a read +* Write the value to the backend on a write +* Remove the failing key from all tiers + + +We could imagine providing more built-in strategies. diff --git a/docs/src/docs/asciidoc/developer/design.tiering.adoc b/docs/src/docs/asciidoc/developer/design.tiering.adoc new file mode 100644 index 0000000000..b6c66f975f --- /dev/null +++ b/docs/src/docs/asciidoc/developer/design.tiering.adoc @@ -0,0 +1,68 @@ += Ehcache Tiering Model + +:toc: + +The `Store` in Ehcache is designed to hold data in a tiered model. +A cache will always have at least one tier, possible multiple. +As soon as there are multiple tiers, they are immediately divided between a single `AuthoritativeTier` and one or more `CachingTier`. +It is the `TieredStore` that wires the authority and caching tiers and provides a unified view of the `Store` for the Cache. + +. What is `CachingTier` ? + +- In a multi-tiered cache, this tier holds the most recently used data. Conceptually +the cost of getting a value from the Caching Tier is low compared to other lower +tiers. All the data that is present in `CachingTier`, has to be present in `AuthoritativeTier`. +The authority will not evict an entry which is present in a `CachingTier`, which is +always a subset of the authority. +- In a two tiers `CachingTier`, mappings are present in one tier or the other but never both at the same time. This contract is enforced by having all operations happen atomically from the higher tier's point of view. + +. What is `AuthoritativeTier` ? + +- This tier holds all the cached mappings at any given point in time, it thus acts as the authority. + +. How are they wired ? + +- All multi-tiered `Cache` have only one store i.e. `TieredStore` which wires the + logic between the `CachingTier` and the `AuthoritativeTier`. All the mutative operations + are written in a way that the mutating thread uninstalls the entry from `CachingTier` + after the mutation is complete in the authority. The `Store` has an invalidation listener + through which the authority listens on all the invalidations done in caching tier. + +. Faulting + +- Whenever an entry is not found in CachingTier it is faulted from AuthoritativeTier. +Even if there are multiple threads trying to get a value, all the threads will fight +the race for the lock and the winning thread will install a fault for that key and wait +for value to be fetched from the authority by the fault. All other +threads will read the most recently fetched value installed in CachingTier. +The important point is that whenever an entry is faulted from authority, the contract +ensures that till the entry resides in caching tier , it will not be removed/evicted +from authority. + +. Invalidations and Flushing + +- In situations when caching tier decides that an entry needs to be evicted/expired, +it has to inform authority that it can now evict that entry. Therefore, whenever an +entry is expired/evicted from caching tier, it flushes that information to authority +using invalidation listener. All the flushed entries are now marked as evictable in +authority. + +[plantuml] +.... +include::{includedir}/../../uml/put.puml[] +.... + * All writes directly go to the Authoritative tier.This tier has all the data such that caching tier always has a subset of authority + * On return, the mapping in the caching tier is invalidated. + * On eviction or expiry in the caching tier, the mapping is flushed to the authority. + +[plantuml] +.... +include::{includedir}/../../uml/get.puml[] +.... + * Any reading thread tries to get data from the caching tier and returns + * If the key is not found in caching tier, it is faulted from authority + * On return, the mapping faulted from the authority is installed in the caching tier. The install may fail under heavy contention to preserve correctness. + + + + diff --git a/docs/src/docs/asciidoc/developer/index.adoc b/docs/src/docs/asciidoc/developer/index.adoc new file mode 100644 index 0000000000..5a260204a5 --- /dev/null +++ b/docs/src/docs/asciidoc/developer/index.adoc @@ -0,0 +1,34 @@ += Ehcache 3 Developer Documentation + +== Code layout + +The core code of Ehcache is split in to three modules: + + . the link:module.api{outfilesuffix}[API module], which contains mainly interfaces. They are the core API to Ehcache (e.g. +Cache+, +CacheManager+) that users depend on. It also contains the entry points in terms of SPI (e.g. +Service+, +Store+, +AuthoritativeTier+ & +CachingTier+), that are used by other modules that in turn provide their implementations; + . the link:module.core{outfilesuffix}[Core module], that is composed of the plumbery that wires the API used by users with the SPI implementations present in packaged modules; and + . the link:module.impl{outfilesuffix}[Implementation module], containing the default implementation of +Cache+ and +CacheManager+, as well as implementations of core SPIs, such as +HeapResource+ that lets you create on-heap +Cache+ and +CachingTier+ instances. + +== Fundamental concepts + +=== Modular approach + +==== Services + +A `CacheManager` manages `Cache` instances, but also `Service` instances that can be used by either `Cache` or other `Service` instances. An example of `Service` being the `org.ehcache.spi.cache.Store.Provider`, it's the `Service` the `CacheManager` will use to create the `Store` instance underlying your `Cache`. + +`Service` are created by using the Java's https://docs.oracle.com/javase/6/docs/api/java/util/ServiceLoader.html[`java.util.ServiceLoader` service-provider loading facility]. It is used to locate all `org.ehcache.spi.service.ServiceFactory` implementations on the classpath. These are in turn used to create `Service` instances. Each `CacheManager` uses its own `org.ehcache.spi.ServiceLocator` facility to locate `Service` instances, which it then in turn life cycles. + +`Service` instances are configured by their own respective `ServiceConfiguration` at `Service.start()` invocation time. `CacheManager` and its `Service` instances can then use these services. In the case of the `org.ehcache.spi.cache.Store.Provider` instance, it is being used by the `CacheManager` to create a `Store` to back a `Cache`. Being a direct dependency of `Ehcache`, that `Service` interface is part of the core SPI. It defines a `createStore()` method that will be invoked by the `CacheManager` at `Cache` creation time. The `Store.Provider` implementation can introspect not only the `Store.Configuration` passed, but also all `ServiceConfiguration` instances associated with the `Cache` being created in order to decide what `Store` should be created (note: currently it only creates `OnHeapStore`, as this is the only topology supported... more to come). + +=== Configuration + ++org.ehcache.config.CacheConfiguration+, +org.ehcache.spi.service.ServiceConfiguration+, +org.ehcache.config.xml.XmlConfigurationParser+ + +NOTE: For more information on how the configuration is parsed, resolved and services are then bootstrapped, please read the link:design.bootstrapping{outfilesuffix}#configuration[Bootstrapping design doc]. + +== How to extend? + +=== SPIs ++org.ehcache.spi.service+, +org.ehcache.spi.cache+ + +=== Extension points diff --git a/docs/src/docs/asciidoc/developer/module.api.adoc b/docs/src/docs/asciidoc/developer/module.api.adoc new file mode 100644 index 0000000000..d29838d63d --- /dev/null +++ b/docs/src/docs/asciidoc/developer/module.api.adoc @@ -0,0 +1,33 @@ += API Module + +:toc: + +== API Module + +Contains all the interfaces, as well as exceptions, an Ehcache user would use explicitly. As a rule of thumb, you rarely want any actual code in this module. And when adding an interface, it should be a care one to the library, either from a user perspective, or from a SPI one. + +=== `CacheManager` and its `Cache` instances + +The `CacheManager` not only acts as a repository of `Cache` instances, but it also lifecycles them. The `CacheManager` is also responsible for lifecycling `Service` instances. These `Service` instances are provided by the `CacheManager` for its `Cache` instances to use. The `Cache` and `Service` instances can look up any `Service` by it's type by querying the `ServiceLocator`. + +`CacheManager` and `Cache` are two of the most important interfaces into Ehcache. They get supplemented by other interfaces such as `Expiry`, `CacheEvent` and other satellite types. + +=== Configuration types + +In order to create a `CacheManager`, you need to have a `CacheConfiguration` for each and every `Cache` instance you want to use. You also may need to configure some additional `Service` instances one or some of your `Cache` need (see below). + +A `CacheConfiguration` is read-only and as such immutable. The new `RuntimeCacheConfiguration` type exposes additional mutative methods on parameters that are tunable at runtime (i.e. once a `Cache` is in `Status.AVAILABLE`). + +=== SPI types + +In + +==== User-facing SPIs + +==== Generic `Service` instances and their `ServiceConfiguration` + +==== The `ServiceLocator` + +Every `CacheManager` has its own `ServiceLocator` instance. The `ServiceLocator` acts as a repository of concrete `Service` which can then be looked up by type. The `Service` instances it knows about are the ones provided to it at `CacheManager` construction time. `Service` instances can be injected directly, but will more generally be declared for use by providing a matching `ServiceConfiguration` to the `CacheManagerBuilder`. Not all `Service` are necessarily required to be user facing (through one of their `ServiceConfiguration` for instance). If a requested `Service` can't be found, it'll be looked up using Java's `ServiceLoader` facility from the classpath. + +The `ServiceLoader` fallback for `Service` lookups can be used by the end-user, but is expected to be used mainly by _anyone_ building an *Ehcache Distribution* (i.e. a so-called _überjar_ created by some mean from all the different modules that composes it). The idea is to only couple different `Service` to the minimal and the strictly required level. diff --git a/docs/src/docs/asciidoc/developer/module.clustering.adoc b/docs/src/docs/asciidoc/developer/module.clustering.adoc new file mode 100644 index 0000000000..00102bff0a --- /dev/null +++ b/docs/src/docs/asciidoc/developer/module.clustering.adoc @@ -0,0 +1,72 @@ += Terracotta's Clustering Module + +:toc: + +== Voltron + +What's Voltron? +What services does it provide? +Remoting? +Provider of resources to `Entity` instances? +Versioning? +How is _persistence_ addressed? +_Replication_? +_HA_? + +=== Voltron Storage Spec + + . Voltron provides `StorageService` service for as storage service for entities. + . The Storage service provided is sandboxed which will be used by the Entities to get `StorageChunk` s. + The allocation done for a particular Entity is scoped in a way, so that no other Entity + uses it or interferes with it. + . These chunks are fixed size resource(offheap, disk etc) allocations. + . To create a `StorageChunk` from storage service, one needs to provide the size of the chunk, + an identifier and the server pool id. + . The `StorageChunk`'s lifecycle is managed by the Entity'. It can create/destroy the chunks as per the need. + . An Entity can request for any number of `StorageChunk` s, limited only by the size of + the server side pool. + . Voltron internally sandboxes each individual chunk. (entityId:poolId:chunkId -> + provided `StorageChunk`) + . The `StorageChunk` provides apis to create, destroy and get `KeyValueStorage` s. It can request + for any number of `KeyValueStorage` s from the provided `StorageChunk`. + . The storage limit of `KeyValueStorage` is defined by the capacity of the `StorageChunk` it + it belongs to. + +=== Client Communication Service + . Client Communication service enables the entities to interact with the client. + . If an Entity needs to send messages to the client, the client communication service can be + invoked to send messages. + . The Client Communication Service is a different channel than the regular Endpoint which + clients use to send payload to the server. + . The point is that Client Communication service should not be misunderstood with the Endpoint + as it is not related to send acknowledgements back to the client. + . All the client -> server communications which happen through Endpoint channel are totally + different from the server -> client communications that happens through Client Communicator Service. + . Some of the use cases where this service can be used is sending invalidations about stale data, + requesting information from client, etc. + +=== Server-side entity + + . Actual providers of functionality; + . Responsible for: + .. Maintaining their state? + .. Acquiring releasing of resources? + .. ... ? + +=== Client-side entity + + . (Stateful)? Proxies to the server-side entities + . Rejoin? + . Reconnect? + . ... ? + +== Ehcache's clustering model + +=== Operations sequence diagrams + +==== putIfAbsent + +[plantuml] +.... +include::{includedir}/../../uml/putIfAbsentUml.puml[] +.... diff --git a/docs/src/docs/asciidoc/developer/module.core.adoc b/docs/src/docs/asciidoc/developer/module.core.adoc new file mode 100644 index 0000000000..5156d75178 --- /dev/null +++ b/docs/src/docs/asciidoc/developer/module.core.adoc @@ -0,0 +1,39 @@ += Core Module + +:toc: + +== Core Module + +Contains the +ServiceLocator+, as well as core SPI interfaces. + +=== `CacheManager` and its `Cache` instances + +The `CacheManager` not only acts as a repository of `Cache` instances, but it also lifecycles them. The `CacheManager` is also responsible for lifecycling `Service` instances. These `Service` instances are provided by the `CacheManager` for its `Cache` instances to use. The `Cache` and `Service` instances can look up any `Service` by it's type by querying the `ServiceLocator`. + +==== The `ServiceLocator` + +Every `CacheManager` has its own `ServiceLocator` instance. The `ServiceLocator` acts as a repository of concrete `Service` which can then be looked up by type. The `Service` instances it knows about are the ones provided to it at `CacheManager` construction time. `Service` instances can be injected directly, but will more generally be declared for use by providing a matching `ServiceConfiguration` to the `CacheManagerBuilder`. Not all `Service` are necessarily required to be user facing (through one of their `ServiceConfiguration` for instance). If a requested `Service` can't be found, it'll be looked up using Java's `ServiceLoader` facility from the classpath. + +The `ServiceLoader` fallback for `Service` lookups can be used by the end-user, but is expected to be used mainly by _anyone_ building an *Ehcache Distribution* (i.e. a so-called _überjar_ created by some mean from all the different modules that composes it). The idea is to only couple different `Service` to the minimal and the strictly required level. + +=== Configuration types + +==== Minimal configurations + +In order to create a `CacheManager`, you need to configure `CacheConfiguration` for each and every `Cache` instance you want to use. You also may need to configure some additional `Service` instances one or some of your `Cache` need. Whether configured directly at the `CacheManager` level, or at the `Cache` level, a given `ServiceConfiguration` will be used by a single `Service` instance. But the `Service` itself may configure itself further for a given `Cache`. + +==== The `interface Store.Provider extends Service` as an example + +There will be only one `Store.Provider` instance registered for that type that the `CacheManager` will lookup using the `ServiceLocator` when creating a `Cache` instance. The `Store.Provider` acts as a _factory_ for `Store` instances. That factory may delegate to specialized implementations of `Store.Provider` it _knows_ about (e.g. `OnHeapStoreProvider`, `DiskStoreProvider`, ...) by looking at the `Store.Configuration` it's been asked to configure the `Store` instance for. + +What that `Store.Provider` instance it'll be can either be specified by injecting one directly into the `ServiceLocator` backing up the `CacheManager` to create the `Cache` instances. Or have it fallback to the _classpath_ of the application to resolve one. Where the `ServiceLoader` facility is only resorted to, if none injected instance could be found. + +=== Services + +==== ServiceProvider + +=== SPI's + +==== Cache Store + +==== Tiering interfaces diff --git a/docs/src/docs/asciidoc/developer/module.impl.adoc b/docs/src/docs/asciidoc/developer/module.impl.adoc new file mode 100644 index 0000000000..8927b0b887 --- /dev/null +++ b/docs/src/docs/asciidoc/developer/module.impl.adoc @@ -0,0 +1,23 @@ += Implementation Module + +:toc: + +== Implementation Module + +=== Configuration + ++Configuration+ and +CacheConfiguration+ aren't used to create +CacheManager+ and +Cache+ instances, respectively. Instead they are used to reflect the configuration currently in use by a given +CacheManager+ or +Cache+. + +To instantiate new +CacheManager+ or +Cache+, you should use +CacheManagerBuilder+ or +CacheBuilder+ instances instead. + +==== Builders + +==== Mutability of 'configuration' instance + +=== Internals + +==== Store + +==== Tiering + +==== Serialization diff --git a/docs/src/docs/asciidoc/developer/module.xa.adoc b/docs/src/docs/asciidoc/developer/module.xa.adoc new file mode 100644 index 0000000000..9e87a91094 --- /dev/null +++ b/docs/src/docs/asciidoc/developer/module.xa.adoc @@ -0,0 +1,60 @@ += Transactions module + +:toc: + +== High level design + +Transaction support works through wrapping the `Store` that backs the `Cache` and storing a specific datastructure, a `SoftLock`, wrapping the cache values themselves. + +NOTE: No cache interactions are allowed outside of a transaction context. + +=== Operations under a transaction + +In the context of a transaction, cache operations are translated to `Commands` and stored in a transaction specific context. + +These commands cause `Store` mutations at specific stages in the lifecycle of the transaction. + +==== Prepare + +During the _prepare_ phase, all commands have their values, new and old if available, transformed into `SoftLock` instances. +These are saved in the `Store`. +A `SoftLock` from the prepare phase will carry the following information: + +* Transaction information allowing to identify which transaction created it, +* The new mapping value to install - can be `null` to indicate removal, +* The old mapping value when relevant. +Such `SoftLock` are considered _in doubt_. + +==== Commit + +During the _commit_ phase, _in doubt_ `SoftLock` will be retrieved from the `Store`. +That will be the case for all keys that were touched by the transaction. +As long as the retrieved `SoftLock` is linked to this transaction context, they will be replaced in the underlying `Store` with a `SoftLock` instance: + +* Holding the value for the mapping, identified as the _old_ value, +* disconnected from any transaction context. + +===== Commit in 1 phase + +In that case, we go directly from the command to the disconnected `SoftLock` but the principle remains the same. + +==== Rollback + +In case of _rollback_, _in doubt_ `SoftLock` will be retrieved from the `Store`. +That will be the case for all keys that were touched by the transaction. +As long as the retrieved `SoftLock` is linked to this transaction context, they will be replaced in the underlying `Store` with a `SoftLock` instance: + +* Holding the _old_ value, +* disconnected from any transaction context. + +===== Rollback before prepare + +Commands are simply dropped from the transaction context. + +== Interactions with other Ehcache features + +=== Eviction advisor + +By default the `XAStore` uses the `EvictionAdvisor` mechanism to protect all _in doubt_ `SoftLock`. + +Integration with user provided `EvictionAdvisor` is limited to `SoftLock` that are not in doubt and which by definition only contain an _old_ value, which will be the value passed to the `adviseAgainstEviction(K key, V value)` method. diff --git a/docs/src/docs/asciidoc/user/.asciidoctorconfig b/docs/src/docs/asciidoc/user/.asciidoctorconfig new file mode 100644 index 0000000000..8bfbdba299 --- /dev/null +++ b/docs/src/docs/asciidoc/user/.asciidoctorconfig @@ -0,0 +1,3 @@ + +:includedir: . +:gradle-rootdir: ../../../../.. diff --git a/docs/src/docs/asciidoc/user/107.adoc b/docs/src/docs/asciidoc/user/107.adoc index 9fd5851b67..6c627a94af 100644 --- a/docs/src/docs/asciidoc/user/107.adoc +++ b/docs/src/docs/asciidoc/user/107.adoc @@ -1,12 +1,12 @@ --- --- = The Ehcache 3.x JSR-107 Provider -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] [[overview]] @@ -39,7 +39,7 @@ You can use the JCache API to develop a complete application, without the need t === Setting up Ehcache as the Caching Provider for JCache -To use Ehcache as the caching provider for your application, add the file `javax.cache:cache-api:1.y.y.jar` (where `y.y` is a version-dependent string) to your application's classpath. +To use Ehcache as the caching provider for your application, add the file `javax.cache:cache-api:1.x.y.jar` (where `x.y` is a version-dependent string) to your application's classpath. This is of course assuming Ehcache is already on that same classpath. No other setup steps are required. @@ -59,7 +59,7 @@ Here is a code sample that demonstrates the usage of the basic JCache configurat [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=basicConfigurationExample] +include::{sourcedir39}/ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=basicConfigurationExample] ---- <1> Retrieves the default CachingProvider implementation from the application's classpath. @@ -91,7 +91,7 @@ you can still get to the underlying Ehcache `CacheRuntimeConfiguration`: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=mutableConfigurationExample] +include::{sourcedir39}/ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=mutableConfigurationExample] ---- <1> Create a JCache cache using the `MutableConfiguration` interface from the JCache specification. @@ -109,7 +109,7 @@ The way you do this is as follows: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheCacheManagerConfigurationExample] +include::{sourcedir39}/ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheCacheManagerConfigurationExample] ---- <1> Cast the `CachingProvider` into the Ehcache specific implementation `org.ehcache.jsr107.EhcacheCachingProvider`, <2> Create a configuration using the specific Ehcache `DefaultConfiguration` and pass it some `CacheManager` level configurations, @@ -122,7 +122,7 @@ When using this mechanism, no JCache `CompleteConfiguration` is used and so you [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheBasedConfigurationExample] +include::{sourcedir39}/ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheBasedConfigurationExample] ---- <1> Create an Ehcache `CacheConfiguration`. @@ -142,14 +142,14 @@ The following is an example of an XML configuration: [source%nowrap,xml,indent=0] ---- -include::{sourcedir34}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml[] +include::{sourcedir39}/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml[] ---- Here is an example of how to access the XML configuration using JCache: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107UsingXMLConfigExample] +include::{sourcedir39}/ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107UsingXMLConfigExample] ---- <1> Invoke `javax.cache.spi.CachingProvider.getCacheManager(java.net.URI, java.lang.ClassLoader)` @@ -179,7 +179,7 @@ You can do this at two different levels: [source%nowrap,xml,indent=0] ---- -include::{sourcedir34}/107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml[lines=17..-1] +include::{sourcedir39}/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml[lines=17..-1] ---- <1> Using the JCache service extension, you can enable MBeans by default. @@ -187,6 +187,7 @@ include::{sourcedir34}/107/src/test/resources/org/ehcache/docs/ehcache-107-mbean <3> The cache `overrideCache` will have both MBeans disabled, overriding the service configuration. <4> The cache `overrideOneCache` will have the statistics MBean disabled, whereas the management MBean will be enabled according to the service configuration. +[[supplement-jsr-107-configurations]] ==== Supplementing JCache cache configurations using Ehcache XML extensions You can also create `cache-templates`. @@ -201,7 +202,7 @@ To do this, add a `jsr107` service in your XML configuration file: [source%nowrap,xml,indent=0] ---- -include::{sourcedir34}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml[] +include::{sourcedir39}/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml[] ---- <1> First, declare a namespace for the JCache extension, e.g. `jsr107`. @@ -218,7 +219,7 @@ Using the above configuration, you can not only supplement but also override the [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107SupplementWithTemplatesExample] +include::{sourcedir39}/ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107SupplementWithTemplatesExample] ---- <1> Assume existing JCache configuration code, which is store-by-value by default @@ -283,5 +284,5 @@ If you need _Ehcache through JCache_ behaviour, the following shows the relevant [source%nowrap,xml,indent=0] ---- -include::{sourcedir34}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml[tag=cacheThroughCAS] +include::{sourcedir39}/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml[tag=cacheThroughCAS] ---- diff --git a/docs/src/docs/asciidoc/user/cache-event-listeners.adoc b/docs/src/docs/asciidoc/user/cache-event-listeners.adoc index 73317ed74a..6938d36340 100644 --- a/docs/src/docs/asciidoc/user/cache-event-listeners.adoc +++ b/docs/src/docs/asciidoc/user/cache-event-listeners.adoc @@ -1,16 +1,14 @@ --- --- = Cache Event Listeners -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] -NOTE: Clustering is not yet compatible with event listeners. - [[introduction]] == Introduction @@ -20,7 +18,7 @@ Listeners are registered at the cache level - and therefore only receive events [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEventListener] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEventListener] ---- <1> Create a `CacheEventListenerConfiguration` using the builder indicating the listener and the events to receive (in this case create and update events) @@ -59,7 +57,7 @@ Cache event listeners may also be added and removed while the cache is being use [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=registerListenerAtRuntime] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=registerListenerAtRuntime] ---- <1> Create a `CacheEventListener` implementation instance. @@ -75,7 +73,7 @@ Advanced users may want to tune the level of concurrency which may be used for d [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=configuringEventProcessingQueues] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=configuringEventProcessingQueues] ---- <1> Indicate the level of concurrency desired diff --git a/docs/src/docs/asciidoc/user/caching-concepts.adoc b/docs/src/docs/asciidoc/user/caching-concepts.adoc index e891eeb4f2..1cd3192ebc 100644 --- a/docs/src/docs/asciidoc/user/caching-concepts.adoc +++ b/docs/src/docs/asciidoc/user/caching-concepts.adoc @@ -1,12 +1,12 @@ --- --- = Concepts Related to Caching -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] [[data-freshness-and-expiration]] @@ -61,7 +61,29 @@ The remote server may optionally have a failover server providing improved high Since clustered storage comes with performance penalties due to such factors as network latency as well as for establishing client/server consistency, this tier, by nature, is slower than local off-heap storage. -image::EhcacheTerminology.png[] +[ditaa] +.... + +-----------------------------------+ + |cBE7 Application | + | | + | +-------------------------------+ | + | |c7DE Cache Manager | | Applications may have one or + | | | | more Cache Managers + | | +-------------------------+ | | + | | |c7AE Cache | | | A Cache Manager can manage + | | |+-----------------------+| | | many Caches + | | ||cFA4 Heap Tier || | | + | | |+-----------------------+| | | + | | || || | | Caches are configured to utilize + | | ||cFA4 Off Heap Tier || | | one or more Tiers for storing + | | || || | | cache entries + | | |+-----------------------+| | | + | | ||cA8D Disk Tier || | | Ehcache keeps the hotter data + | | |+-----------------------+| | | in faster tiers + | | +-------------------------+ | | + | +-------------------------------+ | + +-----------------------------------+ +.... == Topology Types @@ -78,7 +100,47 @@ This topology offers offers a selection of consistency options. A distributed topology is the recommended approach in a clustered or scaled-out application environment. It provides the best combination of performance, availability, and scalability. -image::ClusteredEhcacheTopology.png[] +[ditaa] +.... ++------------------------------+ +------------------------------+ +| Application | | Application | +| cBE7| | cBE7| +|+----------------------------+| |+----------------------------+| +|| Cache Manager c7DE|| || Cache Manager c7DE|| +||+--------------------------+|| ||+--------------------------+|| +||| Cache c7AE||| ||| Cache c7AE||| +|||+------------------------+||| |||+------------------------+||| +|||| Heap Tier cFA4|||| |||| Heap Tier cFA4|||| +|||+------------------------+||| |||+------------------------+||| +|||| |||| |||| |||| +|||| Off Heap Tier |||| |||| Off Heap Tier |||| +|||| cFA4|||| |||| cFA4|||| +|||+------------------------+||| |||+------------------------+||| +|||| Clustered Tier cA8D||||<--+ +-->|||| Clustered Tier cA8D|||| +|||+------------------------+||| | | |||+------------------------+||| +||+--------------------------+|| | | ||+--------------------------+|| +|+----------------------------+| | | |+----------------------------+| ++------------------------------+ | | +------------------------------+ + | | + V V + +------------------------------+ + | Terracotta Server | + | c7AE| + |+----------------------------+| + || Cache Clustered || + || Tier Manager cF55|| + |+----------------------------+| + || || + || Off Heap || + || Data Storage || + || cFA4|| + |+----------------------------+| + +------------------------------+ +.... +* Hot data is cached locally, hotter data in faster tiers +* Data cached by one application instance is available to all cluster members. +* Full data is available in the cluster. +* One or more mirror servers may be deployed to provide HA It is common for many production applications to be deployed in clusters of multiple instances for availability and scalability. However, without a distributed cache, application clusters exhibit a number of undesirable behaviors, such as: diff --git a/docs/src/docs/asciidoc/user/caching-patterns.adoc b/docs/src/docs/asciidoc/user/caching-patterns.adoc index deb0f435d1..b640647156 100644 --- a/docs/src/docs/asciidoc/user/caching-patterns.adoc +++ b/docs/src/docs/asciidoc/user/caching-patterns.adoc @@ -1,12 +1,12 @@ --- --- = Cache Usage Patterns -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] There are several common access patterns when using a cache. diff --git a/docs/src/docs/asciidoc/user/caching-terms.adoc b/docs/src/docs/asciidoc/user/caching-terms.adoc index 01ca2fe755..d1581a042a 100644 --- a/docs/src/docs/asciidoc/user/caching-terms.adoc +++ b/docs/src/docs/asciidoc/user/caching-terms.adoc @@ -1,12 +1,12 @@ --- --- = Terms Related to Caching -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == Basic Terms @@ -18,7 +18,8 @@ computation. Data that is already in the cache can be repeatedly accessed with m resources. === Cache Entry -A cache entry consists of a key and its mapped data value within the cache. +A cache entry consists of a key and its mapped data value within the cache. This is also sometimes referred to as a +_cache mapping_. === Cache Hit When a data entry is requested from cache and the entry exists for the given key, it is referred to as a cache hit @@ -28,6 +29,18 @@ When a data entry is requested from cache and the entry exists for the given key When a data entry is requested from cache and the entry does not exist for the given key, it is referred to as a cache miss (or simply, a _miss_). +=== Cache Access +Any time a data entry is requested from cache, the cache is _accessed_ no matter if the outcome was a _hit_ or a _miss_. + +=== Hit Ratio +It is the ratio of cache hits over cache accesses, or _hits_ / _accesses_. For instance, a cache accessed 100 times +that hit 90 times has a hit ratio of: _90_ / _100_ or _0.9_ or _90%_. + +=== Miss Ratio +It is the ratio of cache misses over cache accesses, or _misses_ / _accesses_. For instance, a cache accessed 100 times +that missed 30 times has a miss ratio of: _30_ / _100_ or _0.3_ or _30%_. It is the corollary of _hit ratio_ as +_hit ratio_ + _miss ratio_ is always _1.0_ or _100%_. + === System-of-Record (SoR) The authoritative source of truth for the data. The cache acts as a local copy of data retrieved from or stored to the system-of-record (SOR). The SOR is often a traditional database, although it might be a specialized file system or some @@ -41,7 +54,22 @@ data storage capacity). The removal of entries from the cache after some amount of time has passed, typically as a strategy to avoid stale data in the cache. +=== Off-Heap +When large caches put too much pressure on the GC, a common resort is to store the caches' contents _off-heap_, i.e: +still in the memory of the JVM process but out of reach of the garbage collector. The off-heap implementation Ehcache +uses is https://github.com/Terracotta-OSS/offheap-store/[Terracotta's port] of +http://gee.cs.oswego.edu/dl/html/malloc.html[dlmalloc] to Java backed by NIO direct ``ByteBuffer``s. + === Hot Data Data that has recently been used by an application is very likely to be accessed again soon. Such data is considered -_hot_. A cache may attempt to keep the _hottest_ data most quickly available, while attemping to choose the -_least hot_ data for eviction. +_hot_. A cache may attempt to keep the _hottest_ data most quickly available, while attempting to choose the +_least hot_ data for eviction. Following the Pareto Distribution, you ideally want all your hot data to fit into your +caches. + +=== Pareto Distribution +According to https://www.statisticshowto.datasciencecentral.com/pareto-distribution/[Data Science Central], _the Pareto +distribution is a skewed distribution with heavy, or “slowly decaying” tails (i.e. much of the data is in the tails)_. +This is more commonly known as the 80% / 20% rule. +The entire concept of caching is based on the Pareto Distribution, as caches are only effective when their hit ratio +reaches a certain level, i.e.: as a general rule of thumb 80% of your transactions should be served with cached data and +the remaining 20% by data coming from other, more expensive means. diff --git a/docs/src/docs/asciidoc/user/class-loading.adoc b/docs/src/docs/asciidoc/user/class-loading.adoc index 9bf5d96214..f5a6a1b768 100644 --- a/docs/src/docs/asciidoc/user/class-loading.adoc +++ b/docs/src/docs/asciidoc/user/class-loading.adoc @@ -1,12 +1,12 @@ --- --- = Class loading -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] [[about]] diff --git a/docs/src/docs/asciidoc/user/clustered-cache.adoc b/docs/src/docs/asciidoc/user/clustered-cache.adoc index 1e200b7f66..e16f1e2217 100644 --- a/docs/src/docs/asciidoc/user/clustered-cache.adoc +++ b/docs/src/docs/asciidoc/user/clustered-cache.adoc @@ -1,19 +1,59 @@ --- --- = Clustered Cache -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == Introduction Distributed caching allows you to harness additional benefits of horizontal scale-out, without losing on low latency offered by local on-heap tiers. -image::ClusteredEhcacheTopology.png[] +[ditaa] +.... ++------------------------------+ +------------------------------+ +| Application | | Application | +| cBE7| | cBE7| +|+----------------------------+| |+----------------------------+| +|| Cache Manager c7DE|| || Cache Manager c7DE|| +||+--------------------------+|| ||+--------------------------+|| +||| Cache c7AE||| ||| Cache c7AE||| +|||+------------------------+||| |||+------------------------+||| +|||| Heap Tier cFA4|||| |||| Heap Tier cFA4|||| +|||+------------------------+||| |||+------------------------+||| +|||| |||| |||| |||| +|||| Off Heap Tier |||| |||| Off Heap Tier |||| +|||| cFA4|||| |||| cFA4|||| +|||+------------------------+||| |||+------------------------+||| +|||| Clustered Tier cA8D||||<--+ +-->|||| Clustered Tier cA8D|||| +|||+------------------------+||| | | |||+------------------------+||| +||+--------------------------+|| | | ||+--------------------------+|| +|+----------------------------+| | | |+----------------------------+| ++------------------------------+ | | +------------------------------+ + | | + V V + +------------------------------+ + | Terracotta Server | + | c7AE| + |+----------------------------+| + || Cache Clustered || + || Tier Manager cF55|| + |+----------------------------+| + || || + || Off Heap || + || Data Storage || + || cFA4|| + |+----------------------------+| + +------------------------------+ +.... + * Hot data is cached locally, hotter data in faster tiers + * Data cached by one application instance is available to all cluster members. + * Full data is available in the cluster. + * One or more mirror servers may be deployed to provide HA To enable clustering with Terracotta, you will have to deploy a Terracotta server configured with clustered cache storage. For convenience Ehcache 3.1 introduced a downloadable kit that contains the Terracotta Server and also the required client libraries. @@ -58,42 +98,50 @@ Consequently, when resource capacity is reached and triggers eviction, the evict Here is a pictorial representation of the concepts explained above: -image::StoragePools.png[] +[ditaa] +.... ++-----------------------------------------+ +| Primary Server Resource | +| (196 GB) | +| | +| /---------------\ /---------------\ | +| | Shared Pool A | | Shared Pool B | | +| | (32 GB)cBEC| | (24 GB)cBEC| | +| \---------------/ \---------------/ | +| | +| /---------------\ /---------------\ | +| | Fixed Pool C | | Fixed Pool D | | +| | (32 GB)cA8D| | (16 GB)cA8D| | +| \---------------/ \---------------/ | +| cFAA| ++-----------------------------------------+ +| Secondary Server Resource | +| (96 GB) | +| | +| /---------------\ /---------------\ | +| | Shared Pool E | | Fixed Pool F | | +| | (28 GB)cBEC| | (12 GB)cA8D| | +| \---------------/ \---------------/ | +| cF55| ++-----------------------------------------+ +.... [[starting-server]] == Starting the Terracotta Server -You can start the Terracotta Server with the following configuration. -It contains the bare minimum configuration required for the samples in the rest of the document to work. +The snippet below defines two offheap resources named `primary-server-resource` and `secondary-server-resource` having +sizes `128MB` and `96MB` respectively: -[source,xml,indent=0] +[listing] ---- -include::{sourcedir34}/clustered/client/src/test/resources/configs/docs/tc-config.xml[] +offheap-resources=primary-server-resource:128MB,secondary-server-resource:96MB ---- -The above configuration defines two named server off-heap resources: - -<1> An off-heap resource of 128 MB size named `primary-server-resource`. -<2> Another off-heap resource named `secondary-server-resource` with 96 MB capacity. - -The rest of the document explains in detail how you can configure cache managers and caches to consume the server's off-heap resources. +This can either be defined in config properties file or during server startup. Assuming that you have the clustered Ehcache kit available locally, start with extracting the *ehcache-clustered* kit. -Change to your extracted directory and then execute the *start-tc-server* script as below to start the Terracotta server with the above configuration: - -On Windows: -[source,cmd] ----- -cd /server/bin -start-tc-server.bat -f /tc-config.xml ----- - -On Unix/Mac: -[source,bash] ----- -cd /server/bin -./start-tc-server.sh -f /tc-config.xml ----- +Change to your extracted directory and then execute the *start-tc-server* script located under `$KIT_DIR/server/bin` to start the Terracotta server. +You will then need to activate the cluster using `activate` command of config tool which is located under `$KIT_DIR/tools/bin`. NOTE: You will need to have `JAVA_HOME` point to a Java 8 installation while starting the Terracotta server. @@ -109,13 +157,13 @@ Here is a code sample that shows how to configure a cache manager with clusterin [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] ---- <1> Returns the `org.ehcache.config.builders.CacheManagerBuilder` instance. <2> Use the `ClusteringServiceConfigurationBuilder` static method `.cluster(URI)` for connecting the cache manager to the clustered storage at the `URI` specified that returns the clustering service configuration builder instance. Sample `URI` provided in the example is pointing to the clustered storage instance named "my-application" on the Terracotta server (assuming the server is running on localhost and port 9410). -<3> Auto-create the clustered storage if it doesn't already exist. +<3> Auto-create the clustered storage if it doesn't already exist. We also allow auto-create on reconnection since the cluster is not persistent. <4> Returns a fully initialized cache manager that can be used to create clustered caches. <5> Close the cache manager. @@ -126,7 +174,7 @@ This code sample demonstrates the usage of the concepts explained in the previou [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerWithServerSideConfigExample] +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerWithServerSideConfigExample] ---- <1> `defaultServerResource(String)` on `ClusteringServiceConfigurationBuilder` instance sets the default server off-heap resource for the cache manager. @@ -143,6 +191,9 @@ In this example, a dedicated pool of 32MB is allocated for clustered-cache from <7> Configures another cache (`shared-cache-2`) that shares the resource pool (`resource-pool-a`) with `shared-cache-1`. <8> Creates fully initialized cache manager with the clustered caches. +NOTE: When a cache is allocated a block of memory from a shared pool, it is retained forever and would never get +reallocated to another cache sharing the pool. + [[cluster-tier-manager-lifecycle]] == Ehcache Cluster Tier Manager Lifecycle @@ -150,15 +201,16 @@ When configuring a cache manager to connect to a cluster tier manager there are [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerLifecycle] +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerLifecycle] ---- - -<1> In auto-create mode if no cluster tier manager exists then one is created with the supplied configuration. +<1> In auto create mode if no cluster tier manager exists then one is created with the supplied configuration. If it exists and its configuration matches the supplied configuration then a connection is established. If the supplied configuration does not match then the cache manager will fail to initialize. -<2> In expected mode if a cluster tier manager exists and its configuration matches the supplied configuration then a connection is established. +<2> In auto create on reconnect mode we additionally support auto creation of any necessary entities when reconnecting to a cluster. +This behavior is useful in an non-persistent cluster in case the cluster loses its state due to a restart (planned or accidental). +<3> In expected mode if a cluster tier manager exists and its configuration matches the supplied configuration then a connection is established. If the supplied configuration does not match or the cluster tier manager does not exist then the cache manager will fail to initialize. -<3> In config-less mode if a cluster tier manager exists then a connection is established without regard to its configuration. +<4> In config-less mode if a cluster tier manager exists then a connection is established without regard to its configuration. If it does not exist then the cache manager will fail to initialize. [[clustered-cache]] @@ -168,7 +220,7 @@ If it does not exist then the cache manager will fail to initialize. [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheTieredExample] +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheTieredExample] ---- <1> Configuring the heap tier for cache. @@ -178,7 +230,7 @@ The equivalent XML configuration is as follows: [source%nowrap,xml,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=tieringSample] +include::{sourcedir39}/clustered/ehcache-client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=tieringSample] ---- <1> Specify the heap tier for cache. @@ -201,7 +253,7 @@ This comes with a latency penalty on the write operation required to give this g [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheConsistency] +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheConsistency] ---- <1> Specify the consistency level through the use of additional service configuration, using _strong_ consistency here. @@ -211,7 +263,7 @@ The equivalent XML configuration is as follows: [source%nowrap,xml,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=consistencySample] +include::{sourcedir39}/clustered/ehcache-client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=consistencySample] ---- <1> Specify the consistency level through a custom service configuration from the `clustered` namespace. @@ -241,12 +293,12 @@ The example code below shows how this can be implemented. [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=unspecifiedClusteredCacheExample] +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=unspecifiedClusteredCacheExample] ---- -<1> Configure the first cache manager with auto create +<1> Configure the first cache manager with auto create on reconnect <2> Build a cache configuration for a clustered `dedicated` resource pool <3> Create cache `my-dedicated-cache` using the cache configuration -<4> Configure the second cache manager as _expecting_ (auto create off) +<4> Configure the second cache manager as _expecting_ <5> Build a cache configuration for a clustered _unspecified_ resource pool, which will use the previously configured clustered _dedicated_ resource pool. <6> Create cache with the same name `my-dedicated-cache` and use the clustered _unspecified_ cache configuration diff --git a/docs/src/docs/asciidoc/user/common.adoc b/docs/src/docs/asciidoc/user/common.adoc index bd4e6fb781..54d2e69a7b 100644 --- a/docs/src/docs/asciidoc/user/common.adoc +++ b/docs/src/docs/asciidoc/user/common.adoc @@ -1,6 +1,7 @@ --- --- -ifndef::sourcedir34[] +ifndef::sourcedir39[] +:version: 3.9 :notBuildingForSite: true ifdef::basebackend-html[:outfilesuffix: .html] :source-highlighter: coderay @@ -10,9 +11,9 @@ ifdef::basebackend-html[:outfilesuffix: .html] :icons: font :iconfont-remote!: :iconfont-name: font-awesome.min -:sourcedir34: ../../../../../ +:sourcedir39: {gradle-rootdir} :imagesdir: images :sectanchors: :idprefix: :idseparator: - -endif::sourcedir34[] +endif::sourcedir39[] diff --git a/docs/src/docs/asciidoc/user/config-derive.adoc b/docs/src/docs/asciidoc/user/config-derive.adoc new file mode 100644 index 0000000000..dd4f35e94a --- /dev/null +++ b/docs/src/docs/asciidoc/user/config-derive.adoc @@ -0,0 +1,302 @@ +--- +--- += Configuration Derivation +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] + +ifdef::notBuildingForSite[] +include::{includedir}/menu.adoc[] +endif::notBuildingForSite[] + +== Principles + +The configuration derivation features allows a new Ehcache configuration object to be derived via a transformation on +an existing configuration object. This can be useful for: + + * pre-processing an externally sourced configuration, adding additional settings before creating the cache manager. + * processing the configuration of an existing cache manager to generate a new configuration. + +The basis of the configuration derivation API is the `Configuration.derive()` method that generates a builder seeded +with the configurations values + +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java[tag=deriveContract] +---- +<1> Creates a builder seeded with the configuration's state. +<2> Configurations built using the builder are then functionally identical to the original configuration. + +== Core Configuration Changes + +The configuration builder returned by the derive method provide direct methods for modifying core configuration concepts: + +.setting a custom classloader: +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java[tag=customClassLoader] +---- + +.adding a cache: +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java[tag=withCache] +---- +[cols=".^~a,^.^~d,.^~a"] +|=== +|[source,xml] +---- + + +---- +|→ +|[source,xml] +---- + + + Long.class + Object.class + 10 + + +---- +|=== + +.removing a cache: +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java[tag=withoutCache] +---- +[cols=".^~a,^.^~d,.^~a"] +|=== +|[source,xml] +---- + + + Long.class + Object.class + 10 + + +---- +|→ +|[source,xml] +---- + + +---- +|=== + +Updating a cache configuration uses a `UnaryOperator` that is run against a cache configuration +builder seeded using the existing cache configuration. + +.updating a cache, by adding a resource: +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java[tag=updateCache] +---- +[cols=".^~a,^.^~d,.^~a"] +|=== +|[source,xml] +---- + + + Long.class + Object.class + 10 + + +---- +|→ +|[source,xml] +---- + + + Long.class + Object.class + + 10 + 100 + + + +---- +|=== + +== Extended Configuration Changes + +Ehcache is a pluggable system, so modifying many of the more complex configurations requires modifying both service +creation configurations and service configurations: + +.adding a service creation configuration (constraining the default thread pool) +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java[tag=withServiceCreation] +---- +[cols=".^~a,^.^~d,.^~a"] +|=== +|[source,xml] +---- + + +---- +|→ +|[source,xml] +---- + + + + + +---- +|=== + +.updating a service creation configuration (changing the persistence path) +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java[tag=updateServiceCreation] +---- +[cols=".^~a,^.^~d,.^~a"] +|=== +|[source,xml] +---- + + + +---- +|→ +|[source,xml] +---- + + + +---- +|=== + +.adding a service configuration (setting a resilience strategy) +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java[tag=withService] +---- +[cols=".^~a,^.^~d,.^~a"] +|=== +|[source,xml] +---- + + + Long.class + Object.class + 10 + + +---- +|→ +|[source,xml] +---- + + + Long.class + Object.class + 10 + + com.example.ThrowingResilienceStrategy + + + +---- +|=== + +.updating a service configuration (changing a clustered cache's consistency) +[source,java,indent=0] +---- +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/ConfigurationDerivation.java[tag=updateService] +---- +[cols=".^~a,^.^~d,.^~a"] +|=== +|[source,xml] +---- + + + + + + + + + + 50 + + + + +---- +|→ +|[source,xml] +---- + + + + + + + + + + 50 + + + + +---- +|=== + +=== Removing a service +Removing a service often involves removing both service creation and a service configuration instances since a service +instance its configuration are usually strongly coupled: + +.removing a service (making a cache manager non-clustered) +[source,java,indent=0] +---- +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/ConfigurationDerivation.java[tag=removeService] +---- +<1> From all cache configurations... +<2> remove any existing `ClusteredStoreConfiguration` instances. +<3> Create a new resource pool builder... +<4> From the existing resource pools... +<5> filter out any clustered resources. +<6> Add all remaining pools to the new resource pools instance +<7> Finally remove the clustering service creation configuration + +[cols=".^~a,^.^~d,.^~a"] +|=== +|[source,xml] +---- + + + + + + + + + + 100 + 50 + + + + +---- +|→ +|[source,xml] +---- + + + + 100 + + + +---- +|=== diff --git a/docs/src/docs/asciidoc/user/eviction-advisor.adoc b/docs/src/docs/asciidoc/user/eviction-advisor.adoc index 4f76b0816b..e87a8021bf 100644 --- a/docs/src/docs/asciidoc/user/eviction-advisor.adoc +++ b/docs/src/docs/asciidoc/user/eviction-advisor.adoc @@ -1,12 +1,12 @@ --- --- = Eviction Advisor -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] == Introduction @@ -29,7 +29,7 @@ If the eviction is advised against, Ehcache will try to honor the preference of [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEvictionAdvisor] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEvictionAdvisor] ---- <1> Configure a constrained heap, as the eviction advisor is only relevant when mappings get evicted from the cache. diff --git a/docs/src/docs/asciidoc/user/examples.adoc b/docs/src/docs/asciidoc/user/examples.adoc index fa97e86f0e..ad9199d418 100644 --- a/docs/src/docs/asciidoc/user/examples.adoc +++ b/docs/src/docs/asciidoc/user/examples.adoc @@ -1,12 +1,12 @@ --- --- = Examples -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == Peeper - a simple message board @@ -22,7 +22,7 @@ is not running. While running, information about the operation of the Peeper ap While the sample application may be run, the application is _very_ simplistic -- the code implementing the sample is the interesting bit. Running the sample application requires the -use of https://gradle.org/[Gradle]. This sample may be accessed from GitHub by _cloning_ +use of https://gradle.org/[Gradle]. This sample may be accessed from GitHub by _cloning_ the Ehcache 3 git repository: [source,bash] @@ -35,7 +35,7 @@ git clone https://github.com/ehcache/ehcache3.git The first sample, located in +demos/00-NoCache+, is a base Peeper application that does *not* use caching. Each peep is stored in the database and all peeps are read -from the database to display the Peeper web page. To run this implementation: +from the database to display the Peeper web page. To run this implementation: [source,bash] ---- @@ -44,8 +44,7 @@ cd ehcache3/demos/00-NoCache ---- This builds the necessary components, starts a http://eclipse.org/jetty/[Jetty] web service, -and displays the URL of the web server on the console. The URL will be something like -+http://localhost:8080/ehcache-demos/00-NoCache/+. +and displays the URL of the web server on the console. The URL should be +http://localhost:8080/+. While running, lines like the following are displayed to the console: [source] @@ -79,8 +78,7 @@ cd ehcache3/demos/01-CacheAside ---- This builds the necessary components, starts a http://eclipse.org/jetty/[Jetty] web service, -and displays the URL of the web server on the console. The URL will be something like -+http://localhost:8080/ehcache-demos/01-CacheAside/+. +and displays the URL of the web server on the console. The URL, again, should be +http://localhost:8080/+. While running, lines like the following are displayed to the console: [source] @@ -104,5 +102,5 @@ Note the presence of the +Filling cache with peeps+, +Clearing peeps cache+, and [source,xml,indent=0] ---- -include::{sourcedir34}/107/src/test/resources/ehcache-example.xml[] +include::{sourcedir39}/ehcache-107/src/test/resources/ehcache-example.xml[] ---- diff --git a/docs/src/docs/asciidoc/user/expiry.adoc b/docs/src/docs/asciidoc/user/expiry.adoc index 7438d03764..5eae004e1a 100644 --- a/docs/src/docs/asciidoc/user/expiry.adoc +++ b/docs/src/docs/asciidoc/user/expiry.adoc @@ -1,18 +1,18 @@ --- --- = Expiry -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == Introduction As conveyed in the <> documentation, this is one of the key aspects of caching. -In Ehcache 3 this is addressed with the `Expiry` interface and its use in controlling the age of cache mappings. +In Ehcache 3 this is addressed with the `ExpiryPolicy` interface and its use in controlling the age of cache mappings. == Configuration @@ -20,15 +20,15 @@ Expiry is configured at the cache level, in Java or in XML: [source,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] ---- -<1> Expiry is configured at the cache level, so start by defining a cache configuration, -<2> then add to it an `Expiry`, here using the predefined _time-to-live_ one, configured with the required `Duration`. +<1> Expiry policy is configured at the cache level, so start by defining a cache configuration, +<2> then add to it an `ExpiryPolicy`, here using the predefined _time-to-live_ one, configured with the required `Duration`. [source,xml,indent=0] ---- -include::{sourcedir34}/xml/src/test/resources/configs/docs/expiry.xml[tags=expiry] +include::{sourcedir39}/ehcache-xml/src/test/resources/configs/docs/expiry.xml[tags=expiry] ---- <1> At the cache level, using the predefined _time-to-live_ again. @@ -40,17 +40,17 @@ no expiry:: this means cache mappings will never expire, time-to-live:: this means cache mappings will expire after a fixed duration following their creation, time-to-idle:: this means cache mappings will expire after a fixed duration following the time they were last accessed. -For Java, see `org.ehcache.expiry.Expirations` and the XSD for XML. +For Java, see `org.ehcache.config.builders.ExpiryPolicyBuilder` and the XSD for XML. Read on to implement your own expiration scheme. == Custom expiry -Supporting your own expiration scheme simply means implementing the `Expiry` interface: +Supporting your own expiration scheme simply means implementing the `ExpiryPolicy` interface: [source,java,indent=0] ---- -include::{sourcedir34}/api/src/main/java/org/ehcache/expiry/Expiry.java[lines=21..-1] +include::{sourcedir39}/ehcache-api/src/main/java/org/ehcache/expiry/ExpiryPolicy.java[lines=21..-1] ---- The main points to remember on the return value from these methods: @@ -65,13 +65,13 @@ Note that you can access the details of the mapping, thus providing expiration t Also when used from XML, Ehcache expects your expiry implementation to have a _no-arg_ constructor. -Once you have implemented your own expiry, simply configure it. +Once you have implemented your own expiry policy, simply configure it. In Java: [source,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=customExpiry] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=customExpiry] ---- <1> Simply pass your custom expiry instance into the cache builder. @@ -80,7 +80,7 @@ In XML: [source,xml,indent=0] ---- -include::{sourcedir34}/xml/src/test/resources/configs/docs/expiry.xml[tags=customExpiry] +include::{sourcedir39}/ehcache-xml/src/test/resources/configs/docs/expiry.xml[tags=customExpiry] ---- <1> Simply pass the fully qualified class name of your custom expiry. diff --git a/docs/src/docs/asciidoc/user/getting-started.adoc b/docs/src/docs/asciidoc/user/getting-started.adoc index f5fed08de3..6721782379 100644 --- a/docs/src/docs/asciidoc/user/getting-started.adoc +++ b/docs/src/docs/asciidoc/user/getting-started.adoc @@ -1,13 +1,11 @@ ---- ---- -= Ehcache 3.4 Documentation -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] += Ehcache 3.9 Documentation +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] We feel that the Ehcache 3.x API is a great improvement over the Ehcache 2.x API that has been used by millions of developers. We hope you enjoy this new generation of Ehcache! ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == Configuring Ehcache @@ -27,7 +25,7 @@ As with the previous versions of Ehcache, the canonical way of dealing with `Cac [source,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cachemanagerExample] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cachemanagerExample] ---- <1> The static method `org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder` returns a new `org.ehcache.config.builders.CacheManagerBuilder` instance. @@ -62,7 +60,7 @@ Here is a shorter version featuring 3 important things: [source,java,indent=0] ---- -include::{sourcedir34}/integration-test/src/test/java/org/ehcache/docs/GettingStartedWithStaticImports.java[tag=java7Example] +include::{sourcedir39}/integration-test/src/test/java/org/ehcache/docs/GettingStartedWithStaticImports.java[tag=java7Example] ---- <1> A `CacheManager` implements `Closeable` so can be closed automatically by a try-with-resources. A `CacheManager` must be closed cleanly. In a `finally` block, with a `try-with-resources` or (more frequent for normal applications) in some shutdown hook. @@ -76,7 +74,7 @@ You can create an XML file to configure a `CacheManager`. [source,xml,indent=0] ---- -include::{sourcedir34}/xml/src/test/resources/configs/docs/getting-started.xml[tags=gettingStarted] +include::{sourcedir39}/ehcache-xml/src/test/resources/configs/docs/getting-started.xml[tags=gettingStarted] ---- <1> Declares a `Cache` aliased to `foo`. @@ -110,7 +108,7 @@ In addition, for creating the cache manager with clustering support, you will ne [source,java,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] ---- <1> Returns the `org.ehcache.config.builders.CacheManagerBuilder` instance; @@ -139,7 +137,7 @@ A classical example would be using 3 tiers with a persistent disk storage. [source,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=threeTiersCacheManager] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=threeTiersCacheManager] ---- <1> If you wish to use disk storage (like for persistent `Cache` instances), you'll have to provide a @@ -158,7 +156,7 @@ The following illustrates how to configure a _time-to-live_ expiry. [source,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] ---- <1> Expiry is configured at the cache level, so start by defining a cache configuration, diff --git a/docs/src/docs/asciidoc/user/images/ClusteredEhcacheTopology.png b/docs/src/docs/asciidoc/user/images/ClusteredEhcacheTopology.png deleted file mode 100644 index 4bcdb053c2..0000000000 Binary files a/docs/src/docs/asciidoc/user/images/ClusteredEhcacheTopology.png and /dev/null differ diff --git a/docs/src/docs/asciidoc/user/images/EhcacheTerminology.png b/docs/src/docs/asciidoc/user/images/EhcacheTerminology.png deleted file mode 100644 index 2b8d5e1829..0000000000 Binary files a/docs/src/docs/asciidoc/user/images/EhcacheTerminology.png and /dev/null differ diff --git a/docs/src/docs/asciidoc/user/images/Get.png b/docs/src/docs/asciidoc/user/images/Get.png deleted file mode 100644 index 50d7082f20..0000000000 Binary files a/docs/src/docs/asciidoc/user/images/Get.png and /dev/null differ diff --git a/docs/src/docs/asciidoc/user/images/Put.png b/docs/src/docs/asciidoc/user/images/Put.png deleted file mode 100644 index 37b9552222..0000000000 Binary files a/docs/src/docs/asciidoc/user/images/Put.png and /dev/null differ diff --git a/docs/src/docs/asciidoc/user/images/StoragePools.png b/docs/src/docs/asciidoc/user/images/StoragePools.png deleted file mode 100644 index 66df0bbf3b..0000000000 Binary files a/docs/src/docs/asciidoc/user/images/StoragePools.png and /dev/null differ diff --git a/docs/src/docs/asciidoc/user/images/TiersHierarchy.png b/docs/src/docs/asciidoc/user/images/TiersHierarchy.png deleted file mode 100644 index bb3e7be448..0000000000 Binary files a/docs/src/docs/asciidoc/user/images/TiersHierarchy.png and /dev/null differ diff --git a/docs/src/docs/asciidoc/user/index.adoc b/docs/src/docs/asciidoc/user/index.adoc index 93cb8d99ba..62f1869de4 100644 --- a/docs/src/docs/asciidoc/user/index.adoc +++ b/docs/src/docs/asciidoc/user/index.adoc @@ -1,17 +1,15 @@ ---- ---- -= Ehcache 3.4 Documentation Overview -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] += Ehcache {version} Documentation Overview +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == Table of Contents -The Table of Contents provides an overview of the Ehcache 3.4 documentation on this site. +The Table of Contents provides an overview of the Ehcache 3.9 documentation on this site. Each topic below corresponds to a menu item at the left. === Basic Topics @@ -19,13 +17,13 @@ Each topic below corresponds to a menu item at the left. |=== | Topic | Description -|link:getting-started.html[Getting Started]|Learn the essentials -|link:tiering.html[Tiering options]|Using resources other than heap memory, combining them or not -|link:examples.html[Examples]|Examples of using Ehcache APIs -|link:xml.html[XML Configuration]|How to configure Ehcache using XML -|link:107.html[JCache aka JSR-107]|Using Ehcache as a JCache (javax.cache) aka JSR-107 provider -|link:xsds.html[Configuration XSD]|Reference XSD for configuration -|link:clustered-cache.html[Clustering with Terracotta]|Using Terracotta to enable clustering of caches in Ehcache +|link:getting-started{outfilesuffix}[Getting Started]|Learn the essentials +|link:tiering{outfilesuffix}[Tiering options]|Using resources other than heap memory, combining them or not +|link:examples{outfilesuffix}[Examples]|Examples of using Ehcache APIs +|link:xml{outfilesuffix}[XML Configuration]|How to configure Ehcache using XML +|link:107{outfilesuffix}[JCache aka JSR-107]|Using Ehcache as a JCache (javax.cache) aka JSR-107 provider +|link:xsds{outfilesuffix}[Configuration XSD]|Reference XSD for configuration +|link:clustered-cache{outfilesuffix}[Clustering with Terracotta]|Using Terracotta to enable clustering of caches in Ehcache |=== === General Topics @@ -33,9 +31,9 @@ Each topic below corresponds to a menu item at the left. |=== | Topic | Description -|link:caching-terms.html[Caching Terms]|Learn terms that relate to caching -|link:caching-concepts.html[Caching Concepts]|Learn concepts that relate to caching -|link:caching-patterns.html[Caching Patterns]|Learn common patterns of cache usage +|link:caching-terms{outfilesuffix}[Caching Terms]|Learn terms that relate to caching +|link:caching-concepts{outfilesuffix}[Caching Concepts]|Learn concepts that relate to caching +|link:caching-patterns{outfilesuffix}[Caching Patterns]|Learn common patterns of cache usage |=== @@ -44,14 +42,18 @@ Each topic below corresponds to a menu item at the left. |=== | Topic | Description -|link:migration-guide.html[Migration Guide]|Ehcache2 to Ehcache3 Migration Guide -|link:expiry.html[Expiry]|Data freshness and expiration -|link:thread-pools.html[Thread Pools]|Using and controlling threads in Ehcache 3 -|link:serializers-copiers.html[Serializers and Copiers]|Understanding and configuring serializers and copiers -|link:xa.html[Transactions Support]|Using Ehcache with a JTA transaction manager -|link:writers.html[Cache Writers]|Using cache writers (write-through and write-behind) -|link:usermanaged.html[User Managed Caches]|Creating and making use of caches directly -|link:cache-event-listeners.html[Cache Event Listeners]|Getting notified about events within the cache -|link:eviction-advisor.html[Eviction Advisor]|Affecting the way entries are chosen for eviction -|link:class-loading.html[Class loading]|Ehcache and `ClassLoader` interactions +|link:migration-guide{outfilesuffix}[Migration Guide]|Ehcache2 to Ehcache3 Migration Guide +|link:expiry{outfilesuffix}[Expiry]|Data freshness and expiration +|link:resilience{outfilesuffix}[Resilience]|Cache resilience in case of failures +|link:thread-pools{outfilesuffix}[Thread Pools]|Using and controlling threads in Ehcache 3 +|link:serializers-copiers{outfilesuffix}[Serializers and Copiers]|Understanding and configuring serializers and copiers +|link:xa{outfilesuffix}[Transactions Support]|Using Ehcache with a JTA transaction manager +|link:writers{outfilesuffix}[Cache Writers]|Using cache writers (write-through and write-behind) +|link:usermanaged{outfilesuffix}[User Managed Caches]|Creating and making use of caches directly +|link:cache-event-listeners{outfilesuffix}[Cache Event Listeners]|Getting notified about events within the cache +|link:eviction-advisor{outfilesuffix}[Eviction Advisor]|Affecting the way entries are chosen for eviction +|link:class-loading{outfilesuffix}[Class loading]|Ehcache and `ClassLoader` interactions +|link:osgi{outfilesuffix}[OSGi Deployment]|How to use Ehcache in an OSGi Environment +|link:config-derive{outfilesuffix}[Configuration Derivation]|How to derive a new configuration from an existing one +|link:performance{outfilesuffix}[Performance Tuning]|Ehcache Performance Tuning |=== diff --git a/docs/src/docs/asciidoc/user/management.adoc b/docs/src/docs/asciidoc/user/management.adoc index c2fca5967d..14b2c33069 100644 --- a/docs/src/docs/asciidoc/user/management.adoc +++ b/docs/src/docs/asciidoc/user/management.adoc @@ -1,12 +1,12 @@ --- --- = Management and Monitoring -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == Intro @@ -30,7 +30,7 @@ cache manager builder as a service: [source,java,indent=0] ---- -include::{sourcedir34}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=usingManagementRegistry] +include::{sourcedir39}/ehcache-management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=usingManagementRegistry] ---- <1> Optional: give a name to your cache manager by using a custom configuration <2> Create an instance of `org.ehcache.management.registry.DefaultManagementRegistryService`. This is only required because the service is used below. @@ -50,7 +50,7 @@ and a cache name to uniquely identify the cache on which you want to query stats [source,java,indent=0] ---- -include::{sourcedir34}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=capabilitiesAndContexts] +include::{sourcedir39}/ehcache-management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=capabilitiesAndContexts] ---- <1> Query the `ManagementRegistry` for the registered managed objects' capabilities. <2> Each capability has a unique name you will need to refer to it. @@ -74,7 +74,7 @@ a managed object. Examples of actions could be: clear caches, get their configur [source,java,indent=0] ---- -include::{sourcedir34}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=actionCall] +include::{sourcedir39}/ehcache-management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=actionCall] ---- <1> Put something in a cache. <2> Call the 'clear' action on the managed cache. Refer to the descriptors of the provider to get the exact list of @@ -92,7 +92,7 @@ manager by default, but sometimes you may want one `ManagementRegistry` to manag [source,java,indent=0] ---- -include::{sourcedir34}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=managingMultipleCacheManagers] +include::{sourcedir39}/ehcache-management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=managingMultipleCacheManagers] ---- <1> Create an instance of `org.ehcache.management.SharedManagementService` <2> Pass it as a service to the first cache manager diff --git a/docs/src/docs/asciidoc/user/menu.adoc b/docs/src/docs/asciidoc/user/menu.adoc index c4d1c74b4c..b6623d5837 100644 --- a/docs/src/docs/asciidoc/user/menu.adoc +++ b/docs/src/docs/asciidoc/user/menu.adoc @@ -17,6 +17,7 @@ General topics:: Advanced topics:: - link:./migration-guide{outfilesuffix}[Migration Guide] - link:./expiry{outfilesuffix}[Expiry] +- link:./resilience{outfilesuffix}[Resilience] - link:./thread-pools{outfilesuffix}[Thread Pools] - link:./serializers-copiers{outfilesuffix}[Serializers and Copiers] - link:./xa{outfilesuffix}[Transactions support] @@ -25,6 +26,9 @@ Advanced topics:: - link:./cache-event-listeners{outfilesuffix}[Cache Event Listeners] - link:./eviction-advisor{outfilesuffix}[Eviction Advisor] - link:./class-loading{outfilesuffix}[Class loading in Ehcache] +- link:./osgi{outfilesuffix}[OSGi Deployment] +- link:./config-derive{outfilesuffix}[Configuration Derivation] +- link:./performance{outfilesuffix}[Performance Tuning] Not published:: - link:./management{outfilesuffix}[Management and Monitoring] diff --git a/docs/src/docs/asciidoc/user/migration-guide.adoc b/docs/src/docs/asciidoc/user/migration-guide.adoc index f6d2ccdd74..324684b5a3 100644 --- a/docs/src/docs/asciidoc/user/migration-guide.adoc +++ b/docs/src/docs/asciidoc/user/migration-guide.adoc @@ -1,12 +1,12 @@ --- --- = Migration Guide -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] == Introduction @@ -25,9 +25,32 @@ Here we are creating a cache manager that has a default time-to-live (TTL) expir Before adding, we verify the expiry and set it on the `Element` only when different than the `Cache` expiry. -[source%nowrap,java,indent=0] +[source,java] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Ehcache2.java[tag=CustomExpiryEhcache2] +int defaultCacheTTLInSeconds = 20; + +CacheManager cacheManager = initCacheManager(); +CacheConfiguration cacheConfiguration = new CacheConfiguration().name("cache") + .maxEntriesLocalHeap(100) + .timeToLiveSeconds(defaultCacheTTLInSeconds); // <1> +cacheManager.addCache(new Cache(cacheConfiguration)); + +Element element = new Element(10L, "Hello"); + +int ttlInSeconds = getTimeToLiveInSeconds((Long)element.getObjectKey(), (String)element.getObjectValue()); // <2> + +if (ttlInSeconds != defaultCacheTTLInSeconds) { // <3> + element.setTimeToLive(ttlInSeconds); +} + +cacheManager.getCache("cache").put(element); + +System.out.println(cacheManager.getCache("cache").get(10L).getObjectValue()); + +sleep(2100); // <4> + +// Now the returned element should be null, as the mapping is expired. +System.out.println(cacheManager.getCache("cache").get(10L)); ---- <1> Expiry duration defined at the cache level. @@ -42,7 +65,7 @@ having dedicated logic in the methods called during the lifecycle of added and u [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Ehcache3.java[tag=CustomExpiryEhcache3] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Ehcache3.java[tag=CustomExpiryEhcache3] ---- <1> Defining custom expiry to be called during the lifecycle of added mappings. diff --git a/docs/src/docs/asciidoc/user/osgi.adoc b/docs/src/docs/asciidoc/user/osgi.adoc new file mode 100644 index 0000000000..f80b3a8a4b --- /dev/null +++ b/docs/src/docs/asciidoc/user/osgi.adoc @@ -0,0 +1,97 @@ +--- +--- += OSGi Deployment +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] + +ifdef::notBuildingForSite[] +include::{includedir}/menu.adoc[] +endif::notBuildingForSite[] + +== OSGi Compatible Bundles + +The following Ehcache artifacts are also valid OSGi bundles: + +* `org.ehcache:ehcache` ++ +includes: `api`, `core`, `impl`, `xml`, `107` + +* `org.ehcache:ehcache-clustered` +* `org.ehcache:ehcache-transactions` +* `org.ehcache.modules:ehcache-api` +* `org.ehcache.modules:ehcache-core` +* `org.ehcache.modules:ehcache-impl` +* `org.ehcache.modules:ehcache-xml` +* `org.ehcache.modules:ehcache-107` + +== Ehcache Service Lookup & OSGi + +To allow for the extension of its feature set and to support the internal modularity of the source code, Ehcache uses +a `java.util.ServiceLoader` based lookup system to discover the set of available functionalities at runtime. When +deployed as bundles in an OSGi environment this lookup mechanism is replaced by a lookup mechanism based on OSGi +components. Activation of the bundle containing the Ehcache core code will result in the following logging +[source,log] +---- +org.ehcache[org.ehcache.core.osgi.EhcacheActivator] : Detected OSGi Environment (core is in bundle: org.ehcache [13]): Using OSGi Based Service Loading +---- +In this mode, to enable transactional and/or clustered caching it is sufficient to just provision the +`org.ehcache:ehcache-transactions` and/or `org.ehcache:ehcache-clustered` bundles alongside the `org.ehcache:ehcache` +main bundle + +When in this mode of operation it is also possible to provision the Ehcache modules as independent bundles. A minimal +Ehcache configuration will need: + +* `org.ehcache.modules:ehcache-api` +* `org.ehcache.modules:ehcache-core` +* `org.ehcache.modules:ehcache-impl` + +Additional features can then be added by including one or more of: + +* `org.ehcache.modules:ehcache-xml` +* `org.ehcache.modules:ehcache-107` +* `org.ehcache:ehcache-clustered` +* `org.ehcache:ehcache-transactions` + +=== Reverting to JDK Service Lookup + +If the `org.ehcache.core.osgi` property is set to `"false"` as either a framework or system property then Ehcache will +fall back to the JDK based service lookup mechanism. This will result in the following log line: +[source,log] +---- +org.ehcache[org.ehcache.core.osgi.EhcacheActivator] : Detected OSGi Environment (core is in bundle: org.ehcache [13]): OSGi Based Service Loading Disabled Via System/Framework Property - Extensions Outside This Bundle Will Not Be Detected +---- +Enabling debug logging will show the detected set of services: +[source,log] +---- +org.ehcache[org.ehcache.core.osgi.EhcacheActivator] : JDK Service Loading Sees: + org.ehcache.impl.internal.store.heap.OnHeapStore$Provider + org.ehcache.impl.internal.store.offheap.OffHeapStore$Provider + org.ehcache.impl.internal.store.disk.OffHeapDiskStore$Provider + org.ehcache.impl.internal.store.tiering.TieredStore$Provider + org.ehcache.impl.internal.store.tiering.CompoundCachingTier$Provider + org.ehcache.core.spi.time.TimeSourceService + org.ehcache.spi.serialization.SerializationProvider + org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider + org.ehcache.core.events.CacheEventListenerProvider + org.ehcache.core.spi.service.ExecutionService + org.ehcache.core.spi.service.LocalPersistenceService + org.ehcache.impl.persistence.DefaultDiskResourceService + org.ehcache.spi.loaderwriter.WriteBehindProvider + org.ehcache.impl.internal.events.CacheEventDispatcherFactoryImpl + org.ehcache.spi.copy.CopyProvider + org.ehcache.core.spi.store.heap.SizeOfEngineProvider + org.ehcache.core.spi.service.StatisticsService + org.ehcache.spi.resilience.ResilienceStrategyProvider +---- + +In this configuration only features in the bundle with the Ehcache core classes are available. Using this service lookup +mechanism only the `org.ehcache:ehcache` (bundle symbolic name: `org.ehcache`) bundle can be successfully deployed. Use +of this bundle provides for most of the regular Ehcache features, but *does not support transactional or clustered +caching*. + +In order to use transactional or clustered caches in this environment the user must create their own bundle by merging +the `org.ehcache:ehcache` bundle with the `org.ehcache:ehcache-clustered` and/or `org.ehcache:ehcache-transactions` +bundles. Care must be taken when creating the custom bundle that all of the `META-INF/services` files are correctly +merged to allow for correct service discovery. + diff --git a/docs/src/docs/asciidoc/user/performance.adoc b/docs/src/docs/asciidoc/user/performance.adoc new file mode 100644 index 0000000000..f92b36d0ea --- /dev/null +++ b/docs/src/docs/asciidoc/user/performance.adoc @@ -0,0 +1,144 @@ +--- +--- += Performance Tuning +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] + +ifdef::notBuildingForSite[] +include::{includedir}/menu.adoc[] +endif::notBuildingForSite[] + +== Introduction + +Ehcache is fast. Really fast. +It was thought and made to be. +For example, a straight `get(key)` from the cache should be under 500ns. +That's fast and way sufficient for most purpose. + +Keep that in mind. Especially when comparing caching frameworks with a benchmark. +If something looks 10% faster it actually means 50ns faster. +Do you care that much about these 50ns to lose your precious time in benchmarking? + +That said, the way you configure Ehcache can have an impact on performance. +This document is a work in progress. +It will give you the performance impact of classical Ehcache configuration. +It will also give you some advance tuning possibilities. + +In the future, we will add some figures of what "slower" means. +However, always do your own benchmark. + +== Stores + +We probably know that the fastest store is on-heap. +Until you overwhelm the garbage collector. + +Your next best bet is off-heap. + +Try to avoid disk. +Use a remote drive or even an HDD at your own risk. + +We won't talk about clustering here because it's a different realm and its performance is based on many factors. + +The next question would be: "Should I use a single tier?" Is using a single-tier off-heap faster than two-tiers? +The answer depends on what you do with it. +Having two tiers is a bit slower on writing. +It is also a bit slower on reading when the data is not found in the caching tier (on-heap). +However, it will be faster for an entry that is indeed found in a higher tier. + +So again, it depends. +The more you follow the caching hypothesis that the same data is always reused (and so in the caching +tier), the more interesting having two or more tiers will be. As a general rule of thumb, a tier that has a +hit ratio of 50% or less is going to slow down the cache. Either enlarge such tier or completely get rid of it. + +== Byte Sizing + +A on-heap tier can be limited to a number of entries or a number of bytes. +When using bytes, we need to calculate the size of every object added to the cache. +This is of course much slower than calculating the number of entries. + +Size calculation is done using the https://github.com/ehcache/sizeof[SizeOf] library. +This library uses multiple magic tricks to do so. It selects the fastest one for a given environment. +Make sure of what is used to confirm you can't use a faster way on your platform. + +== Serialization + +Off-heap, disk and clustering need to serialize keys and values before storing them. +By default, custom super-fast serializers are used for `Long`, `Integer`, `Character`, `Double`, `Float`, `byte[]` and +`String` objects. +A slightly faster, customized Java serialization is used for other object types. +It is well-known for not being the fastest thing around. +Ehcache uses it because it is supported out of the box. +However, you can increase performance by providing your own serializers. + +== Copier + +By default, on-heap storage stores the entries by reference. +You might want to use a copier to store entries by value for whatever reason. +Out of the box, Ehcache bundles a copier that makes use of its serialization mechanism: +`org.ehcache.impl.copy.SerializingCopier`. +This can be much slower so watch out. + +== Loader-Writer + +Loader-writer is interesting for many reasons. +First, it protects you against the Thundering Herd. +However, it needs to pass through more complicated code to do so. + +We are expecting it to be a tiny bit slower. +But nothing noticeable enough to prevent you from using it. + +== Expiration + +A cache with no expiration will always be faster. + +=== Time to Live + +If you need to set an expiration time, TTL will be the faster one. +This is because the expiration time of an entry is calculated and updated ony when the entry is inserted or updated in the cache. +But it still requires an expiration check at access time. + +So you can expect a 2% drop in performance when using TTL. + +=== Time to Idle + +TTI is slower than TTL. +We need to recalculate and update the expiration time each time the entry is accessed. + +=== Custom + +In general, using a custom `ExpiryPolicy` will be the slowest. +Ehcache had optimised the handling to handle other cases. +When using a custom policy, you are on your own. + +== Allocation rate + +Ehcache won't allocate any object during a simple on-heap `get()`. +However, keep in mind that your configuration might do so. + +For instance, let's say you define an expiry policy like this. + +[source,java,indent=0] +---- +include::{sourcedir39}/integration-test/src/test/java/org/ehcache/docs/Performance.java[tag=expiryAllocation] +---- + +<1> Will instantiate a `Duration` every time an entry is accessed + +In this case, putting the `Duration` as a constant would solve the problem. + +== Time Source + +By default, Ehcache uses a `TimeSource` that will retrieve the system time at every call. It is fast but not super +duper fast. But it is super duper accurate. + +You can trade accuracy for speed by using a `TickingTimeSource`. Please read the javadoc for details but the concept is +that a timer will increase time instead of always retrieving system time. + +Switching to `TickingTimeSource`, even with a granularity of 1ms, can improve the performance of a `get` as high as 30%. + +The drawback is that a timer will continuously run. +Also, time might drift from the real time a bit. +Especially if the granularity of the `systemUpdatePeriod` is big. +Is your expiration needs to be really tightly linked with real time, it can be a problem. +But in most cases, the drifting doesn't matter much. diff --git a/docs/src/docs/asciidoc/user/resilience.adoc b/docs/src/docs/asciidoc/user/resilience.adoc new file mode 100644 index 0000000000..977fce5a7c --- /dev/null +++ b/docs/src/docs/asciidoc/user/resilience.adoc @@ -0,0 +1,96 @@ +--- +--- += Resilience +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] + +ifdef::notBuildingForSite[] +include::{includedir}/menu.adoc[] +endif::notBuildingForSite[] + +== Introduction + +In case of failures, Ehcache will do its best to do two things + +* Keep a coherent state for every tier +* Answer requests + +No failures should occur from Ehcache itself. However, the surrounding environment can fail. e.g.: + +* Disk failure of a disk tier +* Server loss of a clustered tier +* Network failure of a clustered tier + +[[resilience-strategy]] +== Resilience strategy + +A cache provides an answer to the caller even if the underlying tiers fail to do so. +For instance, if the cache fails to `get()` a value, it will return `null`. + +This behavior is handled by the `ResilienceStrategy`. +Each time a backend tier fails, it throws a `StoreAccessException` that is then handled by the resilience strategy. + +Ehcache provides two implementations by default. One use by the classical cache called the `RobustResilienceStrategy` and one +for a cache with a link:writers{outfilesuffix}[loader-writer] called the `RobustLoaderWriterResilienceStrategy`. + +The `RobustResilienceStrategy` behaves like an always empty cache where everything added to it is immediately evicted. +The result is that the caller will more or less behave as if the cache was disabled. + +The `RobustLoaderWriterResilienceStrategy` knows about the loader-writer and will try to keep it coherent. +It will also answer by calling it. So a `get()` will load the value from the loader-writer. +A `putIfAbsent()` will load the value from the loader-writer and see if it's there. +If not, it will write it, if it is, it will return it. + +Both strategies will also try to clean up the store that failed by removing the failed key or keys. + +[[clustering-resilience]] +== Clustering resilience + +Let's be honest, your on-heap storage won't fail. Your off-heap won't either. +Your disk storage might, rarely, unless you used a network drive. But then, you are asking for it. + +So, what will fail is clustering. + +=== Timeouts + +There are 3 timeouts that can be configured. + +* Read operation: For any read-only operations: get, contains, getAll, iterator step (default: 5 seconds) +* Write operation: For any write operation: put, remove, putAll, removeAll, clear, putIfAbsent, remove, replace (default: 5 seconds) +* Connection: When establishing connection to the server (default: 150 seconds) + +Timeouts can be configured using a dedicated builder or in XML. + +[source%nowrap,java,indent=0] +---- +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/Resilience.java[tag=timeoutsExample] +---- + +<1> Start setting timeouts using the build +<2> Set the read timeout to 10 seconds +<3> Set the write timeout to the default. This line could actually be skipped since that's what the builder will set it to anyway +<4> Set the connection timeout to be infinite + +=== Lease + +A client establishes a lease with the server. A lease is a contract between the client and the server. +It means the server can't make a change without getting the client acknowledgment. + +The client takes care of renewing the lease before it expires. +The connection is closed by one of the two parties if it fails to do so. +As soon as this happens, link:tiering{outfilesuffix}[caching tiers] are cleared. +The resilience strategy will then start answering every call. + +By default, a lease lasts 150 seconds. +It is decided by the server and can be overridden in the server configuration. + +=== Reconnect + +When a client gets disconnected, it will try to reconnect periodically. +As soon as it manages to reconnect, it will resume operation. +The caching tier will start filling again. + +This should rarely occur in a production environment. +It generally means there was a network failure that cuts the client from its server. +It won't occur if the active server goes down since we are expecting a failover to a mirror. diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index bfc5e72e3e..d91664e230 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -1,12 +1,12 @@ --- --- = Serializers and Copiers -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] [[overview]] @@ -119,7 +119,7 @@ Implement the following interface, from package `org.ehcache.spi.serialization`: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/api/src/main/java/org/ehcache/spi/serialization/Serializer.java[lines=20..-1] +include::{sourcedir39}/ehcache-api/src/main/java/org/ehcache/spi/serialization/Serializer.java[lines=20..-1] ---- As the Javadoc states, there are some constructor rules, see the section <> for that. @@ -256,7 +256,7 @@ Implement the following interface: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/api/src/main/java/org/ehcache/spi/copy/Copier.java[lines=19..-1] +include::{sourcedir39}/ehcache-api/src/main/java/org/ehcache/spi/copy/Copier.java[lines=19..-1] ---- * `T copyForRead(T obj)` is invoked when a copy must be made upon a read operation (like a cache `get()`), diff --git a/docs/src/docs/asciidoc/user/thread-pools.adoc b/docs/src/docs/asciidoc/user/thread-pools.adoc index a867b78da0..d3967f4a6f 100644 --- a/docs/src/docs/asciidoc/user/thread-pools.adoc +++ b/docs/src/docs/asciidoc/user/thread-pools.adoc @@ -1,12 +1,12 @@ --- --- = Thread Pools -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] [[introduction]] @@ -88,7 +88,7 @@ Following are examples of describing how to configure the thread pools the diffe [source%nowrap,java] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=diskStore] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=diskStore] ---- <1> Configure the thread pools. Note that the default one (`dflt`) is required for the events even when no event listener is configured. @@ -99,7 +99,7 @@ include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag= [source%nowrap,java] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=writeBehind] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=writeBehind] ---- <1> Configure the thread pools. Note that the default one (`dflt`) is required for the events even when no event listener is configured. @@ -110,7 +110,7 @@ include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag= [source%nowrap,java] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=events] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=events] ---- <1> Configure the thread pools. Note that there is no default one so all thread-using services must be configured with explicit defaults. @@ -124,7 +124,7 @@ Following is an example describing how to configure the thread pools the differe [source%nowrap,xml] ---- -include::{sourcedir34}/xml/src/test/resources/configs/docs/thread-pools.xml[tags=threadPools] +include::{sourcedir39}/ehcache-xml/src/test/resources/configs/docs/thread-pools.xml[tags=threadPools] ---- <1> Configure the thread pools. Note that there is no default one. diff --git a/docs/src/docs/asciidoc/user/tiering.adoc b/docs/src/docs/asciidoc/user/tiering.adoc index 0211ca9d1d..2f63824594 100644 --- a/docs/src/docs/asciidoc/user/tiering.adoc +++ b/docs/src/docs/asciidoc/user/tiering.adoc @@ -1,12 +1,13 @@ --- --- = Ehcache Tiering Options -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] +>>>>>>> Upgrade to Asciidoctor Gradle Plugin 2.0.0 ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] == Introduction @@ -48,7 +49,7 @@ For this, simply define the single resource in the cache configuration: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=offheapOnly] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=offheapOnly] ---- <1> Start with defining the key and value type in the configuration builder. @@ -65,7 +66,7 @@ A heap tier can be sized by entries or by size. [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=heap] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=heap] ---- <1> Only 10 entries allowed on heap. Eviction will occur when full. @@ -83,7 +84,7 @@ NOTE: Byte sizing has a runtime performance impact that depends on the size and [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=byteSizedTieredCache] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=byteSizedTieredCache] ---- <1> This will limit the amount of memory used by the heap tier for storing key-value pairs. @@ -103,7 +104,7 @@ If you wish to use off-heap, you'll have to define a resource pool, giving the m [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=offheap] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=offheap] ---- <1> Only 10 MB allowed off-heap. @@ -126,7 +127,7 @@ The faster and more dedicated the disk is, the faster accessing the data will be [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=persistentCacheManager] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=persistentCacheManager] ---- <1> To obtain a `PersistentCacheManager` which is a normal `CacheManager` but with the ability to @@ -166,7 +167,7 @@ In some cases, you might want to reduce the concurrency and save resources by re [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=diskSegments] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=diskSegments] ---- <1> Define an `OffHeapDiskStoreConfiguration` instance specifying the required number of segments. @@ -202,8 +203,22 @@ This leads to the typical pyramid shape for a multi-tiered setup. -- [.left] .Tiers hierarchy -image::TiersHierarchy.png[Tiers hierarchy] - +[ditaa] +.... + +-------------------+ + |cBE7 Heap Tier | + +-+-------------------+-+ + |cFA4 | + | Off Heap Tier | + | | ++-+-----------------------+-+ +|cA8D | +| | +| Disk Tier | +| | +| | ++---------------------------+ +.... Ehcache requires the size of the heap tier to be smaller than the size of the offheap tier, and the size of the offheap tier to be smaller than the size of the disk tier. While Ehcache cannot verify at configuration time that a count-based sizing for heap will be smaller than a byte-based sizing for another tier, you should make sure that is the case during testing. -- @@ -220,7 +235,7 @@ Here is an example using heap, offheap and clustered. [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/Tiering.java[tag=threeTiersCacheManager] +include::{sourcedir39}/clustered/ehcache-client/src/test/java/org/ehcache/clustered/client/docs/Tiering.java[tag=threeTiersCacheManager] ---- <1> Clustered specific information telling how to connect to the Terracotta cluster @@ -239,7 +254,7 @@ Let's revisit an example used earlier: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=threeTiersCacheManager] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=threeTiersCacheManager] ---- This is a cache using 3 tiers (heap, offheap, disk). @@ -253,7 +268,7 @@ Consider for instance this code: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=notShared] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=notShared] ---- You will end up with two caches that can contain 10 entries each. @@ -270,7 +285,7 @@ Thus you can't change the sizing of off-heap or disk tiers. [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/Tiering.java[tag=updateResourcesAtRuntime] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java[tag=updateResourcesAtRuntime] ---- <1> You will need to create a new `ResourcePools` object with resources of the required size, using `ResourcePoolsBuilder`. @@ -297,18 +312,21 @@ This method destroys a given cache. The cache shouldn't be in use by another cac [[multi-tier-sequence-flow]] == Sequence Flow for Cache Operations with Multiple Tiers - -In order to understand what happens for different cache operations when using multiple tiers, here are examples of _Put_ and _Get_ operations. -The sequence diagrams are oversimplified but still show the main points. - -[.float-group] +[.right] -- -[.left] .Multiple tiers using Put -image::Put.png[Put] -[.left] +[plantuml] +.... +include::{sourcedir39}/docs/src/docs/uml/put.puml[] +.... .Multiple tiers using Get -image::Get.png[Get] +[plantuml] +.... +include::{sourcedir39}/docs/src/docs/uml/get.puml[] +.... +-- +In order to understand what happens for different cache operations when using multiple tiers, here are examples of _Put_ and _Get_ operations. +The sequence diagrams are oversimplified but still show the main points. You should then notice the following: @@ -318,5 +336,5 @@ You should then notice the following: * A full cache miss (the value isn't on any tier) will always go all the way down to the authoritative tier. NOTE: The slower your authoritative tier, the slower your `put` operations will be. -For a normal cache usage, it usually doesn't matter since`get` operations are much more frequent than`put` opreations. +For a normal cache usage, it usually doesn't matter since`get` operations are much more frequent than`put` operations. The opposite would mean you probably shouldn't be using a cache in the first place. diff --git a/docs/src/docs/asciidoc/user/usermanaged.adoc b/docs/src/docs/asciidoc/user/usermanaged.adoc index d963b1311e..87aac48e79 100644 --- a/docs/src/docs/asciidoc/user/usermanaged.adoc +++ b/docs/src/docs/asciidoc/user/usermanaged.adoc @@ -1,12 +1,12 @@ --- --- = User managed caches -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] [[overview]] @@ -44,7 +44,7 @@ The interface definition is shown in this code: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/api/src/main/java/org/ehcache/UserManagedCache.java[lines=17..-1] +include::{sourcedir39}/ehcache-api/src/main/java/org/ehcache/UserManagedCache.java[lines=17..-1] ---- === User Managed Persistent Cache @@ -63,7 +63,7 @@ The interface definition is shown in this code: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/api/src/main/java/org/ehcache/PersistentUserManagedCache.java[lines=17..-1] +include::{sourcedir39}/ehcache-api/src/main/java/org/ehcache/PersistentUserManagedCache.java[lines=17..-1] ---- [[code-examples]] @@ -75,7 +75,7 @@ Here is a simple example showing a basic lifecycle of a user managed cache: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] ---- <1> Create a `UserManagedCache` instance. You can either pass `true` to have the builder `init()` it for you, @@ -102,7 +102,7 @@ If you want to use disk persistent cache, you will need to create and lifecycle [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=persistentUserManagedCache] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=persistentUserManagedCache] ---- <1> Create the persistence service to be used by the cache for storing data on disk. @@ -125,7 +125,7 @@ For more information on cache event listeners, see the section < Provide the `ExecutorService` for ordered and unordered event delivery. diff --git a/docs/src/docs/asciidoc/user/writers.adoc b/docs/src/docs/asciidoc/user/writers.adoc index c748bdd676..e180f52f8e 100644 --- a/docs/src/docs/asciidoc/user/writers.adoc +++ b/docs/src/docs/asciidoc/user/writers.adoc @@ -1,19 +1,17 @@ --- --- = Cache Loaders and Writers -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] [[introduction]] == Introduction to Cache Loaders and Writers -NOTE: Ehcache clustering is not yet compatible with _cache-through_. - This section documents the specifics behind the cache-through implementation in Ehcache. Refer to the section <> if you are not familiar with terms like _cache-through_, _read-through_, _write-through_ or _system of record_. @@ -76,7 +74,7 @@ After this time has elapsed, the batch is processed even if incomplete. [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeThroughCache] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeThroughCache] ---- <1> We register a sample `CacheLoaderWriter` that knows about the mapping ("41L" maps to "zero"). @@ -88,7 +86,7 @@ The returned mapping will populate the cache and be returned to the caller. [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeBehindCache] +include::{sourcedir39}/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeBehindCache] ---- <1> For write-behind you need a configured `CacheLoaderWriter`. @@ -98,3 +96,5 @@ include::{sourcedir34}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t <5> Define the concurrency level of write-behind queue(s). This indicates how many writer threads work in parallel to update the underlying system of record asynchronously. <6> Enable the write coalescing behavior, which ensures that only one update per key per batch reaches the underlying system of record. + +NOTE: `BatchedWriteBehindConfigurationBuilder` configurations are not honoured by clustered caches. diff --git a/docs/src/docs/asciidoc/user/xa.adoc b/docs/src/docs/asciidoc/user/xa.adoc index 665d07e990..e03fb2b3d9 100644 --- a/docs/src/docs/asciidoc/user/xa.adoc +++ b/docs/src/docs/asciidoc/user/xa.adoc @@ -1,12 +1,12 @@ --- --- = XA transactional caches -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] == Introduction @@ -65,7 +65,7 @@ Here is an example: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testSimpleXACache] +include::{sourcedir39}/ehcache-transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testSimpleXACache] ---- <1> First start the Bitronix transaction manager. By default, Ehcache will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. @@ -96,7 +96,7 @@ Nothing special needs to be configured for this to happen, just ensure that the [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithWriteThrough] +include::{sourcedir39}/ehcache-transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithWriteThrough] ---- <1> First start the Bitronix transaction manager. @@ -122,7 +122,7 @@ Any attempt to access one outside of such context will result in `XACacheExcepti [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testNonTransactionalAccess] +include::{sourcedir39}/ehcache-transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testNonTransactionalAccess] ---- <1> First start the Bitronix transaction manager. @@ -151,7 +151,7 @@ Here is an example: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithThreeTiers] +include::{sourcedir39}/ehcache-transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithThreeTiers] ---- <1> First start the Bitronix transaction manager. @@ -174,7 +174,7 @@ You can create a XML file to configure a `CacheManager`, lookup a specific trans [source%nowrap,xml,indent=0] ---- -include::{sourcedir34}/transactions/src/test/resources/docs/configs/xa-getting-started.xml[tags=gettingStarted] +include::{sourcedir39}/ehcache-transactions/src/test/resources/docs/configs/xa-getting-started.xml[tags=gettingStarted] ---- <1> Declare a `TransactionManagerLookup` that will lookup your transaction manager. @@ -185,7 +185,7 @@ In order to parse an XML configuration, you can use the XmlConfiguration type: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithXMLConfig] +include::{sourcedir39}/ehcache-transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithXMLConfig] ---- <1> The Bitronix transaction manager must be started before the cache manager is initialized. @@ -198,7 +198,7 @@ And here is what the BitronixTransactionManagerLookup implementation looks like: [source%nowrap,java,indent=0] ---- -include::{sourcedir34}/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java[tag=BitronixLookup] +include::{sourcedir39}/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java[tag=BitronixLookup] ---- <1> The `TransactionManagerLookup` interface must be implemented and the offer a `no-arg` constructor. diff --git a/docs/src/docs/asciidoc/user/xml.adoc b/docs/src/docs/asciidoc/user/xml.adoc index 0f7796b346..622e4d15b9 100644 --- a/docs/src/docs/asciidoc/user/xml.adoc +++ b/docs/src/docs/asciidoc/user/xml.adoc @@ -1,12 +1,12 @@ --- --- = XML Configuration -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == Introduction @@ -79,14 +79,13 @@ A `` element may contain all the same child elements as a `> to inspire you. -== Property replacement in XML configuration files +WARNING: Processing of cache template configurations can be triggered lazily by actions that dynamically bind new caches + to existing cache templates. Errors within such templates may not be revealed until this lazy processing is triggered. -Java system properties can be referenced inside XML configuration files. -The property value will replace the property reference during the configuration parsing. +== Property replacement in XML configuration files -This is done by using the `${prop.name}` syntax. -It is supported in all attributes and elements values that accept the `${}` characters as legal characters. -This currently rules out all numbers, mostly used in sizing things, and identifiers, such as cache and template names. +Certain elements inside XML configuration files can use `${property-name}` syntax. The value of the given +system-property will replace the property reference during the configuration parsing. WARNING: If the system property does not exist, this will make the configuration parsing fail. @@ -98,6 +97,15 @@ A classical use case for this feature is for disk files location inside the `dir ---- <1> Here `user.home` will be replaced by the value of the system property, something like `/home/user` +Attributes within the core configuration that can use system properties are: + + * Local persistence directory (supports substitution within a string). + * Thread pool minimum and maximum size attributes. + * Write-behind queue size, concurrency, batch size and maximum batch delay. + * Cache TTI and TTL + * Core resource sizes (heap, offheap and disk) + * Disk store writer concurrency and segment count + == XML programmatic parsing NOTE: If you are obtaining your `CacheManager` through the JSR-107 API, what follows is done automatically @@ -105,7 +113,7 @@ NOTE: If you are obtaining your `CacheManager` through the JSR-107 API, what fol [source,java,indent=0] ---- -include::{sourcedir34}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlConfig] +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlConfig] ---- <1> Obtain a `URL` to your XML file's location <2> Instantiate an `XmlConfiguration` passing the XML file's URL to it @@ -118,14 +126,136 @@ to use a `` element from an XML file, e.g. the `/my-config.xml` [source,xml,indent=0] ---- -include::{sourcedir34}/xml/src/test/resources/configs/docs/template-sample.xml[tag=templateSample] +include::{sourcedir39}/ehcache-xml/src/test/resources/configs/docs/template-sample.xml[tag=templateSample] ---- Creating a `CacheConfigurationBuilder` of that `example` `` element, would be done as follows: [source,java,indent=0] ---- -include::{sourcedir34}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlTemplate] +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlTemplate] ---- <1> Creates a builder, inheriting the capacity constraint of 200 entries <2> The inherent properties can be overridden by simply providing a different value prior to building the `CacheConfiguration` + +== Programmatic configuration to XML + +Just like getting a cache manager configuration from its XML equivalent, the reverse translation is supported too. +You can instantiate an `XmlConfiguration` object by passing the cache manager `Configuration` +and the string representation of that object will give you the XML equivalent of the provided `Configuration`. + +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlTemplate] +---- +<1> Creates a builder, inheriting the capacity constraint of 200 entries +<2> The inherent properties can be overridden by simply providing a different value prior to building the `CacheConfiguration` + +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlTranslation] +---- +<1> Instantiate an `XmlConfiguration` passing the cache manager `Configuration` +<2> Retrieve the XML representation using the `toString` method. + +Not every programmatic configuration can be translated to its XML equivalent in this manner. +Translation is not supported if the cache manager configuration contains a cache with any one of the following configured: + +. `EvictionAdvisor` +. A custom `ExpiryPolicy` other than `timeToLiveExpiration` or `timeToIdleExpiration` from ExpiryPolicyBuilder` +. Using an instance of the following instead of their classes: +.. `Serializer` +.. `Copier` +.. `CacheLoaderWriter` +.. `CacheEventListener` +.. `ResilienceStrategy` + +== Multiple XML Configurations In One Document + +The `XmlMultiConfiguration` class and the associated `ehcache-multi.xsd` XML schema provide support for multiple Ehcache +configurations to be housed within a single XML container format. + +=== Multiple Ehcache Manager Configurations + +The simplest use of the multi-configuration features is to embed multiple cache manager configurations in a single xml +file: +[source,xml,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/resources/configs/docs/multi/multiple-managers.xml[] +---- +<1> A top-level `` container with namespace declarations for the `multi` and core schemas +<2> Each Ehcache configuration is embedded inside a `configuration` tag with a required (unique) `identity` attribute + +These embedded configurations can then be retrieved via an `XmlMultiConfiguration` instance built from the XML document. +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java[tag=multipleManagers] +---- +<1> The `XmlMultiConfiguration` is assembled from the XML resource. +<2> Once assembled the configuration is built. +<3> `Configuration` instances can then be retrieved using their identities. + +=== Multiple Cache Manager Variants + +Multiple variant configurations for a given manager can be provided by including a sequence of `` tags, each +with a required `type` attribute: +[source,xml,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/resources/configs/docs/multi/multiple-variants.xml[tag=variants] +---- + +A specific cache configuration can then be retrieved by choosing both a variant and an identity explicitly on retrieval. +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java[tag=multipleVariants] +---- +The samples above are just samples, variant types can be used to represent any kind of variation: development +vs production, clustered vs unclustered, red vs blue, etc. + +[NOTE] +Configurations with multiple variants must have a variant type specified when they are retrieved otherwise an +`IllegalStateException` will be thrown. Configurations without multiple variants will always return their single +configuration for all requested variants. + +=== Multiple Cache Manager Retrieval + +Multiple cache managers can be retrieved from an `XmlMultiConfiguration` by iterating over the configurations +`identities()`: +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java[tag=multipleRetrieval] +---- +<1> From a stream over the set of identities in a mult-configuration. +<2> Map each identity to it's unique (non-varianted) configuration. +<3> Alternatively, map each identity to a specific variant configuration. + +=== Building XML Multi Configurations + +`XmlMultiConfiguration` instance can be assembled and modified using the associated builder API. The previous examples +of parsing XML multi-configuration documents are all just simple invocations of the richer builder API. + +Configurations can be built from scratch as show below: +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java[tag=building] +---- +<1> Starting with an initially empty set of configurations. +<2> Add a configuration without variants. +<3> Add a configuration with two different variants: heap and offheap. +<4> Build the final configuration instance. + +They can also be built from existing configurations: +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java[tag=modifying] +---- +<1> Starting with an existing `XmlMultiConfiguration`. +<2> Remove the configuration with identity `"foo"`. + +Once built a multi-configuration can be retrieved in XML form: +[source,java,indent=0] +---- +include::{sourcedir39}/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java[tag=rendering] +---- +<1> Retrieving the XML as a rendered string. +<2> Retrieving the XML as a DOM (`org.w3c.Document`). diff --git a/docs/src/docs/asciidoc/user/xsds.adoc b/docs/src/docs/asciidoc/user/xsds.adoc index 18a500db6a..879fc7c3e7 100644 --- a/docs/src/docs/asciidoc/user/xsds.adoc +++ b/docs/src/docs/asciidoc/user/xsds.adoc @@ -1,12 +1,12 @@ --- --- = Ehcache XSDs -ifndef::sourcedir34[] -include::common.adoc[] -endif::sourcedir34[] +ifndef::sourcedir39[] +include::{includedir}/common.adoc[] +endif::sourcedir39[] ifdef::notBuildingForSite[] -include::menu.adoc[] +include::{includedir}/menu.adoc[] endif::notBuildingForSite[] == XSD namespaces and locations @@ -17,29 +17,53 @@ endif::notBuildingForSite[] ** Location for 3.2: `http://www.ehcache.org/schema/ehcache-core-3.2.xsd` ** Location for 3.3: `http://www.ehcache.org/schema/ehcache-core-3.3.xsd` ** Location for 3.4: `http://www.ehcache.org/schema/ehcache-core-3.4.xsd` +** Location for 3.5: `http://www.ehcache.org/schema/ehcache-core-3.5.xsd` +** Location for 3.6: `http://www.ehcache.org/schema/ehcache-core-3.6.xsd` +** Location for 3.7: `http://www.ehcache.org/schema/ehcache-core-3.7.xsd` +** Location for 3.8: `http://www.ehcache.org/schema/ehcache-core-3.8.xsd` +** Location for 3.9: `http://www.ehcache.org/schema/ehcache-core-3.9.xsd` +// needle_for_core_xsd * JSR-107 namespace: `http://www.ehcache.org/v3/jsr107` ** Location for 3.0: `http://www.ehcache.org/schema/ehcache-107-ext-3.0.xsd` ** Location for 3.1: `http://www.ehcache.org/schema/ehcache-107-ext-3.1.xsd` ** Location for 3.2: `http://www.ehcache.org/schema/ehcache-107-ext-3.2.xsd` ** Location for 3.3: `http://www.ehcache.org/schema/ehcache-107-ext-3.3.xsd` ** Location for 3.4: `http://www.ehcache.org/schema/ehcache-107-ext-3.4.xsd` +** Location for 3.5: `http://www.ehcache.org/schema/ehcache-107-ext-3.5.xsd` +** Location for 3.6: `http://www.ehcache.org/schema/ehcache-107-ext-3.6.xsd` +** Location for 3.7: `http://www.ehcache.org/schema/ehcache-107-ext-3.7.xsd` +** Location for 3.8: `http://www.ehcache.org/schema/ehcache-107-ext-3.8.xsd` +** Location for 3.9: `http://www.ehcache.org/schema/ehcache-107-ext-3.9.xsd` +// needle_for_107_xsd * Transactions namespace: `http://www.ehcache.org/v3/tx` ** Location for 3.0: `http://www.ehcache.org/schema/ehcache-tx-ext-3.0.xsd` ** Location for 3.1: `http://www.ehcache.org/schema/ehcache-tx-ext-3.1.xsd` ** Location for 3.2: `http://www.ehcache.org/schema/ehcache-tx-ext-3.2.xsd` ** Location for 3.3: `http://www.ehcache.org/schema/ehcache-tx-ext-3.3.xsd` ** Location for 3.4: `http://www.ehcache.org/schema/ehcache-tx-ext-3.4.xsd` +** Location for 3.5: `http://www.ehcache.org/schema/ehcache-tx-ext-3.5.xsd` +** Location for 3.6: `http://www.ehcache.org/schema/ehcache-tx-ext-3.6.xsd` +** Location for 3.7: `http://www.ehcache.org/schema/ehcache-tx-ext-3.7.xsd` +** Location for 3.8: `http://www.ehcache.org/schema/ehcache-tx-ext-3.8.xsd` +** Location for 3.9: `http://www.ehcache.org/schema/ehcache-tx-ext-3.9.xsd` +// needle_for_transactions_xsd * Clustering namespace: `http://www.ehcache.org/v3/clustered` ** Location for 3.1: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.1.xsd` ** Location for 3.2: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.2.xsd` ** Location for 3.3: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.3.xsd` ** Location for 3.4: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.4.xsd` +** Location for 3.5: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.5.xsd` +** Location for 3.6: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.6.xsd` +** Location for 3.7: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.7.xsd` +** Location for 3.8: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.8.xsd` +** Location for 3.9: `http://www.ehcache.org/schema/ehcache-clustered-ext-3.9.xsd` +// needle_for_clustered_xsd === Usage example [source,xml,indent=0] ---- -include::{sourcedir34}/107/src/test/resources/org/ehcache/docs/public-xsds-location.xml[tag=xsdLocations] +include::{sourcedir39}/ehcache-107/src/test/resources/org/ehcache/docs/public-xsds-location.xml[tag=xsdLocations] ---- [[core]] @@ -47,7 +71,7 @@ include::{sourcedir34}/107/src/test/resources/org/ehcache/docs/public-xsds-locat [source,xsd,indent=0] ---- -include::{sourcedir34}/xml/src/main/resources/ehcache-core.xsd[lines=18..-1] +include::{sourcedir39}/ehcache-xml/src/main/schema/ehcache-core.xsd[lines=18..-1] ---- [[jsr-107-extension]] @@ -55,12 +79,12 @@ include::{sourcedir34}/xml/src/main/resources/ehcache-core.xsd[lines=18..-1] [source,xsd,indent=0] ---- -include::{sourcedir34}/107/src/main/resources/ehcache-107ext.xsd[lines=18..-1] +include::{sourcedir39}/ehcache-107/src/main/resources/ehcache-107-ext.xsd[lines=18..-1] ---- == XA transactions extension [source,xsd,indent=0] ---- -include::{sourcedir34}/transactions/src/main/resources/ehcache-tx-ext.xsd[lines=18..-1] +include::{sourcedir39}/ehcache-transactions/src/main/resources/ehcache-tx-ext.xsd[lines=18..-1] ---- diff --git a/docs/src/docs/uml/cache-through-sequence.puml b/docs/src/docs/uml/cache-through-sequence.puml new file mode 100644 index 0000000000..694b3059a4 --- /dev/null +++ b/docs/src/docs/uml/cache-through-sequence.puml @@ -0,0 +1,60 @@ +@startuml + +participant "Client A" as A +participant "Server" as S +participant "Client B" as B + +participant "SOR" as SOR + +[o-> B: put(k, u) +activate B + +[o-> A: put(k, v) +activate A + +B-> S: append(put(k, u)) +activate S +B<--S +deactivate S + +A-> S: append(put(k, v)) +activate S +A<--S +deactivate S + +critical +B-> S: get(k.hash) +activate B +activate S +B<--S: chain +deactivate S + +B-> SOR: write(k, u) +B-> SOR: write(k, v) + +B-> S: replace(k.hash, chain, {k, v}) +activate S +B<--S: success +deactivate S +deactivate B +end +[<--B +deactivate B + +critical +A-> S: get(k.hash) +activate A +activate S +A<--S: chain +deactivate S + +A-> S: replace(k.hash, {k, v}, {k, v}) +activate S +A<--S: success (no-op) +deactivate S +deactivate A +end +[<--A: +deactivate A + +@enduml diff --git a/docs/src/docs/uml/clustered-loader-writer.puml b/docs/src/docs/uml/clustered-loader-writer.puml new file mode 100644 index 0000000000..fb0ffb4fa1 --- /dev/null +++ b/docs/src/docs/uml/clustered-loader-writer.puml @@ -0,0 +1,61 @@ +@startuml + +participant "SERVER" as S + +title LOADER-WRITER + +[o-> C1: PUT(K1, V1) +critical +C1 -> S: LOCK(H(K1)) +activate S +C1 -> SOR: WRITE(K1, V1) +C1 -> S: APPEND(K1, V1) +C1 -> S: UNLOCK(K1) +deactivate S +end + +[o-> C2: PUT(K1, V2) +critical +C2 -> S: LOCK(H(K1)) +activate S +C2 -> SOR: WRITE(K1, V2) +C2 -> S: APPEND(K1, V2) +C2 -> S: UNLOCK(K1) +deactivate S +end + +== GETS == + +[o-> C1: GET(K1) +C1 -> S: GET(K1) +alt if its a miss +critical +C1 -> S: LOCK(H(K1)) +activate S +C1 -> SOR: LOAD(K1) +C1 -> S: APPEND(K1, V3) +C1 -> S: UNLOCK(K1) +deactivate S +end +end + +newpage WRITE-BEHIND + +[o-> C1: PUT(K1, V1) +C1 -> S: APPENDWITHWRITER(K1, V1) +...Asynchronously with replaceAtHead... +loop through chain until resolved +critical +C1 -> S: LOCK(H(K1)) +activate S +C1 -> SOR: WRITE(K1, V1) +C1 -> S: UNLOCK(K1) +end +deactivate S +end + +[o-> C2: PUT(K1, V2) +C2 -> S: APPENDWITHWRITER(K1, V2) + +@enduml + diff --git a/docs/src/docs/uml/clustered-writebehind.puml b/docs/src/docs/uml/clustered-writebehind.puml new file mode 100644 index 0000000000..2db6de8d5c --- /dev/null +++ b/docs/src/docs/uml/clustered-writebehind.puml @@ -0,0 +1,73 @@ +@startuml +skinparam sequenceMessageAlign center + +participant "SERVER" as S + +title CLUSTERED WRITE-BEHIND + +||| +== PUT == + +[o-> C1: PUT(K1, V1) +C1 -> S: APPENDWITHWRITER(K1, V1) + +note over S + Server pins the hash entry +end note + +||| +== GET == + +[o-> C1: GET(K1) +C1 -> S: GET(H(K1)) + +note over C1 + Client resolve the chain as usual + but replaceAtHead won't be called +end note + +alt if its a miss + critical + C1 -> S: LOCK(H(K1)) + note over S + LOCK returns chain + end note + alt if chain is empty + C1 -> SOR: LOAD(K1) + SOR --> C1: V3 + C1 -> S: APPEND(K1, V3) + C1 -> S: UNLOCK(H(K1)) + end + end +end + +||| +== Update SOR == + +note over S + When a chain size is greater than configured queue size, + server selects a random client to update SOR +end note + +S -> C1: UPDATE-SOR(Hash) +note over C1 + Client crashes may cause duplicate writes to SOR +end note + +critical + C1 -> S: LOCK(Hash) + activate S + loop through APPENDWITHWRITER \n operations in the chain + C1 -> SOR: OP + end + C1 -> S: replaceAtHead with resolved chain + note over S + Server unpins the hash entry if there were no appends + since last lock acquire + end note + C1 -> S: UNLOCK(Hash) + deactivate S +end + +@enduml + diff --git a/docs/src/docs/uml/passive-sync-solution-four.puml b/docs/src/docs/uml/passive-sync-solution-four.puml new file mode 100644 index 0000000000..ca8997ea7f --- /dev/null +++ b/docs/src/docs/uml/passive-sync-solution-four.puml @@ -0,0 +1,78 @@ +@startuml + +title Passive Replication Proposal Three ('The Modified Clifford') + +participant "Client" as C +participant "Active Server" as AS +participant "Active Entity" as AE +participant "Passive Server" as PS +participant "Passive Entity" as PE + +[o->C: putIfAbsent(k, v) +activate C +C->AS: getAndAppend(putIfAbsent(k, v)) +activate AS +AS->PS: begin() +activate PS +PS->AS: ack() +deactivate PS + +C<<--AS: received + +AS->AE: getAndAppend(putIfAbsent(k, v)) +activate AE +AE->AS: apply chain delta +activate AS +AS<--AE: chain +deactivate AE +C<<--AS: chain + +C->C: resolve + +AS->PS: apply chain delta +activate PS +PS->PE: apply chain delta +activate PE +AS<--PS: persisted +note left +//persisted// means "message recorded" +which has to be fully recorded in the passive +since the active and client are the same in this case +end note +deactivate PS + +AS->AE: apply chain delta +activate AE +note over AE +ignored +end note +AS<--AE +deactivate AE +deactivate AS + + +C<--AS +deactivate AS +[<--C +deactivate C + +PE->PE: apply delta +opt delta apply failure +PE->PE: evict +end opt +PE -[#white]-> AE #layout +deactivate PE + +C->>AS: replace(k.hash, chain, resolved) +activate AS +AS->AS: replace +opt replace-success +AS->>PS: replicateChain(active-state) +activate PS +deactivate PS +end opt +note over AS, PS +Servers now forced back in sync +end note +deactivate AS +@enduml diff --git a/docs/src/docs/uml/passive-sync-solution-one.puml b/docs/src/docs/uml/passive-sync-solution-one.puml new file mode 100644 index 0000000000..9b77aed1b3 --- /dev/null +++ b/docs/src/docs/uml/passive-sync-solution-one.puml @@ -0,0 +1,47 @@ +@startuml + +title Passive Replication Proposal One ('The Chris') +participant "Client" as C +participant "Active Server" as AS +participant "Passive Server" as PS + +[o->C: putIfAbsent(k, v) +activate C +C->AS: getAndAppend(putIfAbsent(k, v)) +activate AS +AS->PS: append(putIfAbsent(k, v)) +activate PS +AS<--PS +deactivate PS +C<--AS: chain +deactivate AS + +C->C: resolve + +C->AS: replace(k.hash, chain, resolved) +activate AS +AS->PS: replace(k.hash, chain, resolved) +activate PS +AS<--PS: passive response +deactivate PS +C<--AS: active & passive responses +deactivate AS + +alt active response ≠ passive response +C->AS: evict(k.hash) +activate AS +AS->PS: evict(k.hash) +activate PS +AS<--PS +deactivate PS +C<--AS +deactivate AS +note over AS, PS +Servers now forced back in sync +end note +end + +[<--C: +deactivate C + +@enduml diff --git a/docs/src/docs/uml/passive-sync-solution-three.puml b/docs/src/docs/uml/passive-sync-solution-three.puml new file mode 100644 index 0000000000..f78b2bf5f1 --- /dev/null +++ b/docs/src/docs/uml/passive-sync-solution-three.puml @@ -0,0 +1,47 @@ +@startuml + +title Passive Replication Proposal Three ('The Modified Clifford') + +participant "Client" as C +participant "Active Server" as AS +participant "Passive Server" as PS + +[o->C: putIfAbsent(k, v) +activate C +C->AS: getAndAppend(putIfAbsent(k, v)) +activate AS +C<<--AS: chain + +C->C: resolve +AS->PS: replicateDelta(putIfAbsent(k, v)) +activate PS +AS<--PS: persisted +note left +//persisted// means "message recorded" +which could be in either client or passive +end note +C<--AS +deactivate AS +[<--C +deactivate C + +PS->PS: apply delta +opt delta apply failure +PS->PS: evict +end opt +PS -[#white]-> AS #layout +deactivate PS + +C->>AS: replace(k.hash, chain, resolved) +activate AS +AS->AS: replace +opt replace-success +AS->>PS: replicateChain(active-state) +activate PS +deactivate PS +end opt +note over AS, PS +Servers now forced back in sync +end note +deactivate AS +@enduml diff --git a/docs/src/docs/uml/passive-sync-solution-two.puml b/docs/src/docs/uml/passive-sync-solution-two.puml new file mode 100644 index 0000000000..040c4f581b --- /dev/null +++ b/docs/src/docs/uml/passive-sync-solution-two.puml @@ -0,0 +1,59 @@ +@startuml + +title Passive Replication Proposal Two ('The Clifford') + +participant "Client" as C +participant "Active Server" as AS +participant "Passive Server" as PS + +[o->C: putIfAbsent(k, v) +activate C +C->AS: getAndAppend(putIfAbsent(k, v)) +activate AS +C<--AS: chain +deactivate AS + +AS->>AS +note right: triggers async passive replication +activate AS +C->C: resolve +AS->PS: replicateDelta(putIfAbsent(k, v)) +activate PS +AS<--PS +deactivate PS +opt delta replication failure +AS->PS: replicateChain(active-state) +activate PS +AS<--PS +deactivate PS +note over AS, PS +Servers now forced back in sync +end note +end +C<-AS + +deactivate AS + +[<--C +deactivate C + +==== Client Operation Complete ==== + +C->>AS: replace(k.hash, chain, resolved) +activate AS +AS->PS: replicateDelta(...) +activate PS +AS<--PS +deactivate PS +opt delta replication failure +AS->PS: replicateChain(active-state) +activate PS +AS<--PS +deactivate PS +note over AS, PS +Servers now forced back in sync +end note +end +deactivate AS + +@enduml diff --git a/docs/src/docs/uml/putIfAbsentUml.txt b/docs/src/docs/uml/putIfAbsentUml.puml similarity index 100% rename from docs/src/docs/uml/putIfAbsentUml.txt rename to docs/src/docs/uml/putIfAbsentUml.puml diff --git a/ehcache-107/README.adoc b/ehcache-107/README.adoc new file mode 100644 index 0000000000..65e8d93be6 --- /dev/null +++ b/ehcache-107/README.adoc @@ -0,0 +1,85 @@ += The Ehcache 3.x JSR-107 Provider + +== Getting started with Ehcache3 & JSR-107 + +There isn't anything special for you to do to use Ehcache3 as the caching provider for your application: add the +`ehcache-3.0.0.jar` to your application's classpath (possibly removing the previous provider's jar) and you are ready to +go: + +[source,java] +---- +CachingProvider provider = Caching.getCachingProvider(); // <1> +CacheManager cacheManager = provider.getCacheManager(); // <2> +---- +<1> Retrieves the default `CachingProvider`, this should be `org.ehcache.EhCachingProvider`, + which you can also force, by using the `Caching.getCachingProvider(String)` static method instead; +<2> Retrieve the default `CacheManager` instance using the provider. + +=== Using a specific configuration file + +You can also add a XML file that preconfigure `Cache` instances upfront. See +link:../xml/README.adoc[the XML README file] for more details on configuring `Cache` in XML. + +In order to pre-configure the `CacheManager` at creation time using the XML file, simply: + +[source,java] +---- +CachingProvider provider = Caching.getCachingProvider(); +CacheManager cacheManager = provider.getCacheManager( // <1> + this.getClass().getResource("/ehcache.xml").toURI(), // <2> + Customer.class.getClassLoader()); // <3> +---- +<1> Invoking `javax.cache.spi.CachingProvider.getCacheManager(java.net.URI, java.lang.ClassLoader)` +<2> where the first argument is an `URI` pointing to our XML configuration file, e.g. `ehcache.xml`; +<3> the second argument being the `ClassLoader` to use to load user-types if needed; i.e. `Class` instances that are + stored in the `Cache` managed by our `CacheManager`. + +NOTE: You can also use the `CachingProvider.getCacheManager()` method that doesn't take any argument instead. + The `URI` and `ClassLoader` used to configure the `CacheManager` will then use the + vendor specific values returned by `CachingProvider.getDefaultURI` and `.getDefaultClassLoader` respectively. + Be aware that these aren¹t entirely spec¹ed for Ehcache3 and may change in future releases! + +=== Supplement JSR-107's configurations + +You can also create `cache-templates` as of Ehcache3, see +link:../xml/README.adoc#__code_cache_template_code_elements[Cache Templates] section of the XML README file for more details. The Ehcache3 +JSR-107 Caching Provider comes with an extension to the regular XML configuration, so you can: + + . Configure a default template all programmatically created `Cache` instances will inherit from, and + . Configure a given named `Cache` to inherit from a specific template. + +This feature is particularly useful to configure `Cache` beyond the JSR-107 specification, e.g. giving them a capacity +constraint. All is needed is adding a jsr107 service in your XML configuration file: + +[source,xml] +---- + + + + + + + + + + java.lang.String + java.lang.String + 2000 + + + + 20 + + +---- +<1> First, declare a namespace for the 107 extension, e.g. `jsr107` +<2> Within a `service` element at the top of you configuration, add a `jsr107:defaults` element +<3> The element takes an optional attribute `default-template`, which references the `cache-template` to use for all + `javax.cache.Cache` created by the application at runtime using `javax.cache.CacheManager.createCache`. In + this example, the default `cache-template` used will be `tinyCache`, meaning that atop of their particular config, + programmatically created `Cache` instances will have their capacity constrained to 20 entries. +<4> Nested within the `jsr107:defaults`, add specific `cache-templates` to use for given named `Cache`, e.g. when + creating the `Cache` named `foos` at runtime, Ehcache will enhance its config, giving it a capacity of 2000 entries, + as well as insuring both key and value types are `String`. diff --git a/ehcache-107/build.gradle b/ehcache-107/build.gradle new file mode 100644 index 0000000000..035a2dd193 --- /dev/null +++ b/ehcache-107/build.gradle @@ -0,0 +1,102 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.internal-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 JSR-107 module' + description = 'The JSR-107 compatibility module of Ehcache 3' + } +} + +configurations { + tckTestClasses + + all { + resolutionStrategy { + dependencySubstitution { + substitute(module('junit:junit:4.11')) + .because('CVE-2020-15250') + .with(module('junit:junit:4.13.1')) + } + } + } +} + +sourceSets { + tckTest { + java.srcDir 'src/tck/java' + resources.srcDir 'src/tck/resources' + compileClasspath += sourceSets.test.compileClasspath + runtimeClasspath += sourceSets.test.runtimeClasspath + } +} + +dependencies { + api project(':ehcache-api') + api "javax.cache:cache-api:$parent.jcacheVersion" + + implementation project(':ehcache-impl') + implementation project(':ehcache-xml') + implementation "org.terracotta:statistics:$statisticVersion" + + compileOnly 'org.osgi:org.osgi.service.component.annotations:1.3.0' + + tckTestRuntimeOnly "javax.cache:cache-tests:$jcacheTckVersion" + tckTestClasses("javax.cache:cache-tests:$jcacheTckVersion:tests") { + transitive = false + } +} + +javadoc { + exclude '**/tck/**' +} + +jar { + bnd( + 'Export-Package': '!org.ehcache.jsr107.tck, !org.ehcache.jsr107.internal.*, org.ehcache.jsr107.*', + 'Import-Package': 'javax.cache.*;resolution:=optional, *', + ) +} + +task unpackTckTests(type: Sync) { + from { + configurations.tckTestClasses.collect {zipTree(it)} + } + into sourceSets.tckTest.java.outputDir +} + +task tckTest(type: Test, dependsOn: unpackTckTests) { + testClassesDirs = sourceSets.tckTest.output.classesDirs + classpath += sourceSets.tckTest.runtimeClasspath + + binResultsDir file("$buildDir/tck-tests-results/binary/$name") + reports.junitXml.destination = file("$buildDir/tck-tests-results") + reports.html.destination = file("$buildDir/reports/tck-tests") + + systemProperty 'java.net.preferIPv4Stack', 'true' + systemProperty 'javax.management.builder.initial', 'org.ehcache.jsr107.internal.tck.Eh107MBeanServerBuilder' + systemProperty 'org.jsr107.tck.management.agentId', 'Eh107MBeanServer' + systemProperty 'javax.cache.CacheManager', 'org.ehcache.CacheManager' + systemProperty 'javax.cache.Cache', 'org.ehcache.Cache' + systemProperty 'javax.cache.Cache.Entry', 'org.ehcache.Cache$Entry' + systemProperty 'javax.cache.annotation.CacheInvocationContext', 'javax.cache.annotation.impl.cdi.CdiCacheKeyInvocationContextImpl' +} + +test.dependsOn tckTest diff --git a/107/config/checkstyle-suppressions.xml b/ehcache-107/config/checkstyle-suppressions.xml similarity index 100% rename from 107/config/checkstyle-suppressions.xml rename to ehcache-107/config/checkstyle-suppressions.xml diff --git a/107/src/main/java/org/ehcache/jsr107/CacheResources.java b/ehcache-107/src/main/java/org/ehcache/jsr107/CacheResources.java similarity index 82% rename from 107/src/main/java/org/ehcache/jsr107/CacheResources.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/CacheResources.java index b1d71e7e1d..deb75016a8 100644 --- a/107/src/main/java/org/ehcache/jsr107/CacheResources.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/CacheResources.java @@ -15,16 +15,18 @@ */ package org.ehcache.jsr107; -import java.io.Closeable; import java.util.Collections; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; +import javax.cache.CacheException; import javax.cache.configuration.CacheEntryListenerConfiguration; import org.ehcache.jsr107.internal.Jsr107CacheLoaderWriter; +import static org.ehcache.jsr107.CloseUtil.closeAllAfter; + /** * @author teck */ @@ -66,9 +68,7 @@ synchronized ListenerResources registerCacheEntryListener(CacheEntryListen throw new IllegalArgumentException("listener config already registered"); } - MultiCacheException mce = new MultiCacheException(); - ListenerResources rv = ListenerResources.createListenerResources(listenerConfig, mce); - mce.throwIfNotEmpty(); + ListenerResources rv = ListenerResources.createListenerResources(listenerConfig); listenerResources.put(listenerConfig, rv); return rv; } @@ -86,33 +86,29 @@ synchronized ListenerResources deregisterCacheEntryListener(CacheEntryList if (resources == null) { return null; } - MultiCacheException mce = new MultiCacheException(); - close(resources, mce); - mce.throwIfNotEmpty(); + try { + CloseUtil.closeAll(resources); + } catch (Throwable t) { + throw new CacheException(t); + } return resources; } - synchronized void closeResources(MultiCacheException mce) { + synchronized void closeResources() { if (closed.compareAndSet(false, true)) { - close(expiryPolicy, mce); - close(cacheLoaderWriter, mce); - for (ListenerResources lr : listenerResources.values()) { - close(lr, mce); + try { + CloseUtil.closeAll(expiryPolicy, cacheLoaderWriter, listenerResources.values()); + } catch (Throwable t) { + throw new CacheException(t); } } } - boolean isClosed() { - return closed.get(); - } - - static void close(Object obj, MultiCacheException mce) { - if (obj instanceof Closeable) { - try { - ((Closeable) obj).close(); - } catch (Throwable t) { - mce.addThrowable(t); - } + synchronized CacheException closeResourcesAfter(CacheException exception) { + if (closed.compareAndSet(false, true)) { + return closeAllAfter(exception, expiryPolicy, cacheLoaderWriter, listenerResources.values()); + } else { + return exception; } } } diff --git a/ehcache-107/src/main/java/org/ehcache/jsr107/CloseUtil.java b/ehcache-107/src/main/java/org/ehcache/jsr107/CloseUtil.java new file mode 100644 index 0000000000..e2c350b478 --- /dev/null +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/CloseUtil.java @@ -0,0 +1,86 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.jsr107; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Optional; +import java.util.stream.Stream; + +public class CloseUtil { + public static T closeAllAfter(T failure, Object ... objects) { + Optional closeable = extractCloseables(Stream.of(objects)).reduce(CloseUtil::composeCloseables); + if (closeable.isPresent()) { + try { + closeable.get().close(); + } catch (Throwable t) { + failure.addSuppressed(t); + } + } + return failure; + } + + static void closeAll(Object ... objects) throws IOException { + closeAll(Stream.of(objects)); + } + + static void closeAll(Stream objects) throws IOException { + chain(extractCloseables(objects)); + } + + static void chain(Closeable ... objects) throws IOException { + chain(Stream.of(objects)); + } + + public static void chain(Stream objects) throws IOException { + Optional closeable = objects.reduce(CloseUtil::composeCloseables); + if (closeable.isPresent()) { + closeable.get().close(); + } + } + + private static Stream extractCloseables(Stream objects) { + return objects.filter(o -> o != null).flatMap(o -> { + if (o instanceof Collection) { + return ((Collection) o).stream(); + } else if (o.getClass().isArray()) { + return Arrays.stream((Object[]) o); + } else { + return Stream.of(o); + } + }).filter(o -> o != null).filter(Closeable.class::isInstance).map(Closeable.class::cast); + } + + private static Closeable composeCloseables(Closeable a, Closeable b) { + return () -> { + try { + a.close(); + } catch (Throwable t1) { + try { + b.close(); + } catch (Throwable t2) { + t1.addSuppressed(t2); + } + throw t1; + } + b.close(); + }; + } + +} diff --git a/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java b/ehcache-107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java similarity index 84% rename from 107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java index ec8db61e07..2987736314 100644 --- a/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java @@ -23,22 +23,22 @@ import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.jsr107.config.ConfigurationElementState; import org.ehcache.jsr107.config.Jsr107CacheConfiguration; -import org.ehcache.jsr107.config.Jsr107Service; import org.ehcache.jsr107.internal.Jsr107CacheLoaderWriter; -import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; import org.ehcache.xml.XmlConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import javax.cache.CacheException; import javax.cache.configuration.CacheEntryListenerConfiguration; import javax.cache.configuration.CompleteConfiguration; import javax.cache.configuration.Configuration; @@ -49,6 +49,7 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; +import static org.ehcache.jsr107.CloseUtil.closeAllAfter; /** * ConfigurationMerger @@ -81,7 +82,7 @@ ConfigHolder mergeConfigurations(String cacheName, Configuration templateBuilder = null; + CacheConfigurationBuilder templateBuilder; try { templateBuilder = xmlConfiguration.newCacheConfigurationBuilderFromTemplate(templateName, jsr107Configuration.getKeyType(), jsr107Configuration.getValueType()); @@ -106,11 +107,11 @@ ConfigHolder mergeConfigurations(String cacheName, Configuration ehcacheLoaderWriterConfiguration = builder.getService(DefaultCacheLoaderWriterConfiguration.class); if (ehcacheLoaderWriterConfiguration == null) { useEhcacheLoaderWriter = false; // No template loader/writer - let's activate the JSR-107 one if any - loaderWriter = initCacheLoaderWriter(jsr107Configuration, new MultiCacheException()); + loaderWriter = initCacheLoaderWriter(jsr107Configuration); if (loaderWriter != null && (jsr107Configuration.isReadThrough() || jsr107Configuration.isWriteThrough())) { cacheLoaderWriterFactory.registerJsr107Loader(cacheName, loaderWriter); } @@ -127,7 +128,7 @@ ConfigHolder mergeConfigurations(String cacheName, Configuration(cacheConfiguration.getExpiry()); + expiryPolicy = new EhcacheExpiryWrapper<>(cacheConfiguration.getExpiryPolicy()); } return new ConfigHolder<>( @@ -135,42 +136,35 @@ ConfigHolder mergeConfigurations(String cacheName, Configuration(jsr107Configuration, cacheConfiguration, hasConfiguredExpiry, useEhcacheLoaderWriter), cacheConfiguration, useEhcacheLoaderWriter); } catch (Throwable throwable) { - MultiCacheException mce = new MultiCacheException(); - CacheResources.close(expiryPolicy, mce); - CacheResources.close(loaderWriter, mce); - if (throwable instanceof IllegalArgumentException) { - String message = throwable.getMessage(); - if (mce.getMessage() != null) { - message = message + "\nSuppressed " + mce.getMessage(); - } - throw new IllegalArgumentException(message, throwable); + throw closeAllAfter((IllegalArgumentException) throwable, expiryPolicy, loaderWriter); + } else { + throw closeAllAfter(new CacheException(throwable), expiryPolicy, loaderWriter); } - mce.addFirstThrowable(throwable); - throw mce; } } private CacheConfigurationBuilder handleStoreByValue(Eh107CompleteConfiguration jsr107Configuration, CacheConfigurationBuilder builder, String cacheName) { - DefaultCopierConfiguration copierConfig = builder.getExistingServiceConfiguration(DefaultCopierConfiguration.class); - if(copierConfig == null) { + @SuppressWarnings("unchecked") + Collection> copierConfigs = builder.getServices((Class>) (Class) DefaultCopierConfiguration.class); + if(copierConfigs.isEmpty()) { if(jsr107Configuration.isStoreByValue()) { if (xmlConfiguration != null) { DefaultCopyProviderConfiguration defaultCopyProviderConfiguration = findSingletonAmongst(DefaultCopyProviderConfiguration.class, - xmlConfiguration.getServiceCreationConfigurations().toArray()); + xmlConfiguration.getServiceCreationConfigurations()); if (defaultCopyProviderConfiguration != null) { - Map, ClassInstanceConfiguration>> defaults = defaultCopyProviderConfiguration.getDefaults(); + Map, DefaultCopierConfiguration> defaults = defaultCopyProviderConfiguration.getDefaults(); handleCopierDefaultsforImmutableTypes(defaults); boolean matchingDefault = false; if (defaults.containsKey(jsr107Configuration.getKeyType())) { matchingDefault = true; } else { - builder = builder.add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); + builder = builder.withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } if (defaults.containsKey(jsr107Configuration.getValueType())) { matchingDefault = true; } else { - builder = builder.add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); + builder = builder.withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } if (matchingDefault) { LOG.info("CacheManager level copier configuration overwriting JSR-107 by-value semantics for cache {}", cacheName); @@ -188,8 +182,8 @@ private CacheConfigurationBuilder handleStoreByValue(Eh107CompleteC } @SuppressWarnings("unchecked") - private static CacheConfigurationBuilder addDefaultCopiers(CacheConfigurationBuilder builder, Class keyType, Class valueType ) { - Set immutableTypes = new HashSet<>(); + private static CacheConfigurationBuilder addDefaultCopiers(CacheConfigurationBuilder builder, Class keyType, Class valueType ) { + Set> immutableTypes = new HashSet<>(); immutableTypes.add(String.class); immutableTypes.add(Long.class); immutableTypes.add(Float.class); @@ -197,20 +191,20 @@ private static CacheConfigurationBuilder addDefaultCopiers(CacheCon immutableTypes.add(Character.class); immutableTypes.add(Integer.class); if (immutableTypes.contains(keyType)) { - builder = builder.add(new DefaultCopierConfiguration((Class)Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.KEY)); + builder = builder.withService(new DefaultCopierConfiguration((Class)Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.KEY)); } else { - builder = builder.add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); + builder = builder.withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } if (immutableTypes.contains(valueType)) { - builder = builder.add(new DefaultCopierConfiguration((Class)Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); + builder = builder.withService(new DefaultCopierConfiguration((Class)Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); } else { - builder = builder.add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); + builder = builder.withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } return builder; } - private static void handleCopierDefaultsforImmutableTypes(Map, ClassInstanceConfiguration>> defaults) { + private static void handleCopierDefaultsforImmutableTypes(Map, DefaultCopierConfiguration> defaults) { addIdentityCopierIfNoneRegistered(defaults, Long.class); addIdentityCopierIfNoneRegistered(defaults, Integer.class); addIdentityCopierIfNoneRegistered(defaults, String.class); @@ -219,8 +213,8 @@ private static void handleCopierDefaultsforImmutableTypes(Map, ClassIns addIdentityCopierIfNoneRegistered(defaults, Character.class); } - @SuppressWarnings("unchecked") - private static void addIdentityCopierIfNoneRegistered(Map, ClassInstanceConfiguration>> defaults, Class clazz) { + @SuppressWarnings({"rawtypes", "unchecked"}) + private static void addIdentityCopierIfNoneRegistered(Map, DefaultCopierConfiguration> defaults, Class clazz) { if (!defaults.containsKey(clazz)) { defaults.put(clazz, new DefaultCopierConfiguration(Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); } @@ -228,9 +222,8 @@ private static void addIdentityCopierIfNoneRegistered(Map, ClassInstanc private Map, ListenerResources> initCacheEventListeners(CompleteConfiguration config) { Map, ListenerResources> listenerResources = new ConcurrentHashMap<>(); - MultiCacheException mce = new MultiCacheException(); for (CacheEntryListenerConfiguration listenerConfig : config.getCacheEntryListenerConfigurations()) { - listenerResources.put(listenerConfig, ListenerResources.createListenerResources(listenerConfig, mce)); + listenerResources.put(listenerConfig, ListenerResources.createListenerResources(listenerConfig)); } return listenerResources; } @@ -239,7 +232,7 @@ private Eh107Expiry initExpiryPolicy(CompleteConfiguration co return new ExpiryPolicyToEhcacheExpiry<>(config.getExpiryPolicyFactory().create()); } - private Jsr107CacheLoaderWriter initCacheLoaderWriter(CompleteConfiguration config, MultiCacheException mce) { + private Jsr107CacheLoaderWriter initCacheLoaderWriter(CompleteConfiguration config) { Factory> cacheLoaderFactory = config.getCacheLoaderFactory(); @SuppressWarnings("unchecked") Factory> cacheWriterFactory = (Factory>) (Object) config.getCacheWriterFactory(); @@ -256,11 +249,7 @@ private Jsr107CacheLoaderWriter initCacheLoaderWriter(CompleteConfi try { cacheWriter = cacheWriterFactory == null ? null : cacheWriterFactory.create(); } catch (Throwable t) { - if (t != mce) { - mce.addThrowable(t); - } - CacheResources.close(cacheLoader, mce); - throw mce; + throw closeAllAfter(new CacheException(t), cacheLoader); } if (cacheLoader == null && cacheWriter == null) { diff --git a/107/src/main/java/org/ehcache/jsr107/DefaultConfigurationResolver.java b/ehcache-107/src/main/java/org/ehcache/jsr107/DefaultConfigurationResolver.java similarity index 94% rename from 107/src/main/java/org/ehcache/jsr107/DefaultConfigurationResolver.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/DefaultConfigurationResolver.java index 0db6470554..a4e89bf738 100644 --- a/107/src/main/java/org/ehcache/jsr107/DefaultConfigurationResolver.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/DefaultConfigurationResolver.java @@ -27,9 +27,7 @@ class DefaultConfigurationResolver { static final String DEFAULT_CONFIG_PROPERTY_NAME = "ehcache.jsr107.config.default"; static URI resolveConfigURI(Properties cacheManagerProperties) { - Object config = null; - - config = cacheManagerProperties.get(DEFAULT_CONFIG_PROPERTY_NAME); + Object config = cacheManagerProperties.get(DEFAULT_CONFIG_PROPERTY_NAME); if (config == null) { config = System.getProperties().get(DEFAULT_CONFIG_PROPERTY_NAME); diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107Cache.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Cache.java similarity index 91% rename from 107/src/main/java/org/ehcache/jsr107/Eh107Cache.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Cache.java index a639c48752..9595a55cff 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107Cache.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Cache.java @@ -17,7 +17,6 @@ import org.ehcache.core.InternalCache; import org.ehcache.Status; -import org.ehcache.UserManagedCache; import org.ehcache.core.Jsr107Cache; import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.event.EventFiring; @@ -36,11 +35,9 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; import javax.cache.Cache; +import javax.cache.CacheException; import javax.cache.CacheManager; import javax.cache.configuration.CacheEntryListenerConfiguration; import javax.cache.configuration.Configuration; @@ -68,7 +65,7 @@ class Eh107Cache implements Cache { private final Jsr107CacheLoaderWriter cacheLoaderWriter; Eh107Cache(String name, Eh107Configuration config, CacheResources cacheResources, - InternalCache ehCache, Eh107CacheManager cacheManager) { + InternalCache ehCache, StatisticsService statisticsService, Eh107CacheManager cacheManager) { this.cacheLoaderWriter = cacheResources.getCacheLoaderWriter(); this.config = config; this.ehCache = ehCache; @@ -76,15 +73,14 @@ class Eh107Cache implements Cache { this.name = name; this.cacheResources = cacheResources; this.managementBean = new Eh107CacheMXBean(name, cacheManager.getURI(), config); - this.statisticsBean = new Eh107CacheStatisticsMXBean(name, cacheManager.getURI(), - cacheManager.getEhCacheManager().getServiceProvider().getService(StatisticsService.class)); + this.statisticsBean = new Eh107CacheStatisticsMXBean(name, cacheManager.getURI(), statisticsService); for (Map.Entry, ListenerResources> entry : cacheResources .getListenerResources().entrySet()) { registerEhcacheListeners(entry.getKey(), entry.getValue()); } - this.jsr107Cache = ehCache.getJsr107Cache(); + this.jsr107Cache = ehCache.createJsr107Cache(); } @Override @@ -138,14 +134,7 @@ public void loadAll(Set keys, boolean replaceExistingValues, Comple jsr107Cache.loadAll(keys, replaceExistingValues, this::loadAllFunction); } catch (Exception e) { final CacheLoaderException cle; - if (e instanceof CacheLoaderException) { - cle = (CacheLoaderException) e; - } else if (e.getCause() instanceof CacheLoaderException) { - cle = (CacheLoaderException) e.getCause(); - } else { - cle = new CacheLoaderException(e); - } - + cle = getCacheLoaderException(e); completionListener.onException(cle); return; } @@ -153,6 +142,18 @@ public void loadAll(Set keys, boolean replaceExistingValues, Comple completionListener.onCompletion(); } + private CacheLoaderException getCacheLoaderException(Exception e) { + CacheLoaderException cle; + if (e instanceof CacheLoaderException) { + cle = (CacheLoaderException) e; + } else if (e.getCause() instanceof CacheLoaderException) { + cle = (CacheLoaderException) e.getCause(); + } else { + cle = new CacheLoaderException(e); + } + return cle; + } + private Map loadAllFunction(Iterable keysIterable) { try { Map loadResult = cacheLoaderWriter.loadAllAlways(keysIterable); @@ -162,15 +163,7 @@ private Map loadAllFunction(Iterable keysIterable) { } return resultMap; } catch (Exception e) { - final CacheLoaderException cle; - if (e instanceof CacheLoaderException) { - cle = (CacheLoaderException) e; - } else if (e.getCause() instanceof CacheLoaderException) { - cle = (CacheLoaderException) e.getCause(); - } else { - cle = new CacheLoaderException(e); - } - + CacheLoaderException cle = getCacheLoaderException(e); throw cle; } } @@ -422,9 +415,7 @@ public CacheManager getCacheManager() { @Override public void close() { - MultiCacheException closeException = new MultiCacheException(); - cacheManager.close(this, closeException); - closeException.throwIfNotEmpty(); + cacheManager.close(this); } @Override @@ -432,35 +423,27 @@ public boolean isClosed() { return syncedIsClose(); } - void closeInternal(MultiCacheException closeException) { - closeInternal(false, closeException); + CacheException closeInternalAfter(CacheException failure) { + if (hypotheticallyClosed.compareAndSet(false, true)) { + return cacheResources.closeResourcesAfter(failure); + } else { + return failure; + } } - private void closeInternal(boolean destroy, MultiCacheException closeException) { + void closeInternal() { if (hypotheticallyClosed.compareAndSet(false, true)) { - if (destroy) { - try { - clear(false); - } catch (Throwable t) { - closeException.addThrowable(t); - } - } - - cacheResources.closeResources(closeException); + cacheResources.closeResources(); } } private boolean syncedIsClose() { - if (((UserManagedCache)ehCache).getStatus() == Status.UNINITIALIZED && !hypotheticallyClosed.get()) { + if (ehCache.getStatus() == Status.UNINITIALIZED && !hypotheticallyClosed.get()) { close(); } return hypotheticallyClosed.get(); } - void destroy(MultiCacheException destroyException) { - closeInternal(true, destroyException); - } - @Override public T unwrap(Class clazz) { return Unwrap.unwrap(clazz, this, ehCache); @@ -622,8 +605,8 @@ public T unwrap(Class clazz) { } } - private static enum MutableEntryOperation { - NONE, ACCESS, CREATE, LOAD, REMOVE, UPDATE; + private enum MutableEntryOperation { + NONE, ACCESS, CREATE, LOAD, REMOVE, UPDATE } private static final Object UNDEFINED = new Object(); diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java similarity index 94% rename from 107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java index 85d1725031..dc08b2255c 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java @@ -63,6 +63,8 @@ public boolean isOldValueAvailable() { static class NormalEvent extends Eh107CacheEntryEvent { + private static final long serialVersionUID = 1566947833363986792L; + public NormalEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { super(source, eventType, ehEvent, hasOldValue); } @@ -75,6 +77,8 @@ public V getValue() { static class RemovingEvent extends Eh107CacheEntryEvent { + private static final long serialVersionUID = -1363817518693572909L; + public RemovingEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { super(source, eventType, ehEvent, hasOldValue); } diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriter.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriter.java similarity index 91% rename from 107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriter.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriter.java index 815b9315a3..5e9df94dab 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriter.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriter.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; -import static java.util.Collections.emptyMap; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -30,9 +29,10 @@ import javax.cache.integration.CacheWriter; import org.ehcache.jsr107.internal.Jsr107CacheLoaderWriter; -import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; +import static java.util.Collections.emptyMap; + /** * @author teck */ @@ -53,7 +53,7 @@ class Eh107CacheLoaderWriter implements Jsr107CacheLoaderWriter, Clo } @Override - public V load(K key) throws Exception { + public V load(K key) { if (readThrough) { return cacheLoader.load(key); } else { @@ -62,7 +62,7 @@ public V load(K key) throws Exception { } @Override - public Map loadAll(Iterable keys) throws Exception { + public Map loadAll(Iterable keys) { if (readThrough) { return loadAllAlways(keys); } else { @@ -71,7 +71,7 @@ public Map loadAll(Iterable keys) throws Exception { } @Override - public Map loadAllAlways(Iterable keys) throws BulkCacheLoadingException, Exception { + public Map loadAllAlways(Iterable keys) { if (cacheLoader == null) { return emptyMap(); } else { @@ -80,21 +80,21 @@ public Map loadAllAlways(Iterable keys) throws BulkCacheLoadi } @Override - public void write(K key, V value) throws Exception { + public void write(K key, V value) { if (cacheWriter != null) { cacheWriter.write(cacheEntryFor(key, value)); } } @Override - public void delete(K key) throws Exception { + public void delete(K key) { if (cacheWriter != null) { cacheWriter.delete(key); } } @Override - public void deleteAll(Iterable keys) throws Exception { + public void deleteAll(Iterable keys) throws BulkCacheWritingException { if (cacheWriter != null) { Set allKeys = new HashSet<>(); for (K key : keys) { @@ -131,7 +131,7 @@ private Map failures(Set keys, Exception e) { } @Override - public void writeAll(Iterable> entries) throws Exception { + public void writeAll(Iterable> entries) { if (cacheWriter != null) { Collection> toWrite = new ArrayList<>(); for (Map.Entry entry : entries) { diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriterProvider.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriterProvider.java similarity index 91% rename from 107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriterProvider.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriterProvider.java index 3738fdb10a..2fd305cd27 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriterProvider.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheLoaderWriterProvider.java @@ -45,12 +45,13 @@ public CacheLoaderWriter createCacheLoaderWriter(String ali } @Override - public void releaseCacheLoaderWriter(CacheLoaderWriter cacheLoaderWriter) { - // + public void releaseCacheLoaderWriter(String alias, CacheLoaderWriter cacheLoaderWriter) { + deregisterJsrLoaderForCache(alias); } void registerJsr107Loader(String alias, CacheLoaderWriter cacheLoaderWriter) { CacheLoaderWriter prev = cacheLoaderWriters.putIfAbsent(alias, cacheLoaderWriter); + registerJsrLoaderForCache(alias); if (prev != null) { throw new IllegalStateException("loader already registered for [" + alias + "]"); } diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheMXBean.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheMXBean.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/Eh107CacheMXBean.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheMXBean.java diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java similarity index 77% rename from 107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java index 1b4e308765..92a15e4397 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java @@ -15,9 +15,12 @@ */ package org.ehcache.jsr107; +import org.ehcache.CachePersistenceException; +import org.ehcache.PersistentCacheManager; import org.ehcache.Status; import org.ehcache.config.CacheConfiguration; import org.ehcache.core.InternalCache; +import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.jsr107.internal.Jsr107CacheLoaderWriter; @@ -27,6 +30,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.Closeable; +import java.io.IOException; import java.lang.management.ManagementFactory; import java.net.URI; import java.util.ArrayList; @@ -45,6 +50,9 @@ import javax.management.InstanceNotFoundException; import javax.management.MBeanServer; +import static org.ehcache.jsr107.CloseUtil.chain; +import static org.ehcache.jsr107.CloseUtil.closeAll; + /** * @author teck */ @@ -52,33 +60,31 @@ class Eh107CacheManager implements CacheManager { private static final Logger LOG = LoggerFactory.getLogger(Eh107CacheManager.class); - private static MBeanServer MBEAN_SERVER = ManagementFactory.getPlatformMBeanServer(); + private static final MBeanServer MBEAN_SERVER = ManagementFactory.getPlatformMBeanServer(); private final Object cachesLock = new Object(); private final ConcurrentMap> caches = new ConcurrentHashMap<>(); - private final Eh107InternalCacheManager ehCacheManager; + private final org.ehcache.CacheManager ehCacheManager; private final EhcacheCachingProvider cachingProvider; private final ClassLoader classLoader; private final URI uri; private final Properties props; private final ConfigurationMerger configurationMerger; + private final StatisticsService statisticsService; - Eh107CacheManager(EhcacheCachingProvider cachingProvider, Eh107InternalCacheManager ehCacheManager, Properties props, - ClassLoader classLoader, URI uri, ConfigurationMerger configurationMerger) { + Eh107CacheManager(EhcacheCachingProvider cachingProvider, org.ehcache.CacheManager ehCacheManager, Jsr107Service jsr107Service, + Properties props, ClassLoader classLoader, URI uri, ConfigurationMerger configurationMerger) { this.cachingProvider = cachingProvider; this.ehCacheManager = ehCacheManager; this.props = props; this.classLoader = classLoader; this.uri = uri; this.configurationMerger = configurationMerger; + this.statisticsService = jsr107Service.getStatistics(); refreshAllCaches(); } - Eh107InternalCacheManager getEhCacheManager() { - return ehCacheManager; - } - private void refreshAllCaches() { for (Map.Entry> entry : ehCacheManager.getRuntimeConfiguration().getCacheConfigurations().entrySet()) { String name = entry.getKey(); @@ -109,7 +115,7 @@ private Eh107Cache wrapEhcacheCache(String alias, InternalCache cacheLoaderWriter = cache.getCacheLoaderWriter(); boolean storeByValueOnHeap = false; - for (ServiceConfiguration serviceConfiguration : cache.getRuntimeConfiguration().getServiceConfigurations()) { + for (ServiceConfiguration serviceConfiguration : cache.getRuntimeConfiguration().getServiceConfigurations()) { if (serviceConfiguration instanceof DefaultCopierConfiguration) { DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration) serviceConfiguration; if(!copierConfig.getClazz().isAssignableFrom(IdentityCopier.class)) @@ -119,9 +125,9 @@ private Eh107Cache wrapEhcacheCache(String alias, InternalCache config = new Eh107ReverseConfiguration<>(cache, cacheLoaderWriter != null, cacheLoaderWriter != null, storeByValueOnHeap); configurationMerger.setUpManagementAndStats(cache, config); - Eh107Expiry expiry = new EhcacheExpiryWrapper<>(cache.getRuntimeConfiguration().getExpiry()); + Eh107Expiry expiry = new EhcacheExpiryWrapper<>(cache.getRuntimeConfiguration().getExpiryPolicy()); CacheResources resources = new CacheResources<>(alias, wrapCacheLoaderWriter(cacheLoaderWriter), expiry); - return new Eh107Cache<>(alias, config, resources, cache, this); + return new Eh107Cache<>(alias, config, resources, cache, statisticsService, this); } private Jsr107CacheLoaderWriter wrapCacheLoaderWriter(CacheLoaderWriter cacheLoaderWriter) { @@ -162,6 +168,7 @@ public > Cache createCache(String cach synchronized (cachesLock) { if (config instanceof Eh107Configuration.Eh107ConfigurationWrapper) { + @SuppressWarnings("unchecked") Eh107Configuration.Eh107ConfigurationWrapper configurationWrapper = (Eh107Configuration.Eh107ConfigurationWrapper)config; CacheConfiguration unwrap = configurationWrapper.getCacheConfiguration(); final org.ehcache.Cache ehcache; @@ -193,14 +200,10 @@ public > Cache createCache(String cach try { ehCache = (InternalCache)ehCacheManager.createCache(cacheName, configHolder.cacheConfiguration); } catch (IllegalArgumentException e) { - MultiCacheException mce = new MultiCacheException(e); - configHolder.cacheResources.closeResources(mce); - throw new CacheException("A Cache named [" + cacheName + "] already exists", mce); + throw configHolder.cacheResources.closeResourcesAfter(new CacheException("A Cache named [" + cacheName + "] already exists")); } catch (Throwable t) { // something went wrong in ehcache land, make sure to clean up our stuff - MultiCacheException mce = new MultiCacheException(t); - configHolder.cacheResources.closeResources(mce); - throw mce; + throw configHolder.cacheResources.closeResourcesAfter(new CacheException(t)); } Eh107Cache cache = null; @@ -211,7 +214,7 @@ public > Cache createCache(String cach cacheResources.getExpiryPolicy(), cacheResources.getListenerResources()); } cache = new Eh107Cache<>(cacheName, new Eh107CompleteConfiguration<>(configHolder.jsr107Configuration, ehCache - .getRuntimeConfiguration()), cacheResources, ehCache, this); + .getRuntimeConfiguration()), cacheResources, ehCache, statisticsService, this); caches.put(cacheName, cache); @@ -225,13 +228,11 @@ public > Cache createCache(String cach return cache; } catch (Throwable t) { - MultiCacheException mce = new MultiCacheException(t); if (cache != null) { - cache.closeInternal(mce); + throw cache.closeInternalAfter(new CacheException(t)); } else { - cacheResources.closeResources(mce); + throw cacheResources.closeResourcesAfter(new CacheException(t)); } - throw mce; } } } @@ -286,18 +287,7 @@ public Cache getCache(String cacheName) { throw new NullPointerException(); } - Eh107Cache cache = safeCacheRetrieval(cacheName); - - if (cache == null) { - return null; - } - - if (cache.getConfiguration(Configuration.class).getKeyType() != Object.class - || cache.getConfiguration(Configuration.class).getValueType() != Object.class) { - throw new IllegalArgumentException("Cache [" + cacheName - + "] specifies key/value types. Use getCache(String, Class, Class)"); - } - return cache; + return safeCacheRetrieval(cacheName); } @SuppressWarnings("unchecked") @@ -311,6 +301,7 @@ private Eh107Cache safeCacheRetrieval(final String cacheName) { @Override public Iterable getCacheNames() { + checkClosed(); refreshAllCaches(); return Collections.unmodifiableList(new ArrayList<>(caches.keySet())); } @@ -321,7 +312,6 @@ public void destroyCache(String cacheName) { throw new NullPointerException(); } - MultiCacheException destroyException = new MultiCacheException(); synchronized (cachesLock) { checkClosed(); @@ -333,27 +323,25 @@ public void destroyCache(String cacheName) { } try { - enableManagement(cache, false); - } catch (Throwable t) { - destroyException.addThrowable(t); - } - - try { - enableStatistics(cache, false); - } catch (Throwable t) { - destroyException.addThrowable(t); - } - - cache.destroy(destroyException); - - try { - ehCacheManager.removeCache(cache.getName()); + chain( + () -> enableManagement(cache, false), + () -> enableStatistics(cache, false), + () -> cache.closeInternal(), + () -> ehCacheManager.removeCache(cache.getName()), + () -> { + if (ehCacheManager instanceof PersistentCacheManager) { + try { + ((PersistentCacheManager) ehCacheManager).destroyCache(cache.getName()); + } catch (CachePersistenceException t) { + throw new IOException(t); + } + } + } + ); } catch (Throwable t) { - destroyException.addThrowable(t); + throw new CacheException(t); } } - - destroyException.throwIfNotEmpty(); } @Override @@ -449,68 +437,30 @@ public T unwrap(Class clazz) { @Override public void close() { - MultiCacheException closeException = new MultiCacheException(); - cachingProvider.close(this, closeException); - closeException.throwIfNotEmpty(); + cachingProvider.close(this); } - void closeInternal(MultiCacheException closeException) { - try { - synchronized (cachesLock) { - for (Eh107Cache cache : caches.values()) { - try { - close(cache, closeException); - } catch (Throwable t) { - closeException.addThrowable(t); - } - } - - try { - caches.clear(); - } catch (Throwable t) { - closeException.addThrowable(t); - } - - try { - ehCacheManager.close(); - } catch (Throwable t) { - closeException.addThrowable(t); - } + void closeInternal() { + synchronized (cachesLock) { + try { + closeAll(caches.values(), (Closeable) caches::clear, ehCacheManager); + } catch (IOException e) { + throw new CacheException(e); } - } catch (Throwable t) { - closeException.addThrowable(t); } } - void close(Eh107Cache cache, MultiCacheException closeException) { - try { - if (caches.remove(cache.getName(), cache)) { - try { - unregisterObject(cache.getManagementMBean()); - } catch (Throwable t) { - closeException.addThrowable(t); - } - - try { - unregisterObject(cache.getStatisticsMBean()); - } catch (Throwable t) { - closeException.addThrowable(t); - } - - try { - cache.closeInternal(closeException); - } catch (Throwable t) { - closeException.addThrowable(t); - } - - try { - ehCacheManager.removeCache(cache.getName()); - } catch (Throwable t) { - closeException.addThrowable(t); - } + void close(Eh107Cache cache) { + if (caches.remove(cache.getName(), cache)) { + try { + chain( + () -> unregisterObject(cache.getManagementMBean()), + () -> unregisterObject(cache.getStatisticsMBean()), + () -> cache.closeInternal(), + () -> ehCacheManager.removeCache(cache.getName())); + } catch (Throwable t) { + throw new CacheException(t); } - } catch (Throwable t) { - closeException.addThrowable(t); } } } diff --git a/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java new file mode 100644 index 0000000000..a2d94cd83b --- /dev/null +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java @@ -0,0 +1,116 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107; + +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.core.statistics.CacheStatistics; +import org.ehcache.jsr107.internal.Jsr107LatencyMonitor; + +import java.net.URI; + +/** + * @author Ludovic Orban + */ +class Eh107CacheStatisticsMXBean extends Eh107MXBean implements javax.cache.management.CacheStatisticsMXBean { + + private final CacheStatistics cacheStatistics; + + private final Jsr107LatencyMonitor averageGetTime; + private final Jsr107LatencyMonitor averagePutTime; + private final Jsr107LatencyMonitor averageRemoveTime; + + Eh107CacheStatisticsMXBean(String cacheName, URI cacheManagerURI, StatisticsService statisticsService) { + super(cacheName, cacheManagerURI, "CacheStatistics"); + + cacheStatistics = statisticsService.getCacheStatistics(cacheName); + + averageGetTime = registerDerivedStatistics(CacheOperationOutcomes.GetOutcome.class, "get"); + averagePutTime = registerDerivedStatistics(CacheOperationOutcomes.PutOutcome.class, "put"); + averageRemoveTime = registerDerivedStatistics(CacheOperationOutcomes.RemoveOutcome.class, "remove"); + } + + private > Jsr107LatencyMonitor registerDerivedStatistics(Class outcome, String name) { + Jsr107LatencyMonitor monitor = new Jsr107LatencyMonitor<>(outcome); + CacheStatistics cacheStatistics = this.cacheStatistics; + cacheStatistics.registerDerivedStatistic(outcome, name, monitor); + return monitor; + } + + @Override + public void clear() { + cacheStatistics.clear(); + averageGetTime.clear(); + averagePutTime.clear(); + averageRemoveTime.clear(); + } + + @Override + public long getCacheHits() { + return cacheStatistics.getCacheHits(); + } + + @Override + public float getCacheHitPercentage() { + return cacheStatistics.getCacheHitPercentage(); + } + + @Override + public long getCacheMisses() { + return cacheStatistics.getCacheMisses(); + } + + @Override + public float getCacheMissPercentage() { + return cacheStatistics.getCacheMissPercentage(); + } + + @Override + public long getCacheGets() { + return cacheStatistics.getCacheGets(); + } + + @Override + public long getCachePuts() { + return cacheStatistics.getCachePuts(); + } + + @Override + public long getCacheRemovals() { + return cacheStatistics.getCacheRemovals(); + } + + @Override + public long getCacheEvictions() { + return cacheStatistics.getCacheEvictions(); + } + + @Override + public float getAverageGetTime() { + return (float) averageGetTime.average(); + } + + @Override + public float getAveragePutTime() { + return (float) averagePutTime.average(); + } + + @Override + public float getAverageRemoveTime() { + return (float) averageRemoveTime.average(); + } + +} diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java similarity index 96% rename from 107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java index 5dcfe8c74b..0dba47507c 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java @@ -20,7 +20,6 @@ import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.spi.service.ServiceConfiguration; -import java.io.ObjectStreamException; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -110,8 +109,8 @@ public Eh107CompleteConfiguration(Configuration config, final CacheConfigu private static boolean isStoreByValue(Configuration config, CacheConfiguration ehcacheConfig) { if(ehcacheConfig != null) { - Collection> serviceConfigurations = ehcacheConfig.getServiceConfigurations(); - for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { + Collection> serviceConfigurations = ehcacheConfig.getServiceConfigurations(); + for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { if (serviceConfiguration instanceof DefaultCopierConfiguration) { DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; if(copierConfig.getType().equals(DefaultCopierConfiguration.Type.VALUE)) { @@ -207,7 +206,7 @@ public T unwrap(Class clazz) { return Unwrap.unwrap(clazz, this, ehcacheConfig); } - private Object writeReplace() throws ObjectStreamException { + private Object writeReplace() { throw new UnsupportedOperationException("Serialization of Ehcache provider configuration classes is not supported"); } diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107Configuration.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Configuration.java similarity index 95% rename from 107/src/main/java/org/ehcache/jsr107/Eh107Configuration.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Configuration.java index 5c85f79fd6..0b8e8f8f4f 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107Configuration.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Configuration.java @@ -118,8 +118,7 @@ public Class getValueType() { @Override public boolean isStoreByValue() { - Collection copierConfig = findAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); - return !copierConfig.isEmpty(); + return !findAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()).isEmpty(); } } } diff --git a/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Expiry.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Expiry.java new file mode 100644 index 0000000000..e3ccaacaee --- /dev/null +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107Expiry.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107; + +import org.ehcache.expiry.ExpiryPolicy; + +import java.time.Duration; +import java.util.function.Supplier; + +/** + * Eh107Expiry + */ +abstract class Eh107Expiry implements ExpiryPolicy { + private final ThreadLocal shortCircuitAccess = new ThreadLocal<>(); + + void enableShortCircuitAccessCalls() { + shortCircuitAccess.set(this); + } + + void disableShortCircuitAccessCalls() { + shortCircuitAccess.remove(); + } + + private boolean isShortCircuitAccessCalls() { + return shortCircuitAccess.get() != null; + } + + @Override + public final Duration getExpiryForAccess(K key, Supplier value) { + if (isShortCircuitAccessCalls()) { + return null; + } else { + return getExpiryForAccessInternal(key, value); + } + } + + protected abstract Duration getExpiryForAccessInternal(K key, Supplier value); +} diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107IdentityCopier.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107IdentityCopier.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/Eh107IdentityCopier.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107IdentityCopier.java diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107MXBean.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107MXBean.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/Eh107MXBean.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107MXBean.java diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107ReverseConfiguration.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107ReverseConfiguration.java similarity index 97% rename from 107/src/main/java/org/ehcache/jsr107/Eh107ReverseConfiguration.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Eh107ReverseConfiguration.java index a9199cefc1..c667c8a795 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107ReverseConfiguration.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Eh107ReverseConfiguration.java @@ -102,7 +102,7 @@ public boolean isStoreByValue() { return storeByValueOnHeap; } - private Object writeReplace() throws ObjectStreamException { + private Object writeReplace() { throw new UnsupportedOperationException("Serialization of Ehcache provider configuration classes is not supported"); } } diff --git a/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java b/ehcache-107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java similarity index 81% rename from 107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java index 496aba8080..15febe4e31 100644 --- a/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java @@ -16,33 +16,40 @@ package org.ehcache.jsr107; import org.ehcache.config.Configuration; +import org.ehcache.core.EhcacheManager; import org.ehcache.core.config.DefaultConfiguration; -import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.service.ServiceUtils; +import org.ehcache.core.util.ClassLoading; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; +import org.ehcache.impl.serialization.PlainJavaSerializer; import org.ehcache.jsr107.config.Jsr107Configuration; -import org.ehcache.jsr107.config.Jsr107Service; import org.ehcache.jsr107.internal.DefaultJsr107Service; -import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.xml.XmlConfiguration; +import org.osgi.service.component.annotations.Component; import java.net.URI; import java.net.URISyntaxException; -import java.util.ArrayList; import java.util.Collection; import java.util.Map; import java.util.Properties; import java.util.WeakHashMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.function.UnaryOperator; +import javax.cache.CacheException; import javax.cache.CacheManager; import javax.cache.configuration.OptionalFeature; import javax.cache.spi.CachingProvider; +import static org.ehcache.jsr107.CloseUtil.chain; + /** * {@link CachingProvider} implementation for Ehcache. */ +@Component public class EhcacheCachingProvider implements CachingProvider { private static final String DEFAULT_URI_STRING = "urn:X-ehcache:jsr107-default-config"; @@ -133,24 +140,25 @@ Eh107CacheManager getCacheManager(ConfigSupplier configSupplier, Properties prop } private Eh107CacheManager createCacheManager(URI uri, Configuration config, Properties properties) { - Eh107CacheLoaderWriterProvider cacheLoaderWriterFactory = new Eh107CacheLoaderWriterProvider(); - - Object[] serviceCreationConfigurations = config.getServiceCreationConfigurations().toArray(); + Collection> serviceCreationConfigurations = config.getServiceCreationConfigurations(); Jsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, serviceCreationConfigurations)); + Eh107CacheLoaderWriterProvider cacheLoaderWriterFactory = new Eh107CacheLoaderWriterProvider(); + @SuppressWarnings("unchecked") + DefaultSerializationProviderConfiguration serializerConfiguration = new DefaultSerializationProviderConfiguration().addSerializerFor(Object.class, (Class) PlainJavaSerializer.class); - Collection services = new ArrayList<>(4); - services.add(cacheLoaderWriterFactory); - services.add(jsr107Service); - - if (ServiceUtils.findSingletonAmongst(DefaultSerializationProviderConfiguration.class, serviceCreationConfigurations) == null) { - services.add(new DefaultJsr107SerializationProvider()); - } + UnaryOperator customization = dependencies -> { + ServiceLocator.DependencySet d = dependencies.with(jsr107Service).with(cacheLoaderWriterFactory); + if (ServiceUtils.findSingletonAmongst(DefaultSerializationProviderConfiguration.class, serviceCreationConfigurations) == null) { + d = d.with(serializerConfiguration); + } + return d; + }; - Eh107InternalCacheManager ehcacheManager = new Eh107InternalCacheManager(config, services, !jsr107Service.jsr107CompliantAtomics()); + org.ehcache.CacheManager ehcacheManager = new EhcacheManager(config, customization, !jsr107Service.jsr107CompliantAtomics()); ehcacheManager.init(); - return new Eh107CacheManager(this, ehcacheManager, properties, config.getClassLoader(), uri, + return new Eh107CacheManager(this, ehcacheManager, jsr107Service, properties, config.getClassLoader(), uri, new ConfigurationMerger(config, jsr107Service, cacheLoaderWriterFactory)); } @@ -218,17 +226,16 @@ public void close(final ClassLoader classLoader) { throw new NullPointerException(); } - MultiCacheException closeException = new MultiCacheException(); synchronized (cacheManagers) { final ConcurrentMap map = cacheManagers.remove(classLoader); if (map != null) { - for (Eh107CacheManager cacheManager : map.values()) { - cacheManager.closeInternal(closeException); + try { + chain(map.values().stream().map(cm -> cm::closeInternal)); + } catch (Throwable t) { + throw new CacheException(t); } } } - - closeException.throwIfNotEmpty(); } /** @@ -240,17 +247,15 @@ public void close(final URI uri, final ClassLoader classLoader) { throw new NullPointerException(); } - MultiCacheException closeException = new MultiCacheException(); synchronized (cacheManagers) { final ConcurrentMap map = cacheManagers.get(classLoader); if (map != null) { final Eh107CacheManager cacheManager = map.remove(uri); if (cacheManager != null) { - cacheManager.closeInternal(closeException); + cacheManager.closeInternal(); } } } - closeException.throwIfNotEmpty(); } /** @@ -272,16 +277,12 @@ public boolean isSupported(final OptionalFeature optionalFeature) { throw new IllegalArgumentException("Unknown OptionalFeature: " + optionalFeature.name()); } - void close(Eh107CacheManager cacheManager, MultiCacheException closeException) { - try { - synchronized (cacheManagers) { - final ConcurrentMap map = cacheManagers.get(cacheManager.getClassLoader()); - if (map != null && map.remove(cacheManager.getURI()) != null) { - cacheManager.closeInternal(closeException); - } + void close(Eh107CacheManager cacheManager) { + synchronized (cacheManagers) { + final ConcurrentMap map = cacheManagers.get(cacheManager.getClassLoader()); + if (map != null && map.remove(cacheManager.getURI()) != null) { + cacheManager.closeInternal(); } - } catch (Throwable t) { - closeException.addThrowable(t); } } diff --git a/ehcache-107/src/main/java/org/ehcache/jsr107/EhcacheExpiryWrapper.java b/ehcache-107/src/main/java/org/ehcache/jsr107/EhcacheExpiryWrapper.java new file mode 100644 index 0000000000..e3a23b2a29 --- /dev/null +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/EhcacheExpiryWrapper.java @@ -0,0 +1,48 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107; + +import org.ehcache.expiry.ExpiryPolicy; + +import java.time.Duration; +import java.util.function.Supplier; + +/** + * EhcacheExpiryWrapper + */ +class EhcacheExpiryWrapper extends Eh107Expiry { + + private final ExpiryPolicy wrappedExpiry; + + EhcacheExpiryWrapper(ExpiryPolicy wrappedExpiry) { + this.wrappedExpiry = wrappedExpiry; + } + + @Override + public Duration getExpiryForCreation(K key, V value) { + return wrappedExpiry.getExpiryForCreation(key, value); + } + + @Override + protected Duration getExpiryForAccessInternal(K key, Supplier value) { + return wrappedExpiry.getExpiryForAccess(key, value); + } + + @Override + public Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue) { + return wrappedExpiry.getExpiryForUpdate(key, oldValue, newValue); + } +} diff --git a/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java b/ehcache-107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java similarity index 99% rename from 107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java index a47040ab6a..b03d1080db 100644 --- a/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java @@ -174,7 +174,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override public void onEvent(org.ehcache.event.CacheEvent ehEvent) { - Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.NormalEvent<>(source, EventType.CREATED, ehEvent, requestsOld); + Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.NormalEvent<>(source, EventType.CREATED, ehEvent, false); if (filter.evaluate(event)) { Set events = Collections.singleton(event); listener.onCreated((Iterable>) events); diff --git a/ehcache-107/src/main/java/org/ehcache/jsr107/ExpiryPolicyToEhcacheExpiry.java b/ehcache-107/src/main/java/org/ehcache/jsr107/ExpiryPolicyToEhcacheExpiry.java new file mode 100644 index 0000000000..3bf9dd4c0c --- /dev/null +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/ExpiryPolicyToEhcacheExpiry.java @@ -0,0 +1,84 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107; + +import org.ehcache.core.config.ExpiryUtils; + +import java.io.Closeable; +import java.io.IOException; +import java.util.function.Supplier; + +import javax.cache.expiry.Duration; +import javax.cache.expiry.ExpiryPolicy; + +class ExpiryPolicyToEhcacheExpiry extends Eh107Expiry implements Closeable { + + private final ExpiryPolicy expiryPolicy; + + ExpiryPolicyToEhcacheExpiry(ExpiryPolicy expiryPolicy) { + this.expiryPolicy = expiryPolicy; + } + + @Override + public java.time.Duration getExpiryForCreation(K key, V value) { + try { + Duration duration = expiryPolicy.getExpiryForCreation(); + return convertDuration(duration); + } catch (Throwable t) { + return java.time.Duration.ZERO; + } + } + + @Override + protected java.time.Duration getExpiryForAccessInternal(K key, Supplier value) { + try { + Duration duration = expiryPolicy.getExpiryForAccess(); + if (duration == null) { + return null; + } + return convertDuration(duration); + } catch (Throwable t) { + return java.time.Duration.ZERO; + } + } + + @Override + public java.time.Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue) { + try { + Duration duration = expiryPolicy.getExpiryForUpdate(); + if (duration == null) { + return null; + } + return convertDuration(duration); + } catch (Throwable t) { + return java.time.Duration.ZERO; + } + } + + @Override + public void close() throws IOException { + if (expiryPolicy instanceof Closeable) { + ((Closeable)expiryPolicy).close(); + } + } + + private java.time.Duration convertDuration(Duration duration) { + if (duration.isEternal()) { + return org.ehcache.expiry.ExpiryPolicy.INFINITE; + } + return java.time.Duration.of(duration.getDurationAmount(), ExpiryUtils.jucTimeUnitToTemporalUnit(duration.getTimeUnit())); + } +} diff --git a/107/src/main/java/org/ehcache/jsr107/config/Jsr107Service.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Jsr107Service.java similarity index 85% rename from 107/src/main/java/org/ehcache/jsr107/config/Jsr107Service.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/Jsr107Service.java index b713fa0b95..6d1754cd51 100644 --- a/107/src/main/java/org/ehcache/jsr107/config/Jsr107Service.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Jsr107Service.java @@ -14,8 +14,10 @@ * limitations under the License. */ -package org.ehcache.jsr107.config; +package org.ehcache.jsr107; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.jsr107.config.ConfigurationElementState; import org.ehcache.spi.service.Service; /** @@ -54,4 +56,10 @@ public interface Jsr107Service extends Service { */ ConfigurationElementState isStatisticsEnabledOnAllCaches(); + /** + * Returns the statistics service associated with this JSR107 service. + * + * @return the statistics service + */ + StatisticsService getStatistics(); } diff --git a/107/src/main/java/org/ehcache/jsr107/ListenerResources.java b/ehcache-107/src/main/java/org/ehcache/jsr107/ListenerResources.java similarity index 81% rename from 107/src/main/java/org/ehcache/jsr107/ListenerResources.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/ListenerResources.java index d4fa873b92..cb43b3befa 100644 --- a/107/src/main/java/org/ehcache/jsr107/ListenerResources.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/ListenerResources.java @@ -17,16 +17,19 @@ package org.ehcache.jsr107; import java.io.Closeable; -import java.io.IOException; import java.util.Collections; import java.util.List; import javax.cache.Cache; +import javax.cache.CacheException; import javax.cache.configuration.CacheEntryListenerConfiguration; import javax.cache.configuration.Factory; import javax.cache.event.CacheEntryEventFilter; import javax.cache.event.CacheEntryListener; +import static org.ehcache.jsr107.CloseUtil.closeAllAfter; +import static org.ehcache.jsr107.CloseUtil.closeAll; + /** * ListenerResources */ @@ -37,8 +40,7 @@ class ListenerResources implements Closeable { private List> ehListeners = null; @SuppressWarnings("unchecked") - static ListenerResources createListenerResources(CacheEntryListenerConfiguration listenerConfig, - MultiCacheException mce) { + static ListenerResources createListenerResources(CacheEntryListenerConfiguration listenerConfig) { CacheEntryListener listener = listenerConfig.getCacheEntryListenerFactory().create(); // create the filter, closing the listener above upon exception @@ -49,21 +51,16 @@ static ListenerResources createListenerResources(CacheEntryListener if (filterFactory != null) { filter = listenerConfig.getCacheEntryEventFilterFactory().create(); } else { - filter = (CacheEntryEventFilter) NullCacheEntryEventFilter.INSTANCE; + filter = event -> true; } } catch (Throwable t) { - mce.addThrowable(t); - CacheResources.close(listener, mce); - throw mce; + throw closeAllAfter(new CacheException(t), listener); } try { return new ListenerResources<>(listener, filter); } catch (Throwable t) { - mce.addThrowable(t); - CacheResources.close(filter, mce); - CacheResources.close(listener, mce); - throw mce; + throw closeAllAfter(new CacheException(t), filter, listener); } } @@ -91,11 +88,12 @@ synchronized List> getEhcacheLi } @Override - public void close() throws IOException { - MultiCacheException mce = new MultiCacheException(); - CacheResources.close(listener, mce); - CacheResources.close(filter, mce); - mce.throwIfNotEmpty(); + public void close() { + try { + closeAll(listener, filter); + } catch (Throwable t) { + throw new CacheException(t); + } } } diff --git a/107/src/main/java/org/ehcache/jsr107/NullCompletionListener.java b/ehcache-107/src/main/java/org/ehcache/jsr107/NullCompletionListener.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/NullCompletionListener.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/NullCompletionListener.java diff --git a/ehcache-107/src/main/java/org/ehcache/jsr107/Unwrap.java b/ehcache-107/src/main/java/org/ehcache/jsr107/Unwrap.java new file mode 100644 index 0000000000..2bdafe4a5d --- /dev/null +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/Unwrap.java @@ -0,0 +1,35 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107; + +import static java.util.Arrays.stream; +import static java.util.Objects.requireNonNull; + +/** + * @author teck + */ +final class Unwrap { + + static T unwrap(Class clazz, Object... obj) { + requireNonNull(clazz); + return stream(obj).filter(clazz::isInstance).map(clazz::cast).findFirst() + .orElseThrow(() -> new IllegalArgumentException("Cannot unwrap to " + clazz)); + } + + private Unwrap() { + // + } +} diff --git a/107/src/main/java/org/ehcache/jsr107/config/ConfigurationElementState.java b/ehcache-107/src/main/java/org/ehcache/jsr107/config/ConfigurationElementState.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/config/ConfigurationElementState.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/config/ConfigurationElementState.java diff --git a/107/src/main/java/org/ehcache/jsr107/config/Jsr107CacheConfiguration.java b/ehcache-107/src/main/java/org/ehcache/jsr107/config/Jsr107CacheConfiguration.java similarity index 96% rename from 107/src/main/java/org/ehcache/jsr107/config/Jsr107CacheConfiguration.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/config/Jsr107CacheConfiguration.java index 8791e4f20a..cba5dfa4f9 100644 --- a/107/src/main/java/org/ehcache/jsr107/config/Jsr107CacheConfiguration.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/config/Jsr107CacheConfiguration.java @@ -16,12 +16,13 @@ package org.ehcache.jsr107.config; +import org.ehcache.jsr107.Jsr107Service; import org.ehcache.spi.service.ServiceConfiguration; /** * Jsr107CacheConfiguration */ -public class Jsr107CacheConfiguration implements ServiceConfiguration { +public class Jsr107CacheConfiguration implements ServiceConfiguration { private final ConfigurationElementState statisticsEnabled; private final ConfigurationElementState managementEnabled; diff --git a/107/src/main/java/org/ehcache/jsr107/config/Jsr107Configuration.java b/ehcache-107/src/main/java/org/ehcache/jsr107/config/Jsr107Configuration.java similarity index 94% rename from 107/src/main/java/org/ehcache/jsr107/config/Jsr107Configuration.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/config/Jsr107Configuration.java index 13f6cc1ee9..a6cf09ce10 100644 --- a/107/src/main/java/org/ehcache/jsr107/config/Jsr107Configuration.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/config/Jsr107Configuration.java @@ -16,6 +16,7 @@ package org.ehcache.jsr107.config; +import org.ehcache.jsr107.Jsr107Service; import org.ehcache.spi.service.ServiceCreationConfiguration; import java.util.Map; @@ -24,7 +25,7 @@ /** * {@link ServiceCreationConfiguration} for default {@link Jsr107Service} implementation. */ -public class Jsr107Configuration implements ServiceCreationConfiguration { +public class Jsr107Configuration implements ServiceCreationConfiguration { private final String defaultTemplate; private final boolean jsr107CompliantAtomics; @@ -37,8 +38,8 @@ public class Jsr107Configuration implements ServiceCreationConfiguration templates, boolean jsr107CompliantAtomics, ConfigurationElementState enableManagementAll, ConfigurationElementState enableStatisticsAll) { diff --git a/107/src/main/java/org/ehcache/jsr107/config/package-info.java b/ehcache-107/src/main/java/org/ehcache/jsr107/config/package-info.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/config/package-info.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/config/package-info.java diff --git a/107/src/main/java/org/ehcache/jsr107/internal/DefaultJsr107Service.java b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/DefaultJsr107Service.java similarity index 87% rename from 107/src/main/java/org/ehcache/jsr107/internal/DefaultJsr107Service.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/internal/DefaultJsr107Service.java index ed95db8061..e9ba6be1fd 100644 --- a/107/src/main/java/org/ehcache/jsr107/internal/DefaultJsr107Service.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/DefaultJsr107Service.java @@ -19,15 +19,18 @@ import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.jsr107.config.ConfigurationElementState; import org.ehcache.jsr107.config.Jsr107Configuration; -import org.ehcache.jsr107.config.Jsr107Service; +import org.ehcache.jsr107.Jsr107Service; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.Service; +import static java.util.Objects.requireNonNull; + @ServiceDependencies(StatisticsService.class) public class DefaultJsr107Service implements Jsr107Service { private final Jsr107Configuration configuration; + private volatile StatisticsService statisticsService; public DefaultJsr107Service(Jsr107Configuration configuration) { this.configuration = configuration; @@ -35,7 +38,7 @@ public DefaultJsr107Service(Jsr107Configuration configuration) { @Override public void start(final ServiceProvider serviceProvider) { - // no-op + this.statisticsService = serviceProvider.getService(StatisticsService.class); } @Override @@ -82,4 +85,9 @@ public ConfigurationElementState isStatisticsEnabledOnAllCaches() { return configuration.isEnableStatisticsAll(); } } + + @Override + public StatisticsService getStatistics() { + return requireNonNull(statisticsService); + } } diff --git a/107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParser.java b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParser.java similarity index 82% rename from 107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParser.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParser.java index 28caa6c52b..81203ee0a9 100644 --- a/107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParser.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParser.java @@ -16,12 +16,14 @@ package org.ehcache.jsr107.internal; + import org.ehcache.jsr107.config.ConfigurationElementState; import org.ehcache.jsr107.config.Jsr107CacheConfiguration; -import org.ehcache.jsr107.config.Jsr107Service; +import org.ehcache.jsr107.Jsr107Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.xml.CacheServiceConfigurationParser; import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.osgi.service.component.annotations.Component; import org.w3c.dom.Element; import java.io.IOException; @@ -34,10 +36,11 @@ /** * Jsr107CacheConfigurationParser */ +@Component public class Jsr107CacheConfigurationParser implements CacheServiceConfigurationParser { private static final URI NAMESPACE = URI.create("http://www.ehcache.org/v3/jsr107"); - private static final URL XML_SCHEMA = Jsr107CacheConfigurationParser.class.getResource("/ehcache-107ext.xsd"); + private static final URL XML_SCHEMA = Jsr107CacheConfigurationParser.class.getResource("/ehcache-107-ext.xsd"); private static final String MANAGEMENT_ENABLED_ATTRIBUTE = "enable-management"; private static final String STATISTICS_ENABLED_ATTRIBUTE = "enable-statistics"; @@ -52,7 +55,7 @@ public URI getNamespace() { } @Override - public ServiceConfiguration parseServiceConfiguration(Element fragment) { + public ServiceConfiguration parseServiceConfiguration(Element fragment, ClassLoader classLoader) { String localName = fragment.getLocalName(); if ("mbeans".equals(localName)) { ConfigurationElementState managementEnabled = ConfigurationElementState.UNSPECIFIED; @@ -69,4 +72,15 @@ public ServiceConfiguration parseServiceConfiguration(Element fra fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); } } + + @Override + public Class getServiceType() { + return Jsr107Service.class; + } + + @Override + public Element unparseServiceConfiguration(ServiceConfiguration serviceConfiguration) { + throw new XmlConfigurationException("XML translation of JSR-107 cache elements are not supported"); + } + } diff --git a/107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheLoaderWriter.java b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheLoaderWriter.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheLoaderWriter.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107CacheLoaderWriter.java diff --git a/ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107LatencyMonitor.java b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107LatencyMonitor.java new file mode 100644 index 0000000000..5c03ae35da --- /dev/null +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107LatencyMonitor.java @@ -0,0 +1,47 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107.internal; + +import org.ehcache.core.statistics.ChainedOperationObserver; + +import java.util.EnumSet; + +public class Jsr107LatencyMonitor> implements ChainedOperationObserver { + + private final org.terracotta.statistics.derived.latency.Jsr107LatencyMonitor delegate; + + public Jsr107LatencyMonitor(Class outcome) { + delegate = new org.terracotta.statistics.derived.latency.Jsr107LatencyMonitor<>(EnumSet.allOf(outcome), 1.0); + } + + public double average() { + return delegate.average(); + } + + public void clear() { + delegate.clear(); + } + + @Override + public void begin(long time) { + delegate.begin(time); + } + + @Override + public void end(long time, long latency, T result) { + delegate.end(time, latency, result); + } +} diff --git a/107/src/main/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParser.java b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParser.java similarity index 83% rename from 107/src/main/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParser.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParser.java index 51dc5fa2c1..83250dbfe5 100644 --- a/107/src/main/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParser.java +++ b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParser.java @@ -19,8 +19,10 @@ import org.ehcache.jsr107.config.ConfigurationElementState; import org.ehcache.jsr107.config.Jsr107Configuration; import org.ehcache.xml.CacheManagerServiceConfigurationParser; -import org.ehcache.jsr107.config.Jsr107Service; +import org.ehcache.jsr107.Jsr107Service; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.osgi.service.component.annotations.Component; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; @@ -38,10 +40,11 @@ /** * @author Alex Snaps */ +@Component public class Jsr107ServiceConfigurationParser implements CacheManagerServiceConfigurationParser { private static final URI NAMESPACE = URI.create("http://www.ehcache.org/v3/jsr107"); - private static final URL XML_SCHEMA = Jsr107ServiceConfigurationParser.class.getResource("/ehcache-107ext.xsd"); + private static final URL XML_SCHEMA = Jsr107ServiceConfigurationParser.class.getResource("/ehcache-107-ext.xsd"); private static final String ENABLE_MANAGEMENT_ALL_ATTRIBUTE = "enable-management"; private static final String JSR_107_COMPLIANT_ATOMICS_ATTRIBUTE = "jsr-107-compliant-atomics"; private static final String ENABLE_STATISTICS_ALL_ATTRIBUTE = "enable-statistics"; @@ -60,7 +63,7 @@ public URI getNamespace() { } @Override - public ServiceCreationConfiguration parseServiceCreationConfiguration(final Element fragment) { + public ServiceCreationConfiguration parseServiceCreationConfiguration(final Element fragment, ClassLoader classLoader) { boolean jsr107CompliantAtomics = true; ConfigurationElementState enableManagementAll = ConfigurationElementState.UNSPECIFIED; ConfigurationElementState enableStatisticsAll = ConfigurationElementState.UNSPECIFIED; @@ -86,4 +89,15 @@ public ServiceCreationConfiguration parseServiceCreationConfigura return new Jsr107Configuration(defaultTemplate, templates, jsr107CompliantAtomics, enableManagementAll, enableStatisticsAll); } + + @Override + public Class getServiceType() { + return Jsr107Service.class; + } + + @Override + public Element unparseServiceCreationConfiguration(ServiceCreationConfiguration serviceCreationConfiguration) { + throw new XmlConfigurationException("XML translation of JSR-107 cache elements are not supported"); + } + } diff --git a/107/src/main/java/org/ehcache/jsr107/internal/WrappedCacheLoaderWriter.java b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/WrappedCacheLoaderWriter.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/internal/WrappedCacheLoaderWriter.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/internal/WrappedCacheLoaderWriter.java diff --git a/107/src/main/java/org/ehcache/jsr107/internal/tck/Eh107MBeanServerBuilder.java b/ehcache-107/src/main/java/org/ehcache/jsr107/internal/tck/Eh107MBeanServerBuilder.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/internal/tck/Eh107MBeanServerBuilder.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/internal/tck/Eh107MBeanServerBuilder.java diff --git a/107/src/main/java/org/ehcache/jsr107/package-info.java b/ehcache-107/src/main/java/org/ehcache/jsr107/package-info.java similarity index 100% rename from 107/src/main/java/org/ehcache/jsr107/package-info.java rename to ehcache-107/src/main/java/org/ehcache/jsr107/package-info.java diff --git a/107/src/main/resources/META-INF/services/javax.cache.spi.CachingProvider b/ehcache-107/src/main/resources/META-INF/services/javax.cache.spi.CachingProvider similarity index 100% rename from 107/src/main/resources/META-INF/services/javax.cache.spi.CachingProvider rename to ehcache-107/src/main/resources/META-INF/services/javax.cache.spi.CachingProvider diff --git a/107/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser b/ehcache-107/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser similarity index 100% rename from 107/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser rename to ehcache-107/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser diff --git a/107/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser b/ehcache-107/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser similarity index 100% rename from 107/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser rename to ehcache-107/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser diff --git a/107/src/main/resources/ehcache-107ext.xsd b/ehcache-107/src/main/resources/ehcache-107-ext.xsd similarity index 100% rename from 107/src/main/resources/ehcache-107ext.xsd rename to ehcache-107/src/main/resources/ehcache-107-ext.xsd diff --git a/ehcache-107/src/tck/resources/ExcludeList b/ehcache-107/src/tck/resources/ExcludeList new file mode 100644 index 0000000000..e84e6c9418 --- /dev/null +++ b/ehcache-107/src/tck/resources/ExcludeList @@ -0,0 +1,6 @@ +#List tests to be excluded. +#Lines beginning with a '#' are comments +#Enter One method per line with syntax FULL_CLASS_NAME#METHOD_NAME as in the example below + +# This is a dummy test that fails if not in the exclude list. +org.jsr107.tck.CachingTest#dummyTest diff --git a/107/src/test/java/com/pany/domain/Client.java b/ehcache-107/src/test/java/com/pany/domain/Client.java similarity index 95% rename from 107/src/test/java/com/pany/domain/Client.java rename to ehcache-107/src/test/java/com/pany/domain/Client.java index ae5745433b..7829184092 100644 --- a/107/src/test/java/com/pany/domain/Client.java +++ b/ehcache-107/src/test/java/com/pany/domain/Client.java @@ -23,6 +23,8 @@ */ public class Client implements Serializable { + private static final long serialVersionUID = 1L; + private final String name; private final long creditLine; diff --git a/107/src/test/java/com/pany/domain/Customer.java b/ehcache-107/src/test/java/com/pany/domain/Customer.java similarity index 100% rename from 107/src/test/java/com/pany/domain/Customer.java rename to ehcache-107/src/test/java/com/pany/domain/Customer.java diff --git a/107/src/test/java/com/pany/domain/Product.java b/ehcache-107/src/test/java/com/pany/domain/Product.java similarity index 100% rename from 107/src/test/java/com/pany/domain/Product.java rename to ehcache-107/src/test/java/com/pany/domain/Product.java diff --git a/107/src/test/java/com/pany/ehcache/ClientCopier.java b/ehcache-107/src/test/java/com/pany/ehcache/ClientCopier.java similarity index 100% rename from 107/src/test/java/com/pany/ehcache/ClientCopier.java rename to ehcache-107/src/test/java/com/pany/ehcache/ClientCopier.java diff --git a/107/src/test/java/com/pany/ehcache/MyEvictionAdvisor.java b/ehcache-107/src/test/java/com/pany/ehcache/MyEvictionAdvisor.java similarity index 100% rename from 107/src/test/java/com/pany/ehcache/MyEvictionAdvisor.java rename to ehcache-107/src/test/java/com/pany/ehcache/MyEvictionAdvisor.java diff --git a/107/src/test/java/com/pany/ehcache/Test107CacheEntryListener.java b/ehcache-107/src/test/java/com/pany/ehcache/Test107CacheEntryListener.java similarity index 100% rename from 107/src/test/java/com/pany/ehcache/Test107CacheEntryListener.java rename to ehcache-107/src/test/java/com/pany/ehcache/Test107CacheEntryListener.java diff --git a/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java b/ehcache-107/src/test/java/com/pany/ehcache/TestCacheEventListener.java similarity index 100% rename from 107/src/test/java/com/pany/ehcache/TestCacheEventListener.java rename to ehcache-107/src/test/java/com/pany/ehcache/TestCacheEventListener.java diff --git a/107/src/test/java/com/pany/ehcache/integration/ProductCacheLoaderWriter.java b/ehcache-107/src/test/java/com/pany/ehcache/integration/ProductCacheLoaderWriter.java similarity index 84% rename from 107/src/test/java/com/pany/ehcache/integration/ProductCacheLoaderWriter.java rename to ehcache-107/src/test/java/com/pany/ehcache/integration/ProductCacheLoaderWriter.java index 30770b375e..efd2315a88 100644 --- a/107/src/test/java/com/pany/ehcache/integration/ProductCacheLoaderWriter.java +++ b/ehcache-107/src/test/java/com/pany/ehcache/integration/ProductCacheLoaderWriter.java @@ -38,13 +38,13 @@ public class ProductCacheLoaderWriter implements CacheLoaderWriter seen = new HashSet<>(); @Override - public Product load(final Long key) throws Exception { + public Product load(final Long key) { seen.add(key); return new Product(key); } @Override - public Map loadAll(final Iterable keys) throws Exception { + public Map loadAll(final Iterable keys) { for (Long key : keys) { seen.add(key); } @@ -52,7 +52,7 @@ public Map loadAll(final Iterable keys) throws Ex } @Override - public void write(final Long key, final Product value) throws Exception { + public void write(final Long key, final Product value) { List products = written.get(key); if(products == null) { products = new ArrayList<>(); @@ -65,17 +65,17 @@ public void write(final Long key, final Product value) throws Exception { } @Override - public void writeAll(final Iterable> entries) throws Exception { + public void writeAll(final Iterable> entries) { // no-op } @Override - public void delete(final Long key) throws Exception { + public void delete(final Long key) { // no-op } @Override - public void deleteAll(final Iterable keys) throws Exception { + public void deleteAll(final Iterable keys) { // no-op } } diff --git a/107/src/test/java/org/ehcache/ParsesConfigurationExtensionTest.java b/ehcache-107/src/test/java/org/ehcache/ParsesConfigurationExtensionTest.java similarity index 92% rename from 107/src/test/java/org/ehcache/ParsesConfigurationExtensionTest.java rename to ehcache-107/src/test/java/org/ehcache/ParsesConfigurationExtensionTest.java index eedc12cae4..26f774dff6 100644 --- a/107/src/test/java/org/ehcache/ParsesConfigurationExtensionTest.java +++ b/ehcache-107/src/test/java/org/ehcache/ParsesConfigurationExtensionTest.java @@ -23,20 +23,19 @@ import org.ehcache.config.CacheRuntimeConfiguration; import org.ehcache.core.EhcacheManager; import org.ehcache.core.spi.service.ServiceUtils; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.jsr107.config.Jsr107Configuration; import org.ehcache.config.ResourceType; import org.ehcache.xml.XmlConfiguration; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; import org.ehcache.jsr107.internal.DefaultJsr107Service; import org.ehcache.spi.service.Service; import org.junit.Test; import org.xml.sax.SAXException; import java.io.IOException; +import java.time.Duration; import java.util.Collections; import java.util.List; -import java.util.concurrent.TimeUnit; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.hamcrest.CoreMatchers.equalTo; @@ -55,7 +54,7 @@ public class ParsesConfigurationExtensionTest { @Test public void testConfigParse() throws ClassNotFoundException, SAXException, InstantiationException, IllegalAccessException, IOException { final XmlConfiguration configuration = new XmlConfiguration(this.getClass().getResource("/ehcache-107.xml")); - final DefaultJsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, configuration.getServiceCreationConfigurations().toArray())); + final DefaultJsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, configuration.getServiceCreationConfigurations())); final CacheManager cacheManager = new EhcacheManager(configuration, Collections.singletonList(jsr107Service)); cacheManager.init(); @@ -68,7 +67,7 @@ public void testConfigParse() throws ClassNotFoundException, SAXException, Insta @Test public void testXmlExample() throws ClassNotFoundException, SAXException, InstantiationException, IOException, IllegalAccessException { XmlConfiguration config = new XmlConfiguration(ParsesConfigurationExtensionTest.class.getResource("/ehcache-example.xml")); - final DefaultJsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, config.getServiceCreationConfigurations().toArray())); + final DefaultJsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, config.getServiceCreationConfigurations())); final CacheManager cacheManager = new EhcacheManager(config, Collections.singletonList(jsr107Service)); cacheManager.init(); @@ -83,9 +82,9 @@ public void testXmlExample() throws ClassNotFoundException, SAXException, Instan final CacheRuntimeConfiguration runtimeConfiguration = productCache.getRuntimeConfiguration(); assertThat(runtimeConfiguration.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(200L)); - final Expiry expiry = runtimeConfiguration.getExpiry(); - assertThat(expiry.getClass().getName(), equalTo("org.ehcache.expiry.Expirations$TimeToIdleExpiry")); - assertThat(expiry.getExpiryForAccess(42L, null), equalTo(new Duration(2, TimeUnit.MINUTES))); + final ExpiryPolicy expiry = runtimeConfiguration.getExpiryPolicy(); + assertThat(expiry.getClass().getName(), equalTo("org.ehcache.config.builders.ExpiryPolicyBuilder$TimeToIdleExpiryPolicy")); + assertThat(expiry.getExpiryForAccess(42L, null), equalTo(Duration.ofMinutes(2))); assertThat(runtimeConfiguration.getEvictionAdvisor(), instanceOf(com.pany.ehcache.MyEvictionAdvisor.class)); } diff --git a/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java b/ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java similarity index 92% rename from 107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java rename to ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java index be00ddb591..165fa321d6 100644 --- a/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java +++ b/ehcache-107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java @@ -22,7 +22,6 @@ import org.ehcache.config.ResourceType; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.core.config.DefaultConfiguration; -import org.ehcache.core.internal.util.ValueSuppliers; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.jsr107.Eh107Configuration; import org.ehcache.jsr107.EhcacheCachingProvider; @@ -39,7 +38,6 @@ import java.io.File; import java.util.Random; -import java.util.concurrent.TimeUnit; import javax.cache.Cache; import javax.cache.CacheManager; @@ -53,6 +51,7 @@ import javax.cache.expiry.ExpiryPolicy; import javax.cache.spi.CachingProvider; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -60,7 +59,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** @@ -129,12 +127,12 @@ public void testGettingToEhcacheConfiguration() { long nanoTime = System.nanoTime(); LOGGER.info("Seeding random with {}", nanoTime); Random random = new Random(nanoTime); - assertThat(runtimeConfiguration.getExpiry().getExpiryForCreation(random.nextLong(), Long.toOctalString(random.nextLong())), - equalTo(org.ehcache.expiry.Duration.INFINITE)); - assertThat(runtimeConfiguration.getExpiry().getExpiryForAccess(random.nextLong(), - ValueSuppliers.supplierOf(Long.toOctalString(random.nextLong()))), nullValue()); - assertThat(runtimeConfiguration.getExpiry().getExpiryForUpdate(random.nextLong(), - ValueSuppliers.supplierOf(Long.toOctalString(random.nextLong())), Long.toOctalString(random.nextLong())), nullValue()); + assertThat(runtimeConfiguration.getExpiryPolicy().getExpiryForCreation(random.nextLong(), Long.toOctalString(random.nextLong())), + equalTo(org.ehcache.expiry.ExpiryPolicy.INFINITE)); + assertThat(runtimeConfiguration.getExpiryPolicy().getExpiryForAccess(random.nextLong(), + () -> Long.toOctalString(random.nextLong())), nullValue()); + assertThat(runtimeConfiguration.getExpiryPolicy().getExpiryForUpdate(random.nextLong(), + () -> Long.toOctalString(random.nextLong()), Long.toOctalString(random.nextLong())), nullValue()); } @Test @@ -201,7 +199,7 @@ public void testWithoutEhcacheExplicitDependencyAndNoCodeChanges() throws Except CacheRuntimeConfiguration foosEhcacheConfig = (CacheRuntimeConfiguration)foosCache.getConfiguration( Eh107Configuration.class).unwrap(CacheRuntimeConfiguration.class); Client client1 = new Client("client1", 1); - foosEhcacheConfig.getExpiry().getExpiryForCreation(42L, client1).getLength(); // <8> + foosEhcacheConfig.getExpiryPolicy().getExpiryForCreation(42L, client1).toMinutes(); // <8> CompleteConfiguration foosConfig = foosCache.getConfiguration(CompleteConfiguration.class); @@ -214,8 +212,8 @@ public void testWithoutEhcacheExplicitDependencyAndNoCodeChanges() throws Except } // end::jsr107SupplementWithTemplatesExample[] assertThat(ehcacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L)); - assertThat(foosEhcacheConfig.getExpiry().getExpiryForCreation(42L, client1), - is(new org.ehcache.expiry.Duration(2, TimeUnit.MINUTES))); + assertThat(foosEhcacheConfig.getExpiryPolicy().getExpiryForCreation(42L, client1), + is(java.time.Duration.ofMinutes(2))); } @Test @@ -261,7 +259,7 @@ public void testTemplateOverridingStoreByRef() throws Exception { MutableConfiguration mutableConfiguration = new MutableConfiguration<>(); mutableConfiguration.setTypes(Long.class, Client.class).setStoreByValue(false); - Cache myCache = null; + Cache myCache; Client client1 = new Client("client1", 1); myCache = cacheManager.createCache("anotherCache", mutableConfiguration); diff --git a/107/src/test/java/org/ehcache/jsr107/CacheResourcesTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/CacheResourcesTest.java similarity index 88% rename from 107/src/test/java/org/ehcache/jsr107/CacheResourcesTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/CacheResourcesTest.java index ffa0c06f49..063366662a 100644 --- a/107/src/test/java/org/ehcache/jsr107/CacheResourcesTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/CacheResourcesTest.java @@ -19,12 +19,12 @@ import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.withSettings; import javax.cache.configuration.CacheEntryListenerConfiguration; import org.ehcache.jsr107.internal.Jsr107CacheLoaderWriter; import org.junit.Test; -import org.mockito.internal.creation.MockSettingsImpl; import java.io.Closeable; import java.util.HashMap; @@ -37,7 +37,7 @@ public class CacheResourcesTest { public void testRegisterDeregisterAfterClose() { Map, ListenerResources> emptyMap = emptyMap(); CacheResources cacheResources = new CacheResources<>("cache", null, null, emptyMap); - cacheResources.closeResources(new MultiCacheException()); + cacheResources.closeResources(); try { cacheResources.registerCacheEntryListener(mock(CacheEntryListenerConfiguration.class)); @@ -57,8 +57,8 @@ public void testRegisterDeregisterAfterClose() { @SuppressWarnings("unchecked") @Test public void closesAllResources() throws Exception { - Jsr107CacheLoaderWriter loaderWriter = mock(Jsr107CacheLoaderWriter.class, new MockSettingsImpl<>().extraInterfaces(Closeable.class)); - Eh107Expiry expiry = mock(Eh107Expiry.class, new MockSettingsImpl<>().extraInterfaces(Closeable.class)); + Jsr107CacheLoaderWriter loaderWriter = mock(Jsr107CacheLoaderWriter.class, withSettings().extraInterfaces(Closeable.class)); + Eh107Expiry expiry = mock(Eh107Expiry.class, withSettings().extraInterfaces(Closeable.class)); CacheEntryListenerConfiguration listenerConfiguration = mock(CacheEntryListenerConfiguration.class); ListenerResources listenerResources = mock(ListenerResources.class); @@ -67,7 +67,7 @@ public void closesAllResources() throws Exception { map.put(listenerConfiguration, listenerResources); CacheResources cacheResources = new CacheResources<>("cache", loaderWriter, expiry, map); - cacheResources.closeResources(new MultiCacheException()); + cacheResources.closeResources(); verify((Closeable) loaderWriter).close(); verify((Closeable) expiry).close(); diff --git a/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java similarity index 97% rename from 107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java index 16db6abf35..cf56e09d7f 100644 --- a/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java @@ -36,8 +36,8 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * ConfigStatsManagementActivationTest @@ -130,7 +130,7 @@ public void testEnableCacheLevelProgrammatic() throws Exception { CacheManager cacheManager = provider.getCacheManager(); CacheConfigurationBuilder configurationBuilder = newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .add(new Jsr107CacheConfiguration(ConfigurationElementState.ENABLED, ConfigurationElementState.ENABLED)); + .withService(new Jsr107CacheConfiguration(ConfigurationElementState.ENABLED, ConfigurationElementState.ENABLED)); Cache cache = cacheManager.createCache("test", Eh107Configuration.fromEhcacheCacheConfiguration(configurationBuilder)); @SuppressWarnings("unchecked") @@ -202,7 +202,7 @@ public void basicJsr107StillWorks() throws Exception { Cache cache = cacheManager.createCache("cache", configuration); @SuppressWarnings("unchecked") - Eh107Configuration eh107Configuration = cache.getConfiguration(Eh107Configuration.class); + Eh107Configuration eh107Configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(eh107Configuration.isManagementEnabled(), is(true)); assertThat(eh107Configuration.isStatisticsEnabled(), is(true)); diff --git a/107/src/test/java/org/ehcache/jsr107/ConfigurationMergerTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/ConfigurationMergerTest.java similarity index 87% rename from 107/src/test/java/org/ehcache/jsr107/ConfigurationMergerTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/ConfigurationMergerTest.java index 2f20b29728..d4783cb90e 100644 --- a/107/src/test/java/org/ehcache/jsr107/ConfigurationMergerTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/ConfigurationMergerTest.java @@ -17,16 +17,13 @@ package org.ehcache.jsr107; import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.core.spi.service.ServiceUtils; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.jsr107.config.Jsr107Configuration; -import org.ehcache.jsr107.config.Jsr107Service; import org.ehcache.jsr107.internal.DefaultJsr107Service; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.service.ServiceConfiguration; @@ -38,9 +35,10 @@ import org.mockito.internal.creation.MockSettingsImpl; import java.io.Closeable; +import java.time.Duration; import java.util.Collection; -import java.util.concurrent.TimeUnit; +import javax.cache.CacheException; import javax.cache.configuration.CacheEntryListenerConfiguration; import javax.cache.configuration.Factory; import javax.cache.configuration.MutableConfiguration; @@ -53,13 +51,12 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; @@ -90,13 +87,13 @@ public void mergeConfigNoTemplateNoLoaderWriter() { MutableConfiguration configuration = new MutableConfiguration<>(); ConfigurationMerger.ConfigHolder configHolder = merger.mergeConfigurations("cache", configuration); - assertThat(configHolder.cacheResources.getExpiryPolicy().getExpiryForCreation(42L, "Yay!"), is(Duration.INFINITE)); + assertThat(configHolder.cacheResources.getExpiryPolicy().getExpiryForCreation(42L, "Yay!"), is(org.ehcache.expiry.ExpiryPolicy.INFINITE)); assertThat(configHolder.cacheResources.getCacheLoaderWriter(), nullValue()); assertThat(configHolder.useEhcacheLoaderWriter, is(false)); boolean storeByValue = false; - Collection> serviceConfigurations = configHolder.cacheConfiguration.getServiceConfigurations(); - for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { + Collection> serviceConfigurations = configHolder.cacheConfiguration.getServiceConfigurations(); + for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { if (serviceConfiguration instanceof DefaultCopierConfiguration) { storeByValue = true; break; @@ -114,8 +111,8 @@ public void jsr107ExpiryGetsRegistered() { ConfigurationMerger.ConfigHolder configHolder = merger.mergeConfigurations("Cache", configuration); assertThat(factory.called, is(true)); - Expiry resourcesExpiry = configHolder.cacheResources.getExpiryPolicy(); - Expiry configExpiry = configHolder.cacheConfiguration.getExpiry(); + org.ehcache.expiry.ExpiryPolicy resourcesExpiry = configHolder.cacheResources.getExpiryPolicy(); + org.ehcache.expiry.ExpiryPolicy configExpiry = configHolder.cacheConfiguration.getExpiryPolicy(); assertThat(configExpiry, sameInstance(resourcesExpiry)); } @@ -165,7 +162,7 @@ public void loadsTemplateWhenNameFound() throws Exception { public void jsr107ExpiryGetsOverriddenByTemplate() throws Exception { when(jsr107Service.getTemplateNameForCache("cache")).thenReturn("cacheTemplate"); when(xmlConfiguration.newCacheConfigurationBuilderFromTemplate("cacheTemplate", Object.class, Object.class)).thenReturn( - newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withExpiry(Expirations.timeToLiveExpiration(new Duration(5, TimeUnit.MINUTES))) + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMinutes(5))) ); MutableConfiguration configuration = new MutableConfiguration<>(); @@ -176,9 +173,9 @@ public void jsr107ExpiryGetsOverriddenByTemplate() throws Exception { assertThat(factory.called, is(false)); Eh107Expiry expiryPolicy = configHolder.cacheResources.getExpiryPolicy(); - Expiry expiry = configHolder.cacheConfiguration.getExpiry(); - assertThat(expiryPolicy.getExpiryForAccess(42, supplierOf("Yay")), is(expiry.getExpiryForAccess(42, supplierOf("Yay")))); - assertThat(expiryPolicy.getExpiryForUpdate(42, supplierOf("Yay"), "Lala"), is(expiry.getExpiryForUpdate(42, supplierOf("Yay"), "Lala"))); + org.ehcache.expiry.ExpiryPolicy expiry = configHolder.cacheConfiguration.getExpiryPolicy(); + assertThat(expiryPolicy.getExpiryForAccess(42, () -> "Yay"), is(expiry.getExpiryForAccess(42, () -> "Yay"))); + assertThat(expiryPolicy.getExpiryForUpdate(42, () -> "Yay", "Lala"), is(expiry.getExpiryForUpdate(42, () -> "Yay", "Lala"))); assertThat(expiryPolicy.getExpiryForCreation(42, "Yay"), is(expiry.getExpiryForCreation(42, "Yay"))); } @@ -186,7 +183,7 @@ public void jsr107ExpiryGetsOverriddenByTemplate() throws Exception { public void jsr107LoaderGetsOverriddenByTemplate() throws Exception { when(jsr107Service.getTemplateNameForCache("cache")).thenReturn("cacheTemplate"); when(xmlConfiguration.newCacheConfigurationBuilderFromTemplate("cacheTemplate", Object.class, Object.class)).thenReturn( - newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).add(new DefaultCacheLoaderWriterConfiguration((Class)null)) + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(new DefaultCacheLoaderWriterConfiguration((Class)null)) ); MutableConfiguration configuration = new MutableConfiguration<>(); @@ -203,8 +200,8 @@ public void jsr107LoaderGetsOverriddenByTemplate() throws Exception { @Test public void jsr107StoreByValueGetsOverriddenByTemplate() throws Exception { CacheConfigurationBuilder builder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .add(new DefaultCopierConfiguration((Class)IdentityCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration((Class)IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); + .withService(new DefaultCopierConfiguration((Class)IdentityCopier.class, DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration((Class)IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); when(jsr107Service.getTemplateNameForCache("cache")).thenReturn("cacheTemplate"); when(xmlConfiguration.newCacheConfigurationBuilderFromTemplate("cacheTemplate", Object.class, Object.class)) @@ -215,10 +212,10 @@ public void jsr107StoreByValueGetsOverriddenByTemplate() throws Exception { ConfigurationMerger.ConfigHolder configHolder = merger.mergeConfigurations("cache", configuration); boolean storeByValue = true; - Collection> serviceConfigurations = configHolder.cacheConfiguration.getServiceConfigurations(); - for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { + Collection> serviceConfigurations = configHolder.cacheConfiguration.getServiceConfigurations(); + for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { if (serviceConfiguration instanceof DefaultCopierConfiguration) { - DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; + DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; if(copierConfig.getClazz().isAssignableFrom(IdentityCopier.class)) storeByValue = false; break; @@ -240,7 +237,7 @@ public void jsr107LoaderInitFailureClosesExpiry() throws Exception { try { merger.mergeConfigurations("cache", configuration); fail("Loader factory should have thrown"); - } catch (MultiCacheException mce) { + } catch (CacheException mce) { verify((Closeable) expiryPolicy).close(); } } @@ -259,7 +256,7 @@ public void jsr107ListenerFactoryInitFailureClosesExpiryLoader() throws Exceptio try { merger.mergeConfigurations("cache", configuration); fail("Loader factory should have thrown"); - } catch (MultiCacheException mce) { + } catch (CacheException mce) { verify((Closeable) expiryPolicy).close(); verify((Closeable) loader).close(); } @@ -311,7 +308,7 @@ public void setWriteThroughWithoutWriterFails() { @Test public void jsr107DefaultEh107IdentityCopierForImmutableTypes() { XmlConfiguration xmlConfiguration = new XmlConfiguration(getClass().getResource("/ehcache-107-copiers-immutable-types.xml")); - final DefaultJsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, xmlConfiguration.getServiceCreationConfigurations().toArray())); + DefaultJsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, xmlConfiguration.getServiceCreationConfigurations())); merger = new ConfigurationMerger(xmlConfiguration, jsr107Service, mock(Eh107CacheLoaderWriterProvider.class)); MutableConfiguration stringCacheConfiguration = new MutableConfiguration<>(); @@ -349,7 +346,7 @@ public void jsr107DefaultEh107IdentityCopierForImmutableTypes() { @Test public void jsr107DefaultEh107IdentityCopierForImmutableTypesWithCMLevelDefaults() { XmlConfiguration xmlConfiguration = new XmlConfiguration(getClass().getResource("/ehcache-107-immutable-types-cm-level-copiers.xml")); - final DefaultJsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, xmlConfiguration.getServiceCreationConfigurations().toArray())); + DefaultJsr107Service jsr107Service = new DefaultJsr107Service(ServiceUtils.findSingletonAmongst(Jsr107Configuration.class, xmlConfiguration.getServiceCreationConfigurations())); merger = new ConfigurationMerger(xmlConfiguration, jsr107Service, mock(Eh107CacheLoaderWriterProvider.class)); MutableConfiguration stringCacheConfiguration = new MutableConfiguration<>(); @@ -358,7 +355,7 @@ public void jsr107DefaultEh107IdentityCopierForImmutableTypesWithCMLevelDefaults assertThat(configHolder1.cacheConfiguration.getServiceConfigurations().isEmpty(), is(true)); - for (ServiceCreationConfiguration serviceCreationConfiguration : xmlConfiguration.getServiceCreationConfigurations()) { + for (ServiceCreationConfiguration serviceCreationConfiguration : xmlConfiguration.getServiceCreationConfigurations()) { if (serviceCreationConfiguration instanceof DefaultCopyProviderConfiguration) { DefaultCopyProviderConfiguration copierConfig = (DefaultCopyProviderConfiguration)serviceCreationConfiguration; assertThat(copierConfig.getDefaults().size(), is(6)); @@ -381,12 +378,12 @@ public void jsr107DefaultEh107IdentityCopierForImmutableTypesWithoutTemplates() assertDefaultCopier(configHolder1.cacheConfiguration.getServiceConfigurations()); } - private static void assertDefaultCopier(Collection> serviceConfigurations) { + private static void assertDefaultCopier(Collection> serviceConfigurations) { boolean noCopierConfigPresent = false; - for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { + for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { if (serviceConfiguration instanceof DefaultCopierConfiguration) { noCopierConfigPresent = true; - DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; + DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; assertThat(copierConfig.getClazz().isAssignableFrom(Eh107IdentityCopier.class), is(true)); } } @@ -406,6 +403,7 @@ private RecordingFactory factoryOf(final T instance) { } private static class RecordingFactory implements Factory { + private static final long serialVersionUID = 1L; private final T instance; boolean called; @@ -421,6 +419,8 @@ public T create() { } private static class ThrowingCacheEntryListenerConfiguration implements CacheEntryListenerConfiguration { + private static final long serialVersionUID = 1L; + @Override public Factory> getCacheEntryListenerFactory() { throw new UnsupportedOperationException("BOOM"); diff --git a/107/src/test/java/org/ehcache/jsr107/DefaultConfigurationResolverTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/DefaultConfigurationResolverTest.java similarity index 100% rename from 107/src/test/java/org/ehcache/jsr107/DefaultConfigurationResolverTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/DefaultConfigurationResolverTest.java diff --git a/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java similarity index 88% rename from 107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java index c633988975..ae4802d6af 100644 --- a/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java @@ -42,28 +42,26 @@ public void testCompileTimeTypeSafety() throws Exception { cache.put(1l, "one"); cache.put(2l, "two"); - Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); + Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); //This ensures that we have compile time type safety, i.e when configuration does not have types defined but // what you get cache as should work. - assertThat((Class)cache1CompleteConf.getKeyType(), is(equalTo(Object.class))); - assertThat((Class)cache1CompleteConf.getValueType(), is(equalTo(Object.class))); + assertThat(cache1CompleteConf.getKeyType(), is(equalTo(Object.class))); + assertThat(cache1CompleteConf.getValueType(), is(equalTo(Object.class))); assertThat(cache.get(1l), is(equalTo("one"))); assertThat(cache.get(2l), is(equalTo("two"))); - javax.cache.Cache second = cacheManager.getCache("cache1"); + javax.cache.Cache second = cacheManager.getCache("cache1"); second.put("3","three"); - assertThat((String)second.get("3"), is(equalTo("three"))); + assertThat(second.get("3"), is(equalTo("three"))); cacheManager.destroyCache("cache1"); cacheManager.close(); - } - @Test - public void testRunTimeTypeSafety() throws Exception { + public void testRunTimeTypeLaxity() throws Exception { CachingProvider provider = Caching.getCachingProvider(); javax.cache.CacheManager cacheManager = provider.getCacheManager(this.getClass().getResource("/ehcache-107-types.xml").toURI(), getClass().getClassLoader()); @@ -79,9 +77,6 @@ public void testRunTimeTypeSafety() throws Exception { try { cacheManager.getCache("cache1"); - fail("Caches with runtime types should throw illegal argument exception when different types are used in getcache"); - } catch (IllegalArgumentException e) { - //Empty block as nothing is required to be tested } finally { cacheManager.destroyCache("cache1"); cacheManager.close(); diff --git a/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java similarity index 99% rename from 107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java index 670dfdf9b7..0563e120a7 100644 --- a/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java @@ -76,6 +76,8 @@ public void setUp() throws Exception { public void test107CacheCanReturnCompleteConfigurationWhenNonePassedIn() { CacheManager cacheManager = cachingProvider.getCacheManager(); Cache cache = cacheManager.createCache("cacheWithoutCompleteConfig", new Configuration() { + private static final long serialVersionUID = 1L; + @Override public Class getKeyType() { return Long.class; diff --git a/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java similarity index 100% rename from 107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java diff --git a/107/src/test/java/org/ehcache/jsr107/IteratorTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/IteratorTest.java similarity index 86% rename from 107/src/test/java/org/ehcache/jsr107/IteratorTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/IteratorTest.java index 0cf04f6e73..11e200fbb0 100644 --- a/107/src/test/java/org/ehcache/jsr107/IteratorTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/IteratorTest.java @@ -23,7 +23,6 @@ import javax.cache.Cache; import javax.cache.CacheManager; import javax.cache.Caching; -import javax.cache.configuration.Factory; import javax.cache.configuration.MutableConfiguration; import javax.cache.expiry.Duration; import javax.cache.expiry.ExpiryPolicy; @@ -33,7 +32,6 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; /** * @author Ludovic Orban @@ -55,7 +53,7 @@ private void advanceTime(long delta) { } @Test - public void testIterateExpiredReturnsNull() throws Exception { + public void testIterateExpiredIsSkipped() throws Exception { EhcacheCachingProvider provider = (EhcacheCachingProvider) Caching.getCachingProvider(); TestTimeSource testTimeSource = new TestTimeSource(); TimeSourceConfiguration timeSourceConfiguration = new TimeSourceConfiguration(testTimeSource); @@ -86,16 +84,7 @@ public Duration getExpiryForUpdate() { testTimeSource.advanceTime(1000); Iterator> iterator = testCache.iterator(); - assertThat(iterator.hasNext(), is(true)); - - int loopCount = 0; - while (iterator.hasNext()) { - Cache.Entry next = iterator.next(); - assertThat(next, is(nullValue())); - - loopCount++; - } - assertThat(loopCount, is(1)); + assertThat(iterator.hasNext(), is(false)); cacheManager.close(); } diff --git a/ehcache-107/src/test/java/org/ehcache/jsr107/Jsr107CacheParserIT.java b/ehcache-107/src/test/java/org/ehcache/jsr107/Jsr107CacheParserIT.java new file mode 100644 index 0000000000..131d328e3a --- /dev/null +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/Jsr107CacheParserIT.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107; + +import org.ehcache.config.Configuration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.junit.Test; + +import java.net.URL; + +/** + * Jsr107CacheParserIT + */ +public class Jsr107CacheParserIT { + + @Test(expected = XmlConfigurationException.class) + public void testJsr107CacheXmlTranslationToString() { + URL resource = Jsr107CacheParserIT.class.getResource("/ehcache-107.xml"); + Configuration config = new XmlConfiguration(resource); + XmlConfiguration xmlConfig = new XmlConfiguration(config); + } +} diff --git a/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java b/ehcache-107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java similarity index 98% rename from 107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java index 8ad6714e49..27043ea2af 100644 --- a/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java @@ -32,8 +32,8 @@ import javax.cache.integration.CacheWriter; import javax.cache.spi.CachingProvider; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; diff --git a/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java similarity index 100% rename from 107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java diff --git a/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java similarity index 99% rename from 107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java index 906cf61f44..bd7265e365 100644 --- a/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java @@ -35,8 +35,8 @@ import javax.cache.integration.CacheWriter; import javax.cache.spi.CachingProvider; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; diff --git a/107/src/test/java/org/ehcache/jsr107/LongSerializer.java b/ehcache-107/src/test/java/org/ehcache/jsr107/LongSerializer.java similarity index 100% rename from 107/src/test/java/org/ehcache/jsr107/LongSerializer.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/LongSerializer.java diff --git a/107/src/test/java/org/ehcache/jsr107/ResourceCombinationsTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/ResourceCombinationsTest.java similarity index 98% rename from 107/src/test/java/org/ehcache/jsr107/ResourceCombinationsTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/ResourceCombinationsTest.java index e2790f82b7..d8964866fa 100644 --- a/107/src/test/java/org/ehcache/jsr107/ResourceCombinationsTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/ResourceCombinationsTest.java @@ -41,8 +41,8 @@ import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.MB; import static org.ehcache.jsr107.Eh107Configuration.fromEhcacheCacheConfiguration; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; @RunWith(Parameterized.class) public class ResourceCombinationsTest { diff --git a/107/src/test/java/org/ehcache/jsr107/SerializerTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/SerializerTest.java similarity index 97% rename from 107/src/test/java/org/ehcache/jsr107/SerializerTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/SerializerTest.java index 10bc298ac0..d638dc5097 100644 --- a/107/src/test/java/org/ehcache/jsr107/SerializerTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/SerializerTest.java @@ -27,7 +27,7 @@ import javax.cache.spi.CachingProvider; import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; /** * @author rism diff --git a/107/src/test/java/org/ehcache/jsr107/SimpleEh107ConfigTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/SimpleEh107ConfigTest.java similarity index 97% rename from 107/src/test/java/org/ehcache/jsr107/SimpleEh107ConfigTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/SimpleEh107ConfigTest.java index b37a89040a..09a8058b2e 100644 --- a/107/src/test/java/org/ehcache/jsr107/SimpleEh107ConfigTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/SimpleEh107ConfigTest.java @@ -24,7 +24,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; import javax.cache.Cache; @@ -40,9 +39,9 @@ import javax.cache.integration.CompletionListenerFuture; import javax.cache.spi.CachingProvider; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * SimpleEh107ConfigTest diff --git a/107/src/test/java/org/ehcache/jsr107/StatisticsTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/StatisticsTest.java similarity index 86% rename from 107/src/test/java/org/ehcache/jsr107/StatisticsTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/StatisticsTest.java index 8c88118b5b..5064e6a396 100644 --- a/107/src/test/java/org/ehcache/jsr107/StatisticsTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/StatisticsTest.java @@ -15,7 +15,6 @@ */ package org.ehcache.jsr107; -import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -26,15 +25,15 @@ import javax.cache.configuration.MutableConfiguration; import javax.cache.spi.CachingProvider; +import java.time.Duration; import java.util.HashSet; -import java.util.concurrent.Callable; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; -import static org.hamcrest.Matchers.not; +import static org.terracotta.utilities.test.matchers.Eventually.within; /** * @author Ludovic Orban @@ -210,8 +209,7 @@ public void test_getAverageGetTime() throws Exception { heapCache.get("key"); heapCache.get("key"); - assertFor(1100L, () -> heapStatistics.getAverageGetTime(), is(not(0.0f))); - assertThat(heapStatistics.getAverageGetTime(), greaterThan(0.0f)); + assertThat(heapStatistics::getAverageGetTime, within(Duration.ofMillis(1100)).matches(greaterThan(0.0f))); } @Test @@ -224,8 +222,7 @@ public void test_getAveragePutTime() throws Exception { heapCache.put("key", "value"); heapCache.put("key", "value"); - assertFor(1100L, () -> heapStatistics.getAveragePutTime(), is(not(0.0f))); - assertThat(heapStatistics.getAveragePutTime(), greaterThan(0.0f)); + assertThat(heapStatistics::getAveragePutTime, within(Duration.ofMillis(1100)).matches(greaterThan(0.0f))); } @Test @@ -244,28 +241,6 @@ public void test_getAverageRemoveTime() throws Exception { heapCache.remove("key3"); heapCache.remove("key4"); - assertFor(1100L, () -> heapStatistics.getAverageRemoveTime(), is(not(0.0f))); - assertThat(heapStatistics.getAverageRemoveTime(), greaterThan(0.0f)); + assertThat(heapStatistics::getAverageRemoveTime, within(Duration.ofMillis(1100)).matches(greaterThan(0.0f))); } - - private static void assertFor(long timeoutInMs, Callable callable, Matcher matcher) throws Exception { - long timeLeftInMs = timeoutInMs; - - while (timeLeftInMs > 0) { - try { - assertThat(callable.call(), matcher); - return; - } catch (AssertionError assertionError) { - try { - Thread.sleep(100); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - timeLeftInMs -= 100; - } - } - - assertThat(callable.call(), matcher); - } - } diff --git a/107/src/test/java/org/ehcache/jsr107/StringSerializer.java b/ehcache-107/src/test/java/org/ehcache/jsr107/StringSerializer.java similarity index 100% rename from 107/src/test/java/org/ehcache/jsr107/StringSerializer.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/StringSerializer.java diff --git a/107/src/test/java/org/ehcache/jsr107/UnwrapTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/UnwrapTest.java similarity index 95% rename from 107/src/test/java/org/ehcache/jsr107/UnwrapTest.java rename to ehcache-107/src/test/java/org/ehcache/jsr107/UnwrapTest.java index f9926e51c5..a3233c0fe4 100644 --- a/107/src/test/java/org/ehcache/jsr107/UnwrapTest.java +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/UnwrapTest.java @@ -28,9 +28,9 @@ import javax.cache.event.EventType; import javax.cache.spi.CachingProvider; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; /** * @author rism @@ -77,7 +77,6 @@ public void testCacheEntryEventUnwrap() { assertThat(cacheEntryEvent.unwrap(cacheEntryEvent.getClass()), is(instanceOf(Eh107CacheEntryEvent.NormalEvent.class))); } - @SuppressWarnings("unchecked") private class EhEvent implements CacheEvent { @Override public org.ehcache.event.EventType getType() { @@ -99,8 +98,9 @@ public String getOldValue() { throw new UnsupportedOperationException("Implement me!"); } + @SuppressWarnings("deprecation") @Override - public org.ehcache.Cache getSource() { + public org.ehcache.Cache getSource() { throw new UnsupportedOperationException("Implement me!"); } } diff --git a/ehcache-107/src/test/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParserTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParserTest.java new file mode 100644 index 0000000000..85d24f01da --- /dev/null +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/internal/Jsr107CacheConfigurationParserTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107.internal; + +import org.ehcache.jsr107.config.ConfigurationElementState; +import org.ehcache.jsr107.config.Jsr107CacheConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.junit.Test; + +/** + * Jsr107CacheConfigurationParserTest + */ +public class Jsr107CacheConfigurationParserTest { + + @Test(expected = XmlConfigurationException.class) + public void testTranslateServiceCreationConfigurationWithStatisticsManagementEnabled() { + Jsr107CacheConfigurationParser configTranslator = new Jsr107CacheConfigurationParser(); + Jsr107CacheConfiguration cacheConfiguration = + new Jsr107CacheConfiguration(ConfigurationElementState.ENABLED, ConfigurationElementState.DISABLED); + configTranslator.unparseServiceConfiguration(cacheConfiguration); + } + +} diff --git a/ehcache-107/src/test/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParserTest.java b/ehcache-107/src/test/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParserTest.java new file mode 100644 index 0000000000..508bbda7b4 --- /dev/null +++ b/ehcache-107/src/test/java/org/ehcache/jsr107/internal/Jsr107ServiceConfigurationParserTest.java @@ -0,0 +1,46 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.jsr107.internal; + +import org.ehcache.jsr107.config.ConfigurationElementState; +import org.ehcache.jsr107.config.Jsr107Configuration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; + +/** + * Jsr107ServiceConfigurationParserTest + */ +public class Jsr107ServiceConfigurationParserTest { + + @Test(expected = XmlConfigurationException.class) + public void testTranslateServiceCreationConfiguration() { + Jsr107ServiceConfigurationParser configTranslator = new Jsr107ServiceConfigurationParser(); + + Map templateMap = new HashMap<>(); + templateMap.put("testCache", "simpleCacheTemplate"); + templateMap.put("testCache1", "simpleCacheTemplate1"); + boolean jsr107CompliantAtomics = true; + Jsr107Configuration serviceCreationConfiguration = + new Jsr107Configuration("tiny-template", templateMap, jsr107CompliantAtomics, + ConfigurationElementState.ENABLED, ConfigurationElementState.DISABLED); + + configTranslator.unparseServiceCreationConfiguration(serviceCreationConfiguration); + } + +} diff --git a/107/src/test/resources/ehcache-107-copiers-immutable-types.xml b/ehcache-107/src/test/resources/ehcache-107-copiers-immutable-types.xml similarity index 97% rename from 107/src/test/resources/ehcache-107-copiers-immutable-types.xml rename to ehcache-107/src/test/resources/ehcache-107-copiers-immutable-types.xml index d2499af468..366d4f5127 100644 --- a/107/src/test/resources/ehcache-107-copiers-immutable-types.xml +++ b/ehcache-107/src/test/resources/ehcache-107-copiers-immutable-types.xml @@ -15,7 +15,6 @@ --> diff --git a/107/src/test/resources/ehcache-107-default-copiers.xml b/ehcache-107/src/test/resources/ehcache-107-default-copiers.xml similarity index 95% rename from 107/src/test/resources/ehcache-107-default-copiers.xml rename to ehcache-107/src/test/resources/ehcache-107-default-copiers.xml index a484c01976..5c707f0335 100644 --- a/107/src/test/resources/ehcache-107-default-copiers.xml +++ b/ehcache-107/src/test/resources/ehcache-107-default-copiers.xml @@ -15,7 +15,6 @@ --> diff --git a/107/src/test/resources/ehcache-107-immutable-types-cm-level-copiers.xml b/ehcache-107/src/test/resources/ehcache-107-immutable-types-cm-level-copiers.xml similarity index 97% rename from 107/src/test/resources/ehcache-107-immutable-types-cm-level-copiers.xml rename to ehcache-107/src/test/resources/ehcache-107-immutable-types-cm-level-copiers.xml index db38f60436..8882458718 100644 --- a/107/src/test/resources/ehcache-107-immutable-types-cm-level-copiers.xml +++ b/ehcache-107/src/test/resources/ehcache-107-immutable-types-cm-level-copiers.xml @@ -15,7 +15,6 @@ --> diff --git a/107/src/test/resources/ehcache-107-integration.xml b/ehcache-107/src/test/resources/ehcache-107-integration.xml similarity index 100% rename from 107/src/test/resources/ehcache-107-integration.xml rename to ehcache-107/src/test/resources/ehcache-107-integration.xml diff --git a/107/src/test/resources/ehcache-107-listeners.xml b/ehcache-107/src/test/resources/ehcache-107-listeners.xml similarity index 80% rename from 107/src/test/resources/ehcache-107-listeners.xml rename to ehcache-107/src/test/resources/ehcache-107-listeners.xml index a32743e097..eb460261ee 100644 --- a/107/src/test/resources/ehcache-107-listeners.xml +++ b/ehcache-107/src/test/resources/ehcache-107-listeners.xml @@ -16,11 +16,8 @@ --> + xmlns:jsr107='http://www.ehcache.org/v3/jsr107'> @@ -42,4 +39,4 @@ 2000 - \ No newline at end of file + diff --git a/ehcache-107/src/test/resources/ehcache-107-mbeans-cache-config.xml b/ehcache-107/src/test/resources/ehcache-107-mbeans-cache-config.xml new file mode 100644 index 0000000000..ca599c2396 --- /dev/null +++ b/ehcache-107/src/test/resources/ehcache-107-mbeans-cache-config.xml @@ -0,0 +1,28 @@ + + + + + + java.lang.String + java.lang.String + 2000 + + + + diff --git a/107/src/test/resources/ehcache-107-mbeans-template-config.xml b/ehcache-107/src/test/resources/ehcache-107-mbeans-template-config.xml similarity index 78% rename from 107/src/test/resources/ehcache-107-mbeans-template-config.xml rename to ehcache-107/src/test/resources/ehcache-107-mbeans-template-config.xml index 8824829628..430984105e 100644 --- a/107/src/test/resources/ehcache-107-mbeans-template-config.xml +++ b/ehcache-107/src/test/resources/ehcache-107-mbeans-template-config.xml @@ -15,11 +15,8 @@ --> + xmlns:jsr107='http://www.ehcache.org/v3/jsr107'> @@ -38,4 +35,4 @@ - \ No newline at end of file + diff --git a/ehcache-107/src/test/resources/ehcache-107-serializer.xml b/ehcache-107/src/test/resources/ehcache-107-serializer.xml new file mode 100644 index 0000000000..8d8deec44e --- /dev/null +++ b/ehcache-107/src/test/resources/ehcache-107-serializer.xml @@ -0,0 +1,26 @@ + + + + org.ehcache.impl.serialization.CompactJavaSerializer + + + + java.lang.Long + java.lang.String + + 20 + 1 + + + + + java.lang.Long + java.lang.String + + 20 + 1 + + + diff --git a/ehcache-107/src/test/resources/ehcache-107-stats.xml b/ehcache-107/src/test/resources/ehcache-107-stats.xml new file mode 100644 index 0000000000..909732857e --- /dev/null +++ b/ehcache-107/src/test/resources/ehcache-107-stats.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + 10 + 10 + + + + + + 10 + 10 + + + + + java.lang.String + java.lang.String + 10 + + + diff --git a/107/src/test/resources/ehcache-107-types.xml b/ehcache-107/src/test/resources/ehcache-107-types.xml similarity index 100% rename from 107/src/test/resources/ehcache-107-types.xml rename to ehcache-107/src/test/resources/ehcache-107-types.xml diff --git a/ehcache-107/src/test/resources/ehcache-107.xml b/ehcache-107/src/test/resources/ehcache-107.xml new file mode 100644 index 0000000000..cd32783126 --- /dev/null +++ b/ehcache-107/src/test/resources/ehcache-107.xml @@ -0,0 +1,21 @@ + + + + + + + + + + java.lang.String + java.lang.String + 2000 + + + + 20 + + + diff --git a/107/src/test/resources/ehcache-example.xml b/ehcache-107/src/test/resources/ehcache-example.xml similarity index 100% rename from 107/src/test/resources/ehcache-example.xml rename to ehcache-107/src/test/resources/ehcache-example.xml diff --git a/107/src/test/resources/ehcache-loader-writer-107-load-atomics.xml b/ehcache-107/src/test/resources/ehcache-loader-writer-107-load-atomics.xml similarity index 89% rename from 107/src/test/resources/ehcache-loader-writer-107-load-atomics.xml rename to ehcache-107/src/test/resources/ehcache-loader-writer-107-load-atomics.xml index 3cf28037f4..b8febe0ce1 100644 --- a/107/src/test/resources/ehcache-loader-writer-107-load-atomics.xml +++ b/ehcache-107/src/test/resources/ehcache-loader-writer-107-load-atomics.xml @@ -1,5 +1,4 @@ diff --git a/107/src/test/resources/ehcache-loader-writer-107.xml b/ehcache-107/src/test/resources/ehcache-loader-writer-107.xml similarity index 88% rename from 107/src/test/resources/ehcache-loader-writer-107.xml rename to ehcache-107/src/test/resources/ehcache-loader-writer-107.xml index ee84dd73ba..01a332c67b 100644 --- a/107/src/test/resources/ehcache-loader-writer-107.xml +++ b/ehcache-107/src/test/resources/ehcache-loader-writer-107.xml @@ -1,5 +1,4 @@ diff --git a/107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml b/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml similarity index 81% rename from 107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml rename to ehcache-107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml index bebec36a39..5bf3687b4e 100644 --- a/107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml +++ b/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml @@ -15,12 +15,8 @@ --> + xmlns:jsr107='http://www.ehcache.org/v3/jsr107'> diff --git a/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml b/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml similarity index 93% rename from 107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml rename to ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml index 1061bc0b6d..f587c97f35 100644 --- a/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml +++ b/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml @@ -15,7 +15,6 @@ --> diff --git a/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml b/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml new file mode 100644 index 0000000000..2a38ad11f9 --- /dev/null +++ b/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml @@ -0,0 +1,12 @@ + + + + java.lang.Long + com.pany.domain.Product + + com.pany.ehcache.integration.ProductCacheLoaderWriter + + 100 + + + \ No newline at end of file diff --git a/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml b/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml similarity index 85% rename from 107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml rename to ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml index c50dc33616..1e2599caf6 100644 --- a/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml +++ b/ehcache-107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml @@ -1,10 +1,6 @@ + xmlns:jsr107='http://www.ehcache.org/v3/jsr107'> diff --git a/107/src/test/resources/org/ehcache/docs/public-xsds-location.xml b/ehcache-107/src/test/resources/org/ehcache/docs/public-xsds-location.xml similarity index 100% rename from 107/src/test/resources/org/ehcache/docs/public-xsds-location.xml rename to ehcache-107/src/test/resources/org/ehcache/docs/public-xsds-location.xml diff --git a/ehcache-api/build.gradle b/ehcache-api/build.gradle new file mode 100644 index 0000000000..c32a4fa5e2 --- /dev/null +++ b/ehcache-api/build.gradle @@ -0,0 +1,40 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.internal-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 API module' + description = 'The API module of Ehcache 3' + } +} + +checkstyle { + configFile = file("$projectDir/config/checkstyle.xml") +} + +jar { + bnd( + 'Export-Package': 'org.ehcache.*', + 'Import-Package': '*' + ) +} + +//TODO : Baseline task is broken pending Gradle artifact resolution fixes +//check.dependsOn(baseline) diff --git a/api/config/checkstyle-suppressions.xml b/ehcache-api/config/checkstyle-suppressions.xml similarity index 100% rename from api/config/checkstyle-suppressions.xml rename to ehcache-api/config/checkstyle-suppressions.xml diff --git a/api/config/checkstyle.xml b/ehcache-api/config/checkstyle.xml similarity index 80% rename from api/config/checkstyle.xml rename to ehcache-api/config/checkstyle.xml index 58acf9aadf..945a03c384 100644 --- a/api/config/checkstyle.xml +++ b/ehcache-api/config/checkstyle.xml @@ -24,13 +24,6 @@ - - - - - - - @@ -41,13 +34,17 @@ - + + + + + + - diff --git a/api/src/main/java/org/ehcache/Cache.java b/ehcache-api/src/main/java/org/ehcache/Cache.java similarity index 97% rename from api/src/main/java/org/ehcache/Cache.java rename to ehcache-api/src/main/java/org/ehcache/Cache.java index 011136b6c9..a904dce76d 100644 --- a/api/src/main/java/org/ehcache/Cache.java +++ b/ehcache-api/src/main/java/org/ehcache/Cache.java @@ -22,6 +22,7 @@ import org.ehcache.spi.loaderwriter.CacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import java.util.Iterator; import java.util.Map; import java.util.Set; @@ -254,6 +255,16 @@ public interface Cache extends Iterable> { */ CacheRuntimeConfiguration getRuntimeConfiguration(); + /** + * Returns an iterator over the cache entries. + *

+ * Due to the interactions of the cache and iterator contracts it is possible + * for iteration to return expired entries. + * + * @return an Iterator over the cache entries. + */ + @Override + Iterator> iterator(); /** * A mapping of key to value held in a {@link Cache}. diff --git a/api/src/main/java/org/ehcache/CacheIterationException.java b/ehcache-api/src/main/java/org/ehcache/CacheIterationException.java similarity index 100% rename from api/src/main/java/org/ehcache/CacheIterationException.java rename to ehcache-api/src/main/java/org/ehcache/CacheIterationException.java diff --git a/api/src/main/java/org/ehcache/CacheManager.java b/ehcache-api/src/main/java/org/ehcache/CacheManager.java similarity index 100% rename from api/src/main/java/org/ehcache/CacheManager.java rename to ehcache-api/src/main/java/org/ehcache/CacheManager.java diff --git a/api/src/main/java/org/ehcache/CachePersistenceException.java b/ehcache-api/src/main/java/org/ehcache/CachePersistenceException.java similarity index 95% rename from api/src/main/java/org/ehcache/CachePersistenceException.java rename to ehcache-api/src/main/java/org/ehcache/CachePersistenceException.java index 04d73477dc..6b90c9dd2a 100644 --- a/api/src/main/java/org/ehcache/CachePersistenceException.java +++ b/ehcache-api/src/main/java/org/ehcache/CachePersistenceException.java @@ -25,6 +25,8 @@ */ public class CachePersistenceException extends Exception { + private static final long serialVersionUID = -5858875151420107040L; + /** * Creates a {@code CachePersistenceException} with the provided message. * diff --git a/api/src/main/java/org/ehcache/PersistentCacheManager.java b/ehcache-api/src/main/java/org/ehcache/PersistentCacheManager.java similarity index 100% rename from api/src/main/java/org/ehcache/PersistentCacheManager.java rename to ehcache-api/src/main/java/org/ehcache/PersistentCacheManager.java diff --git a/api/src/main/java/org/ehcache/PersistentUserManagedCache.java b/ehcache-api/src/main/java/org/ehcache/PersistentUserManagedCache.java similarity index 100% rename from api/src/main/java/org/ehcache/PersistentUserManagedCache.java rename to ehcache-api/src/main/java/org/ehcache/PersistentUserManagedCache.java diff --git a/api/src/main/java/org/ehcache/StateTransitionException.java b/ehcache-api/src/main/java/org/ehcache/StateTransitionException.java similarity index 100% rename from api/src/main/java/org/ehcache/StateTransitionException.java rename to ehcache-api/src/main/java/org/ehcache/StateTransitionException.java diff --git a/api/src/main/java/org/ehcache/Status.java b/ehcache-api/src/main/java/org/ehcache/Status.java similarity index 100% rename from api/src/main/java/org/ehcache/Status.java rename to ehcache-api/src/main/java/org/ehcache/Status.java diff --git a/api/src/main/java/org/ehcache/UserManagedCache.java b/ehcache-api/src/main/java/org/ehcache/UserManagedCache.java similarity index 100% rename from api/src/main/java/org/ehcache/UserManagedCache.java rename to ehcache-api/src/main/java/org/ehcache/UserManagedCache.java diff --git a/api/src/main/java/org/ehcache/ValueSupplier.java b/ehcache-api/src/main/java/org/ehcache/ValueSupplier.java similarity index 88% rename from api/src/main/java/org/ehcache/ValueSupplier.java rename to ehcache-api/src/main/java/org/ehcache/ValueSupplier.java index 65a8a4b4bb..505813e4cc 100644 --- a/api/src/main/java/org/ehcache/ValueSupplier.java +++ b/ehcache-api/src/main/java/org/ehcache/ValueSupplier.java @@ -22,7 +22,11 @@ * This indicates that the value needs to be computed before it can be retrieved, such as deserialization. * * @param the value type + * + * @deprecated Now using {@code Supplier} for {@link org.ehcache.expiry.ExpiryPolicy} */ +@Deprecated +@FunctionalInterface public interface ValueSupplier { /** diff --git a/api/src/main/java/org/ehcache/config/Builder.java b/ehcache-api/src/main/java/org/ehcache/config/Builder.java similarity index 100% rename from api/src/main/java/org/ehcache/config/Builder.java rename to ehcache-api/src/main/java/org/ehcache/config/Builder.java diff --git a/ehcache-api/src/main/java/org/ehcache/config/CacheConfiguration.java b/ehcache-api/src/main/java/org/ehcache/config/CacheConfiguration.java new file mode 100644 index 0000000000..ab808dbe7b --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/config/CacheConfiguration.java @@ -0,0 +1,128 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config; + +import org.ehcache.Cache; + +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.service.ServiceConfiguration; + +import java.util.Collection; + +/** + * Represents the minimal configuration for a {@link Cache}. + *

+ * Implementations are expected to be read-only. + * + * @param the key type for the cache + * @param the value type for the cache + */ +public interface CacheConfiguration { + + /** + * The service configurations defined for the {@link Cache}. + *

+ * Implementations must return an unmodifiable collection. + * + * @return service configurations + */ + Collection> getServiceConfigurations(); + + /** + * The key type for the {@link Cache}. + *

+ * The key type must not be {@code null}. + * + * @return a non {@code null} class + */ + Class getKeyType(); + + /** + * The value type for the {@link Cache}. + *

+ * The value type must not be {@code null}. + * + * @return a non {@code null} class + */ + Class getValueType(); + + /** + * The {@link EvictionAdvisor} predicate function. + *

+ * Entries which pass this predicate may be ignored by the eviction process. + * This is only a hint. + * + * @return the eviction advisor predicate + */ + EvictionAdvisor getEvictionAdvisor(); + + /** + * The {@link ClassLoader} for the {@link Cache}. + *

+ * This {@code ClassLoader} will be used to instantiate cache level services + * and for deserializing cache entries when required. + *

+ * The {@code ClassLoader} must not be null. + * + * @return the cache {@code ClassLoader} + */ + ClassLoader getClassLoader(); + + /** + * The {@link org.ehcache.expiry.Expiry} rules for the {@link Cache}. + *

+ * The {@code Expiry} cannot be null. + * + * @return the {@code Expiry} + * + * @deprecated Use {@link #getExpiryPolicy()} + */ + @Deprecated + org.ehcache.expiry.Expiry getExpiry(); + + /** + * The {@link ExpiryPolicy} rules for the {@link Cache}. + *

+ * The {@code ExpiryPolicy} cannot be null. + * + * @return the {@code ExpiryPolicy} + */ + ExpiryPolicy getExpiryPolicy(); + + /** + * The {@link ResourcePools} for the {@link Cache}. + *

+ * The {@code ResourcePools} cannot be null nor empty. + * + * @return the {@link ResourcePools} + */ + ResourcePools getResourcePools(); + + /** + * Create a builder seeded with this configuration. + *

+ * The default implementation throws {@code UnsupportedOperationException} to indicate that configuration derivation + * is not supported. + * + * @see FluentConfigurationBuilder + * @return a configuration builder + * @throws UnsupportedOperationException if configuration derivation is not supported + */ + default FluentCacheConfigurationBuilder derive() throws UnsupportedOperationException { + throw new UnsupportedOperationException(); + } +} diff --git a/api/src/main/java/org/ehcache/config/CacheRuntimeConfiguration.java b/ehcache-api/src/main/java/org/ehcache/config/CacheRuntimeConfiguration.java similarity index 92% rename from api/src/main/java/org/ehcache/config/CacheRuntimeConfiguration.java rename to ehcache-api/src/main/java/org/ehcache/config/CacheRuntimeConfiguration.java index c111d5aaf6..26d30b59e2 100644 --- a/api/src/main/java/org/ehcache/config/CacheRuntimeConfiguration.java +++ b/ehcache-api/src/main/java/org/ehcache/config/CacheRuntimeConfiguration.java @@ -21,6 +21,7 @@ import org.ehcache.event.EventOrdering; import org.ehcache.event.EventType; +import java.util.EnumSet; import java.util.Set; /** @@ -68,8 +69,10 @@ void registerCacheEventListener(CacheEventListener listene * * @throws java.lang.IllegalStateException if the listener is already registered */ - void registerCacheEventListener(CacheEventListener listener, - EventOrdering ordering, EventFiring firing, EventType eventType, EventType... eventTypes); + default void registerCacheEventListener(CacheEventListener listener, + EventOrdering ordering, EventFiring firing, EventType eventType, EventType... eventTypes) { + registerCacheEventListener(listener, ordering, firing, EnumSet.of(eventType, eventTypes)); + } /** * Deregisters a previously registered {@link org.ehcache.event.CacheEventListener CacheEventListener} instance. diff --git a/api/src/main/java/org/ehcache/config/Configuration.java b/ehcache-api/src/main/java/org/ehcache/config/Configuration.java similarity index 78% rename from api/src/main/java/org/ehcache/config/Configuration.java rename to ehcache-api/src/main/java/org/ehcache/config/Configuration.java index d61e335e77..93a1067085 100644 --- a/api/src/main/java/org/ehcache/config/Configuration.java +++ b/ehcache-api/src/main/java/org/ehcache/config/Configuration.java @@ -46,7 +46,7 @@ public interface Configuration { * * @return a collection of service creations configurations */ - Collection> getServiceCreationConfigurations(); + Collection> getServiceCreationConfigurations(); /** * The {@link ClassLoader} for the {@link org.ehcache.CacheManager CacheManager}. @@ -59,4 +59,18 @@ public interface Configuration { * @return the cache manager {@code ClassLoader} */ ClassLoader getClassLoader(); + + /** + * Creates a builder seeded with this configuration. + *

+ * The default implementation throws {@code UnsupportedOperationException} to indicate that configuration derivation + * is not supported. + * + * @see FluentConfigurationBuilder + * @return a configuration builder + * @throws UnsupportedOperationException if configuration derivation is not supported + */ + default FluentConfigurationBuilder derive() { + throw new UnsupportedOperationException(); + } } diff --git a/api/src/main/java/org/ehcache/config/Eviction.java b/ehcache-api/src/main/java/org/ehcache/config/Eviction.java similarity index 100% rename from api/src/main/java/org/ehcache/config/Eviction.java rename to ehcache-api/src/main/java/org/ehcache/config/Eviction.java diff --git a/api/src/main/java/org/ehcache/config/EvictionAdvisor.java b/ehcache-api/src/main/java/org/ehcache/config/EvictionAdvisor.java similarity index 98% rename from api/src/main/java/org/ehcache/config/EvictionAdvisor.java rename to ehcache-api/src/main/java/org/ehcache/config/EvictionAdvisor.java index ca21460c7d..2413d303f5 100644 --- a/api/src/main/java/org/ehcache/config/EvictionAdvisor.java +++ b/ehcache-api/src/main/java/org/ehcache/config/EvictionAdvisor.java @@ -22,6 +22,7 @@ * @param the key type for the cache * @param the value type for the cache */ +@FunctionalInterface public interface EvictionAdvisor { /** diff --git a/ehcache-api/src/main/java/org/ehcache/config/FluentCacheConfigurationBuilder.java b/ehcache-api/src/main/java/org/ehcache/config/FluentCacheConfigurationBuilder.java new file mode 100644 index 0000000000..2bd70992fa --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/config/FluentCacheConfigurationBuilder.java @@ -0,0 +1,529 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config; + +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.ServiceConfiguration; + +import java.util.Collection; +import java.util.function.Predicate; +import java.util.function.UnaryOperator; + +/** + * A fluent builder of {@link CacheConfiguration} instances. + * + * @param cache key type + * @param cache value type + * @param builder sub-type + */ +public interface FluentCacheConfigurationBuilder> extends Builder> { + + /** + * Builds a new {@link CacheConfiguration}. + * + * @return a new {@code CacheConfiguration} + */ + CacheConfiguration build(); + + /** + * Return the unique service configuration of the given type. + *

+ * If there are multiple configuration instances of this type (or subtypes) then an {@code IllegalArgumentException} + * will be thrown. + * + * @param configurationType desired configuration type + * @param configuration type + * @return the service configuration of the given type; @{code null} if there is no service configuration of the given type + * @throws IllegalArgumentException if there are multiple instances of this type + * + * @see #getServices(Class) + * @see #withService(ServiceConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + default > C getService(Class configurationType) throws IllegalArgumentException { + Collection services = getServices(configurationType); + + switch (services.size()) { + case 0: + return null; + case 1: + return services.iterator().next(); + default: + throw new IllegalArgumentException(configurationType + " does not identify a unique service configuration: " + services); + } + } + + /** + * Returns all the service configurations of the given type. + * + * @param configurationType desired configuration type + * @param configuration type + * @return all services of this type + * + * @see #getService(Class) + * @see #withService(ServiceConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + > Collection getServices(Class configurationType); + + /** + * Adds a service configuration to this configuration. + *

+ * This will remove any existing service configurations that are incompatible with the supplied one. + * This removal is equivalent to the following: + *

{@code configurations.removeIf(
+   *     existing -> !config.compatibleWith(existing) || !existing.compatibleWith(config)
+   * );}
+ * + * @param config service configuration + * @return an updated builder + * @see ServiceConfiguration#compatibleWith(ServiceConfiguration) + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + B withService(ServiceConfiguration config); + + /** + * Adds a service configuration built by the given builder to this configuration. + *

+ * This will remove any existing configurations that are incompatible with the configuration returned by + * {@code builder.build()}. + * + * @param builder service configuration builder + * @return an updated builder + * @see #withService(ServiceConfiguration) + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(ServiceConfiguration) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + default B withService(Builder> builder) { + return withService(builder.build()); + } + + /** + * Removes all service configurations of the given type from this configuration. + * + * @param clazz service configuration type + * @return an updated builder + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(ServiceConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + default B withoutServices(Class> clazz) { + return withoutServices(clazz, c -> true); + } + + /** + * Removes all service configurations of the given type that pass the predicate. + * + * @param clazz service configuration type + * @param predicate predicate controlling removal + * @param configuration type + * @return an updated builder + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(ServiceConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #updateServices(Class, UnaryOperator) + */ + > B withoutServices(Class clazz, Predicate predicate); + + /** + * Updates all service configurations of the given type. + *

+ * For each existing service creation configuration instance that is assignment compatible with {@code clazz} the + * following process is performed: + *

    + *
  1. The configuration is converted to its detached representations using the + * {@link ServiceConfiguration#derive()} method.
  2. + *
  3. The detached representation is transformed using the {@code update} unary operator.
  4. + *
  5. A new configuration is generated by passing the transformed detached representation to the existing + * configurations {@link ServiceConfiguration#build(Object)} method.
  6. + *
  7. The new configuration is added to the builders service configuration set.
  8. + *
+ * If there are no service creation configurations assignment compatible with {@code clazz} then an + * {@code IllegalStateException} will be thrown. + * + * @param clazz service configuration concrete type + * @param update configuration mutation function + * @param configuration detached representation type + * @param service configuration type + * @return an updated builder + * @throws IllegalStateException if no matching service configurations exist + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(ServiceConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + */ + > B updateServices(Class clazz, UnaryOperator update) throws IllegalStateException; + + /** + * Sets the {@link EvictionAdvisor} in the returned builder. + * + * @param evictionAdvisor the eviction advisor to be used + * @return a new builder with the added eviction advisor + * + * @see Eviction#NO_ADVICE + */ + B withEvictionAdvisor(final EvictionAdvisor evictionAdvisor); + + /** + * Sets the {@link ClassLoader} in the returned builder. + *

+ * The {@link ClassLoader} will be used for resolving all non Ehcache types. + * + * @param classLoader the class loader to use + * @return a new builder with the added class loader + * + * @see #withDefaultClassLoader() + */ + B withClassLoader(ClassLoader classLoader); + + /** + * Removes any previously installed custom class loader + * + * @return a new build using the default class loader + * + * @see #withClassLoader(ClassLoader) + */ + B withDefaultClassLoader(); + + /** + * Sets the {@link ResourcePools} in the returned builder. + *

+ * {@link ResourcePools} is what determines the tiering of a cache. + * + * @param resourcePools the resource pools to use + * @return a new builder with the added resource pools + * + * @see #withResourcePools(Builder) + * @see #updateResourcePools(UnaryOperator) + */ + B withResourcePools(ResourcePools resourcePools); + + /** + * Convenience method to set the {@link ResourcePools} through a {@link Builder}. + * + * @param builder the builder providing the resource pool + * @return a new builder with the added resource pools + * + * @see #withResourcePools(ResourcePools) + * @see #updateResourcePools(UnaryOperator) + */ + default B withResourcePools(Builder builder) { + return withResourcePools(builder.build()); + } + + /** + * Updates the configured resource pools. + * + * @param update resource pool update operation + * @return a new build with updated resource pools + * + * @see #withResourcePools(ResourcePools) + * @see #withResourcePools(Builder) + */ + B updateResourcePools(UnaryOperator update); + + /** + * Sets the {@link ExpiryPolicy} configuration in the returned builder. + *

+ * {@code ExpiryPolicy} is what controls data freshness in a cache. + * + * @param expiry the expiry to use + * @return a new builder with the added expiry + * + * @see ExpiryPolicy#NO_EXPIRY + */ + B withExpiry(ExpiryPolicy expiry); + + /** + * Sets the {@link CacheLoaderWriter} in the returned builder. + *

+ * Configuration of a {@link CacheLoaderWriter} is what enables cache-through patterns. + * + * @param loaderWriter the loaderwriter to use + * @return a new builder with the added loaderwriter configuration + * + * @see #withLoaderWriter(Class, Object...) + * @see #withoutLoaderWriter() + */ + B withLoaderWriter(CacheLoaderWriter loaderWriter); + + /** + * Sets the {@link CacheLoaderWriter} (using a class and constructor arguments) in the returned builder. + *

+ * Configuration of a {@link CacheLoaderWriter} is what enables cache-through patterns. + * + * @param loaderWriterClass the loaderwrite class + * @param arguments optional constructor arguments + * @return a new builder with the added loaderwriter configuration + * + * @see #withLoaderWriter(CacheLoaderWriter) + * @see #withoutLoaderWriter() + */ + B withLoaderWriter(Class> loaderWriterClass, Object... arguments); + + /** + * Removes any configured loader-writer. + * + * @return a new build with no configured loader-writer + * + * @see #withLoaderWriter(CacheLoaderWriter) + * @see #withLoaderWriter(Class, Object...) + */ + B withoutLoaderWriter(); + + /** + * Sets the {@link ResilienceStrategy} in the returned builder. + * + * @param resilienceStrategy the resilience strategy to use + * @return a new builder with the added resilience strategy configuration + * + * @see #withResilienceStrategy(Class, Object...) + * @see #withDefaultResilienceStrategy() + */ + B withResilienceStrategy(ResilienceStrategy resilienceStrategy); + + /** + * Sets the {@link ResilienceStrategy} (using a class and constructor arguments) in the returned builder. + * + * @param resilienceStrategyClass the resilience strategy class + * @param arguments optional constructor arguments + * @return a new builder with the added resilience strategy configuration + * + * @see #withResilienceStrategy(ResilienceStrategy) + * @see #withDefaultResilienceStrategy() + */ + @SuppressWarnings("rawtypes") + B withResilienceStrategy(Class resilienceStrategyClass, Object... arguments); + + /** + * Restores configuration of the implementations default resilience strategy. + * + * @return a new builder using the default resilience strategy + * + * @see #withResilienceStrategy(ResilienceStrategy) + * @see #withResilienceStrategy(Class, Object...) + */ + B withDefaultResilienceStrategy(); + + /** + * Adds by-value semantics using the cache key serializer for the key on heap. + *

+ * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. + * + * @return a new builder with the added key copier + * + * @see #withKeyCopier(Copier) + * @see #withKeyCopier(Class) + * @see #withoutKeyCopier() + */ + B withKeySerializingCopier(); + + /** + * Adds by-value semantics using the cache value serializer for the value on heap. + *

+ * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. + * + * @return a new builder with the added value copier + * + * @see #withValueCopier(Copier) + * @see #withValueCopier(Class) + * @see #withoutValueCopier() + */ + B withValueSerializingCopier(); + + /** + * Adds by-value semantics using the provided {@link Copier} for the key on heap. + *

+ * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. + * + * @param keyCopier the key copier to use + * @return a new builder with the added key copier + * + * @see #withKeySerializingCopier() + * @see #withKeyCopier(Class) + * @see #withoutKeyCopier() + */ + B withKeyCopier(Copier keyCopier); + + /** + * Adds by-value semantics using the provided {@link Copier} class for the key on heap. + *

+ * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. + * + * @param keyCopierClass the key copier class to use + * @return a new builder with the added key copier + * + * @see #withKeySerializingCopier() + * @see #withKeyCopier(Copier) + * @see #withoutKeyCopier() + */ + B withKeyCopier(Class> keyCopierClass); + + /** + * Removes any configured {@link Copier} for keys on heap. + * + * @return a new builder without a key copier + * + * @see #withKeySerializingCopier() + * @see #withKeyCopier(Copier) + * @see #withKeyCopier(Class) + */ + B withoutKeyCopier(); + + /** + * Adds by-value semantics using the provided {@link Copier} for the value on heap. + *

+ * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. + * + * @param valueCopier the value copier to use + * @return a new builder with the added value copier + * + * @see #withValueSerializingCopier() + * @see #withValueCopier(Class) + * @see #withoutValueCopier() + */ + B withValueCopier(Copier valueCopier); + + /** + * Adds by-value semantics using the provided {@link Copier} class for the value on heap. + *

+ * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. + * + * @param valueCopierClass the value copier class to use + * @return a new builder with the added value copier + * + * @see #withValueSerializingCopier() + * @see #withValueCopier(Copier) + * @see #withoutValueCopier() + */ + B withValueCopier(Class> valueCopierClass); + + /** + * Removes any configured {@link Copier} for values on heap. + * + * @return a new builder without a value copier + * + * @see #withValueSerializingCopier() + * @see #withValueCopier(Copier) + * @see #withValueCopier(Class) + */ + B withoutValueCopier(); + + /** + * Sets the {@link Serializer} for cache keys in the returned builder. + *

+ * {@link Serializer}s are what enables cache storage beyond the heap tier. + * + * @param keySerializer the key serializer to use + * @return a new builder with the added key serializer + * + * @see #withKeySerializer(Class) + * @see #withDefaultKeySerializer() + */ + B withKeySerializer(Serializer keySerializer); + + /** + * Sets the {@link Serializer} class for cache keys in the returned builder. + *

+ * {@link Serializer}s are what enables cache storage beyond the heap tier. + * + * @param keySerializerClass the key serializer to use + * @return a new builder with the added key serializer + * + * @see #withKeySerializer(Serializer) + * @see #withDefaultKeySerializer() + */ + B withKeySerializer(Class> keySerializerClass); + + /** + * Removes any explicitly configured {@link Serializer} for cache keys. + * + * @return a new builder with no configured key serializer + * + * @see #withKeySerializer(Serializer) + * @see #withKeySerializer(Class) + */ + B withDefaultKeySerializer(); + + /** + * Sets the {@link Serializer} for cache values in the returned builder. + *

+ * {@link Serializer}s are what enables cache storage beyond the heap tier. + * + * @param valueSerializer the key serializer to use + * @return a new builder with the added value serializer + * + * @see #withValueSerializer(Class) + * @see #withDefaultValueSerializer() + */ + B withValueSerializer(Serializer valueSerializer); + + /** + * Sets the {@link Serializer} class for cache values in the returned builder. + *

+ * {@link Serializer}s are what enables cache storage beyond the heap tier. + * + * @param valueSerializerClass the key serializer to use + * @return a new builder with the added value serializer + * + * @see #withValueSerializer(Serializer) + * @see #withDefaultValueSerializer() + */ + B withValueSerializer(Class> valueSerializerClass); + + /** + * Removes any explicitly configured {@link Serializer} for cache values. + * + * @return a new builder with no configured value serializer + * + * @see #withValueSerializer(Serializer) + * @see #withValueSerializer(Class) + */ + B withDefaultValueSerializer(); +} + diff --git a/ehcache-api/src/main/java/org/ehcache/config/FluentConfigurationBuilder.java b/ehcache-api/src/main/java/org/ehcache/config/FluentConfigurationBuilder.java new file mode 100644 index 0000000000..0b2495f111 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/config/FluentConfigurationBuilder.java @@ -0,0 +1,327 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config; + +import org.ehcache.spi.service.ServiceCreationConfiguration; + +import java.util.Collection; +import java.util.function.Predicate; +import java.util.function.UnaryOperator; + +/** + * A fluent builder of {@link Configuration} instances. + * + * @param builder sub-type + */ +public interface FluentConfigurationBuilder> extends Builder { + + /** + * Return the cache configuration for the given alias. + * + * @param alias cache alias + * @return associated cache configuration + * + * @see #withCache(String, CacheConfiguration) + * @see #withCache(String, Builder) + * @see #updateCache(String, UnaryOperator) + * @see #withoutCache(String) + * @see #updateCaches(UnaryOperator) + */ + CacheConfiguration getCache(String alias); + + /** + * Adds the given cache to this configuration. + *

+ * This will overwrite any existing configuration for the cache with this alias. + * + * @param alias cache alias + * @param config cache configuration + * @return an updated builder + * + * @see #getCache(String) + * @see #withCache(String, Builder) + * @see #updateCache(String, UnaryOperator) + * @see #withoutCache(String) + * @see #updateCaches(UnaryOperator) + */ + B withCache(String alias, CacheConfiguration config); + + /** + * Adds the cache configuration built by a builder to this configuration. + *

+ * This will overwrite any existing configuration for the cache with this alias. + * + * @param alias cache alias + * @param builder cache configuration builder + * @return an updated builder + * + * @see #getCache(String) + * @see #withCache(String, Builder) + * @see #updateCache(String, UnaryOperator) + * @see #withoutCache(String) + * @see #updateCaches(UnaryOperator) + */ + default B withCache(String alias, Builder> builder) { + return withCache(alias, builder.build()); + } + + /** + * Removes the given cache from this configuration. + * + * @param alias cache alias + * @return an updated builder + * + * @see #getCache(String) + * @see #withCache(String, CacheConfiguration) + * @see #withCache(String, Builder) + * @see #updateCache(String, UnaryOperator) + * @see #updateCaches(UnaryOperator) + */ + B withoutCache(String alias); + + /** + * Updates the configuration of the identified cache. + *

+ * If a cache exists for the given alias then the following process is performed: + *

    + *
  1. The configuration is converted to a builder seeded with that configuration. + *
  2. The builder is then transformed using the {@code update} unary operator.
  3. + *
  4. A new configuration is generated by calling {@code build()} on the resultant builder.
  5. + *
  6. The new configuration is associated with the given alias.
  7. + *
+ * If there is no cache associated with the given {@code alias} then an {@code IllegalStateException} will be thrown. + * + * @param alias cache alias + * @param update configuration mutation function + * @return an updated builder + * @throws IllegalArgumentException if no cache configuration exists for {@code alias} + * + * @see #getCache(String) + * @see #withCache(String, CacheConfiguration) + * @see #withCache(String, Builder) + * @see #withoutCache(String) + * @see #updateCaches(UnaryOperator) + */ + B updateCache(String alias, UnaryOperator> update) throws IllegalArgumentException; + + /** + * Updates the configuration of the all caches. + *

+ * For every existing cache the following process is performed: + *

    + *
  1. The configuration is converted to a builder seeded with that configuration. + *
  2. The builder is then transformed using the {@code update} unary operator.
  3. + *
  4. A new configuration is generated by calling {@code build()} on the resultant builder.
  5. + *
  6. The new configuration is associated with the given alias.
  7. + *
+ * + * @param update configuration mutation function + * @return an updated builder + * + * @see #getCache(String) + * @see #withCache(String, CacheConfiguration) + * @see #withCache(String, Builder) + * @see #updateCache(String, UnaryOperator) + * @see #withoutCache(String) + */ + B updateCaches(UnaryOperator> update); + + /** + * Return the unique service creation configuration of the given type. + *

+ * If there are multiple configuration instances of this type (or subtypes) then an {@code IllegalArgumentException} + * will be thrown. + * + * @param configurationType desired configuration type + * @param configuration type + * @return the given configuration type + * @throws IllegalArgumentException if there are multiple instances of this type + * + * @see #getServices(Class) + * @see #withService(ServiceCreationConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + default > C getService(Class configurationType) throws IllegalArgumentException { + Collection services = getServices(configurationType); + + switch (services.size()) { + case 0: + return null; + case 1: + return services.iterator().next(); + default: + throw new IllegalArgumentException(configurationType + " does not identify a unique service configuration: " + services); + } + } + + /** + * Return the service creation configurations of the given type. + * + * @param configurationType desired configuration type + * @param configuration type + * @return all services of this type + * + * @see #getService(Class) + * @see #withService(ServiceCreationConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + > Collection getServices(Class configurationType); + + /** + * Adds a service creation configuration to this configuration. + *

+ * This will remove any existing service creation configurations that are incompatible with the supplied one. + * This removal is equivalent to the following: + *

{@code configurations.removeIf(
+   *     existing -> !config.compatibleWith(existing) || !existing.compatibleWith(config)
+   * );}
+ * + * @param config service creation configuration + * @return an updated builder + * @see ServiceCreationConfiguration#compatibleWith(ServiceCreationConfiguration) + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + B withService(ServiceCreationConfiguration config); + + /** + * Adds a service creation configuration built by the given builder to this configuration. + *

+ * This will remove any existing configurations that are incompatible with the supplied one. + * + * @param builder service creation configuration builder + * @return an updated builder + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(ServiceCreationConfiguration) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + default B withService(Builder> builder) { + return withService(builder.build()); + } + + /** + * Removes all service creation configurations of the given type from this configuration. + * + * @param clazz service configuration type + * @return an updated builder + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(ServiceCreationConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class, Predicate) + * @see #updateServices(Class, UnaryOperator) + */ + default B withoutServices(Class> clazz) { + return withoutServices(clazz, c -> true); + } + + /** + * Removes all service creation configurations of the given type that pass the predicate. + * + * @param clazz service configuration type + * @param predicate predicate controlling removal + * @param configuration type + * @return an updated builder + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(ServiceCreationConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #updateServices(Class, UnaryOperator) + */ + > B withoutServices(Class clazz, Predicate predicate); + + /** + * Updates all service creation configurations of the given type. + *

+ * For each existing service creation configuration instance that is assignment compatible with {@code clazz} the + * following process is performed: + *

    + *
  1. The configuration is converted to its detached representations using the + * {@link ServiceCreationConfiguration#derive()} method.
  2. + *
  3. The detached representation is transformed using the {@code update} unary operator.
  4. + *
  5. A new configuration is generated by passing the transformed detached representation to the existing + * configurations {@link ServiceCreationConfiguration#build(Object)} method.
  6. + *
  7. The new configuration is added to the builders service configuration set.
  8. + *
+ * If there are no service creation configurations assignment compatible with {@code clazz} then an + * {@code IllegalStateException} will be thrown. + * + * @param clazz service creation configuration type + * @param update configuration mutation function + * @param configuration detached representation type + * @param service configuration type + * @return an updated builder + * @throws IllegalStateException if no configurations of type {@code C} exist + * + * @see #getService(Class) + * @see #getServices(Class) + * @see #withService(ServiceCreationConfiguration) + * @see #withService(Builder) + * @see #withoutServices(Class) + * @see #withoutServices(Class, Predicate) + */ + > B updateServices(Class clazz, UnaryOperator update) throws IllegalStateException; + + /** + * Return the configured classloader instance. + * + * @return configured classloader + * + * @see #withClassLoader(ClassLoader) + * @see #withDefaultClassLoader() + */ + ClassLoader getClassLoader(); + + /** + * Sets the given class loader as the cache manager classloader + * + * @param classLoader cache manager classloader + * @return an updated builder + * + * @see #getClassLoader() + * @see #withDefaultClassLoader() + */ + B withClassLoader(ClassLoader classLoader); + + /** + * Removes any provided class loader returning to default behavior + * + * @return an updated builder + * + * @see #getClassLoader() + * @see #withClassLoader(ClassLoader) + */ + B withDefaultClassLoader(); +} diff --git a/api/src/main/java/org/ehcache/config/ResourcePool.java b/ehcache-api/src/main/java/org/ehcache/config/ResourcePool.java similarity index 100% rename from api/src/main/java/org/ehcache/config/ResourcePool.java rename to ehcache-api/src/main/java/org/ehcache/config/ResourcePool.java diff --git a/api/src/main/java/org/ehcache/config/ResourcePools.java b/ehcache-api/src/main/java/org/ehcache/config/ResourcePools.java similarity index 100% rename from api/src/main/java/org/ehcache/config/ResourcePools.java rename to ehcache-api/src/main/java/org/ehcache/config/ResourcePools.java diff --git a/api/src/main/java/org/ehcache/config/ResourceType.java b/ehcache-api/src/main/java/org/ehcache/config/ResourceType.java similarity index 100% rename from api/src/main/java/org/ehcache/config/ResourceType.java rename to ehcache-api/src/main/java/org/ehcache/config/ResourceType.java diff --git a/api/src/main/java/org/ehcache/config/ResourceUnit.java b/ehcache-api/src/main/java/org/ehcache/config/ResourceUnit.java similarity index 100% rename from api/src/main/java/org/ehcache/config/ResourceUnit.java rename to ehcache-api/src/main/java/org/ehcache/config/ResourceUnit.java diff --git a/api/src/main/java/org/ehcache/config/SizedResourcePool.java b/ehcache-api/src/main/java/org/ehcache/config/SizedResourcePool.java similarity index 100% rename from api/src/main/java/org/ehcache/config/SizedResourcePool.java rename to ehcache-api/src/main/java/org/ehcache/config/SizedResourcePool.java diff --git a/api/src/main/java/org/ehcache/config/package-info.java b/ehcache-api/src/main/java/org/ehcache/config/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/config/package-info.java rename to ehcache-api/src/main/java/org/ehcache/config/package-info.java diff --git a/api/src/main/java/org/ehcache/config/units/EntryUnit.java b/ehcache-api/src/main/java/org/ehcache/config/units/EntryUnit.java similarity index 100% rename from api/src/main/java/org/ehcache/config/units/EntryUnit.java rename to ehcache-api/src/main/java/org/ehcache/config/units/EntryUnit.java diff --git a/api/src/main/java/org/ehcache/config/units/MemoryUnit.java b/ehcache-api/src/main/java/org/ehcache/config/units/MemoryUnit.java similarity index 100% rename from api/src/main/java/org/ehcache/config/units/MemoryUnit.java rename to ehcache-api/src/main/java/org/ehcache/config/units/MemoryUnit.java diff --git a/api/src/main/java/org/ehcache/config/units/package-info.java b/ehcache-api/src/main/java/org/ehcache/config/units/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/config/units/package-info.java rename to ehcache-api/src/main/java/org/ehcache/config/units/package-info.java diff --git a/api/src/main/java/org/ehcache/event/CacheEvent.java b/ehcache-api/src/main/java/org/ehcache/event/CacheEvent.java similarity index 100% rename from api/src/main/java/org/ehcache/event/CacheEvent.java rename to ehcache-api/src/main/java/org/ehcache/event/CacheEvent.java diff --git a/api/src/main/java/org/ehcache/event/CacheEventListener.java b/ehcache-api/src/main/java/org/ehcache/event/CacheEventListener.java similarity index 100% rename from api/src/main/java/org/ehcache/event/CacheEventListener.java rename to ehcache-api/src/main/java/org/ehcache/event/CacheEventListener.java diff --git a/api/src/main/java/org/ehcache/event/EventFiring.java b/ehcache-api/src/main/java/org/ehcache/event/EventFiring.java similarity index 100% rename from api/src/main/java/org/ehcache/event/EventFiring.java rename to ehcache-api/src/main/java/org/ehcache/event/EventFiring.java diff --git a/api/src/main/java/org/ehcache/event/EventOrdering.java b/ehcache-api/src/main/java/org/ehcache/event/EventOrdering.java similarity index 100% rename from api/src/main/java/org/ehcache/event/EventOrdering.java rename to ehcache-api/src/main/java/org/ehcache/event/EventOrdering.java diff --git a/api/src/main/java/org/ehcache/event/EventType.java b/ehcache-api/src/main/java/org/ehcache/event/EventType.java similarity index 100% rename from api/src/main/java/org/ehcache/event/EventType.java rename to ehcache-api/src/main/java/org/ehcache/event/EventType.java diff --git a/api/src/main/java/org/ehcache/event/package-info.java b/ehcache-api/src/main/java/org/ehcache/event/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/event/package-info.java rename to ehcache-api/src/main/java/org/ehcache/event/package-info.java diff --git a/api/src/main/java/org/ehcache/expiry/Duration.java b/ehcache-api/src/main/java/org/ehcache/expiry/Duration.java similarity index 97% rename from api/src/main/java/org/ehcache/expiry/Duration.java rename to ehcache-api/src/main/java/org/ehcache/expiry/Duration.java index 477d9ea41e..032baa4a72 100644 --- a/api/src/main/java/org/ehcache/expiry/Duration.java +++ b/ehcache-api/src/main/java/org/ehcache/expiry/Duration.java @@ -19,7 +19,13 @@ /** * A time duration in a given {@link TimeUnit}. + * + * @see java.time.Duration + * @see ExpiryPolicy + * + * @deprecated Replaced with {@link java.time.Duration} */ +@Deprecated public final class Duration { /** diff --git a/api/src/main/java/org/ehcache/expiry/Expirations.java b/ehcache-api/src/main/java/org/ehcache/expiry/Expirations.java similarity index 81% rename from api/src/main/java/org/ehcache/expiry/Expirations.java rename to ehcache-api/src/main/java/org/ehcache/expiry/Expirations.java index bb19093d9f..87ffbd8970 100644 --- a/api/src/main/java/org/ehcache/expiry/Expirations.java +++ b/ehcache-api/src/main/java/org/ehcache/expiry/Expirations.java @@ -15,18 +15,22 @@ */ package org.ehcache.expiry; -import org.ehcache.ValueSupplier; +import java.util.Objects; /** * Utility class for getting predefined {@link Expiry} instances. */ +@Deprecated public final class Expirations { /** * Get an {@link Expiry} instance for a non expiring (ie. "eternal") cache. * * @return the no expiry instance + * + * @deprecated Use {@code org.ehcache.config.builders.ExpiryPolicyBuilder#noExpiration()} instead */ + @Deprecated public static Expiry noExpiration() { return NoExpiry.INSTANCE; } @@ -36,7 +40,10 @@ public static Expiry noExpiration() { * * @param timeToLive the TTL duration * @return a TTL expiry + * + * @deprecated Use {@code org.ehcache.config.builders.ExpiryPolicyBuilder#timeToLiveExpiration(java.time.Duration)} instead */ + @Deprecated public static Expiry timeToLiveExpiration(Duration timeToLive) { if (timeToLive == null) { throw new NullPointerException("Duration cannot be null"); @@ -49,7 +56,10 @@ public static Expiry timeToLiveExpiration(Duration timeToLive) { * * @param timeToIdle the TTI duration * @return a TTI expiry + * + * @deprecated Use {@code org.ehcache.config.builders.ExpiryPolicyBuilder#timeToIdleExpiration(java.time.Duration)} instead */ + @Deprecated public static Expiry timeToIdleExpiration(Duration timeToIdle) { if (timeToIdle == null) { throw new NullPointerException("Duration cannot be null"); @@ -65,7 +75,10 @@ public static Expiry timeToIdleExpiration(Duration timeToIdle) { * @param the key type for the cache * @param the value type for the cache * @return an {@link Expiry} builder + * + * @deprecated Use {@code org.ehcache.config.builders.ExpiryPolicyBuilder#expiry()} instead */ + @Deprecated public static ExpiryBuilder builder() { return new ExpiryBuilder<>(); } @@ -77,6 +90,7 @@ private Expirations() { /** * Simple implementation of the {@link Expiry} interface allowing to set constants to each expiry types. */ + @Deprecated private static class BaseExpiry implements Expiry { private final Duration create; @@ -95,12 +109,12 @@ public Duration getExpiryForCreation(K key, V value) { } @Override - public Duration getExpiryForAccess(K key, ValueSupplier value) { + public Duration getExpiryForAccess(K key, org.ehcache.ValueSupplier value) { return access; } @Override - public Duration getExpiryForUpdate(K key, ValueSupplier oldValue, V newValue) { + public Duration getExpiryForUpdate(K key, org.ehcache.ValueSupplier oldValue, V newValue) { return update; } @@ -109,20 +123,20 @@ public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - final BaseExpiry that = (BaseExpiry)o; + BaseExpiry that = (BaseExpiry)o; - if (access != null ? !access.equals(that.access) : that.access != null) return false; - if (create != null ? !create.equals(that.create) : that.create != null) return false; - if (update != null ? !update.equals(that.update) : that.update != null) return false; + if (!Objects.equals(access, that.access)) return false; + if (!Objects.equals(create, that.create)) return false; + if (!Objects.equals(update, that.update)) return false; return true; } @Override public int hashCode() { - int result = create != null ? create.hashCode() : 0; - result = 31 * result + (access != null ? access.hashCode() : 0); - result = 31 * result + (update != null ? update.hashCode() : 0); + int result = Objects.hashCode(create); + result = 31 * result + Objects.hashCode(access); + result = 31 * result + Objects.hashCode(update); return result; } @@ -136,18 +150,21 @@ public String toString() { } } + @Deprecated private static class TimeToLiveExpiry extends BaseExpiry { TimeToLiveExpiry(Duration ttl) { super(ttl, null, ttl); } } + @Deprecated private static class TimeToIdleExpiry extends BaseExpiry { TimeToIdleExpiry(Duration tti) { super(tti, tti, tti); } } + @Deprecated private static class NoExpiry extends BaseExpiry { private static final Expiry INSTANCE = new NoExpiry(); @@ -163,6 +180,7 @@ private NoExpiry() { * @param Key type of the cache entries * @param Value type of the cache entries */ + @Deprecated public static final class ExpiryBuilder { private Duration create = Duration.INFINITE; diff --git a/ehcache-api/src/main/java/org/ehcache/expiry/Expiry.java b/ehcache-api/src/main/java/org/ehcache/expiry/Expiry.java new file mode 100644 index 0000000000..546ee034d0 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/expiry/Expiry.java @@ -0,0 +1,89 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.expiry; + +/** + * A policy object that governs expiration for mappings in a {@link org.ehcache.Cache Cache}. + *

+ * Previous values are not accessible directly but are rather available through a {@link org.ehcache.ValueSupplier value supplier} + * to indicate that access can require computation (such as deserialization). + *

+ * NOTE: Some cache configurations (eg. caches with eventual consistency) may use local (ie. non-consistent) state + * to decide whether to call {@link #getExpiryForUpdate(Object, org.ehcache.ValueSupplier, Object)} vs. + * {@link #getExpiryForCreation(Object, Object)}. For these cache configurations it is advised to return the same + * value for both of these methods + *

+ * See {@link Expirations} for helper methods to create common {@code Expiry} instances. + * + * @param the key type for the cache + * @param the value type for the cache + * + * @see Expirations + * @see ExpiryPolicy + * + * @deprecated Replaced with {@link ExpiryPolicy} that builds on the {@code java.time} types. + */ +@Deprecated +public interface Expiry { + + /** + * Returns the lifetime of an entry when it is initially added to a {@link org.ehcache.Cache Cache}. + *

+ * This method must not return {@code null}. + *

+ * Exceptions thrown from this method will be swallowed and result in the expiry duration being + * {@link Duration#ZERO ZERO}. + * + * @param key the key of the newly added entry + * @param value the value of the newly added entry + * @return a non-null {@link Duration} + */ + Duration getExpiryForCreation(K key, V value); + + /** + * Returns the expiration {@link Duration} (relative to the current time) when an existing entry is accessed from a + * {@link org.ehcache.Cache Cache}. + *

+ * Returning {@code null} indicates that the expiration time remains unchanged. + *

+ * Exceptions thrown from this method will be swallowed and result in the expiry duration being + * {@link Duration#ZERO ZERO}. + * + * @param key the key of the accessed entry + * @param value a value supplier for the accessed entry + * @return an expiration {@code Duration}, {@code null} means unchanged + */ + Duration getExpiryForAccess(K key, org.ehcache.ValueSupplier value); + + + /** + * Returns the expiration {@link Duration} (relative to the current time) when an existing entry is updated in a + * {@link org.ehcache.Cache Cache}. + *

+ * Returning {@code null} indicates that the expiration time remains unchanged. + *

+ * Exceptions thrown from this method will be swallowed and result in the expiry duration being + * {@link Duration#ZERO ZERO}. + * + * @param key the key of the updated entry + * @param oldValue a value supplier for the previous value of the entry + * @param newValue the new value of the entry + * @return an expiration {@code Duration}, {@code null} means unchanged + */ + Duration getExpiryForUpdate(K key, org.ehcache.ValueSupplier oldValue, V newValue); + +} diff --git a/ehcache-api/src/main/java/org/ehcache/expiry/ExpiryPolicy.java b/ehcache-api/src/main/java/org/ehcache/expiry/ExpiryPolicy.java new file mode 100644 index 0000000000..979d319a25 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/expiry/ExpiryPolicy.java @@ -0,0 +1,118 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.expiry; + +import java.time.Duration; +import java.util.function.Supplier; + +/** + * A policy object that governs expiration for mappings in a {@link org.ehcache.Cache Cache}. + *

+ * Previous values are not accessible directly but are rather available through a value {@code Supplier} + * to indicate that access can require computation (such as deserialization). + *

+ * {@link java.time.Duration#isNegative() Negative durations} are not supported, expiry policy implementation returning such a + * duration will result in immediate expiry, as if the duration was {@link java.time.Duration#ZERO zero}. + *

+ * NOTE: Some cache configurations (eg. caches with eventual consistency) may use local (ie. non-consistent) state + * to decide whether to call {@link #getExpiryForUpdate(Object, Supplier, Object)} vs. + * {@link #getExpiryForCreation(Object, Object)}. For these cache configurations it is advised to return the same + * value for both of these methods + * + * @param the key type for the cache + * @param the value type for the cache + * + */ +public interface ExpiryPolicy { + + /** + * A {@link Duration duration} that represents an infinite time. + */ + Duration INFINITE = Duration.ofNanos(Long.MAX_VALUE); + + /** + * An {@code ExpiryPolicy} that represents a no expiration policy + */ + ExpiryPolicy NO_EXPIRY = new ExpiryPolicy() { + @Override + public String toString() { + return "No Expiry"; + } + + @Override + public Duration getExpiryForCreation(Object key, Object value) { + return INFINITE; + } + + @Override + public Duration getExpiryForAccess(Object key, Supplier value) { + return null; + } + + @Override + public Duration getExpiryForUpdate(Object key, Supplier oldValue, Object newValue) { + return null; + } + }; + + /** + * Returns the lifetime of an entry when it is initially added to a {@link org.ehcache.Cache Cache}. + *

+ * This method must not return {@code null}. + *

+ * Exceptions thrown from this method will be swallowed and result in the expiry duration being + * {@link Duration#ZERO ZERO}. + * + * @param key the key of the newly added entry + * @param value the value of the newly added entry + * @return a non-null {@code Duration} + */ + Duration getExpiryForCreation(K key, V value); + + /** + * Returns the expiration {@link Duration duration} (relative to the current time) when an existing entry + * is accessed from a {@link org.ehcache.Cache Cache}. + *

+ * Returning {@code null} indicates that the expiration time remains unchanged. + *

+ * Exceptions thrown from this method will be swallowed and result in the expiry duration being + * {@link Duration#ZERO ZERO}. + * + * @param key the key of the accessed entry + * @param value a value supplier for the accessed entry + * @return an expiration {@code Duration}, {@code null} means unchanged + */ + Duration getExpiryForAccess(K key, Supplier value); + + + /** + * Returns the expiration {@link Duration duration} (relative to the current time) when an existing entry + * is updated in a {@link org.ehcache.Cache Cache}. + *

+ * Returning {@code null} indicates that the expiration time remains unchanged. + *

+ * Exceptions thrown from this method will be swallowed and result in the expiry duration being + * {@link Duration#ZERO ZERO}. + * + * @param key the key of the updated entry + * @param oldValue a value supplier for the previous value of the entry + * @param newValue the new value of the entry + * @return an expiration {@code Duration}, {@code null} means unchanged + */ + Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue); + +} diff --git a/ehcache-api/src/main/java/org/ehcache/expiry/package-info.java b/ehcache-api/src/main/java/org/ehcache/expiry/package-info.java new file mode 100644 index 0000000000..5882311f93 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/expiry/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * {@link org.ehcache.expiry.ExpiryPolicy Expiry} API of a {@link org.ehcache.Cache Cache}. + */ +package org.ehcache.expiry; diff --git a/api/src/main/java/org/ehcache/package-info.java b/ehcache-api/src/main/java/org/ehcache/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/package-info.java rename to ehcache-api/src/main/java/org/ehcache/package-info.java diff --git a/api/src/main/java/org/ehcache/spi/copy/Copier.java b/ehcache-api/src/main/java/org/ehcache/spi/copy/Copier.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/copy/Copier.java rename to ehcache-api/src/main/java/org/ehcache/spi/copy/Copier.java diff --git a/api/src/main/java/org/ehcache/spi/copy/CopyProvider.java b/ehcache-api/src/main/java/org/ehcache/spi/copy/CopyProvider.java similarity index 96% rename from api/src/main/java/org/ehcache/spi/copy/CopyProvider.java rename to ehcache-api/src/main/java/org/ehcache/spi/copy/CopyProvider.java index d52f1ff636..076c0cda39 100644 --- a/api/src/main/java/org/ehcache/spi/copy/CopyProvider.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/copy/CopyProvider.java @@ -43,7 +43,7 @@ public interface CopyProvider extends Service { * @param the type to copy to/from * @return a non {@code null} {@link Copier} instance */ - Copier createKeyCopier(Class clazz, Serializer serializer, ServiceConfiguration... configs); + Copier createKeyCopier(Class clazz, Serializer serializer, ServiceConfiguration... configs); /** * Creates a value {@link Copier} with the given parameters. @@ -56,7 +56,7 @@ public interface CopyProvider extends Service { * @param the type to copy to/from * @return a non {@code null} {@link Copier} instance */ - Copier createValueCopier(Class clazz, Serializer serializer, ServiceConfiguration... configs); + Copier createValueCopier(Class clazz, Serializer serializer, ServiceConfiguration... configs); /** * Releases the provided {@link Copier} instance. diff --git a/api/src/main/java/org/ehcache/spi/copy/package-info.java b/ehcache-api/src/main/java/org/ehcache/spi/copy/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/copy/package-info.java rename to ehcache-api/src/main/java/org/ehcache/spi/copy/package-info.java diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheLoadingException.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheLoadingException.java similarity index 91% rename from api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheLoadingException.java rename to ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheLoadingException.java index 9ae3e56139..421e5ef0d2 100644 --- a/api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheLoadingException.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheLoadingException.java @@ -42,7 +42,7 @@ public class BulkCacheLoadingException extends CacheLoadingException { * @param failures the map of keys to failure encountered while loading * @param successes the map of keys successfully loaded and their associated value */ - public BulkCacheLoadingException(final Map failures, final Map successes) { + public BulkCacheLoadingException(Map failures, Map successes) { this.failures = Collections.unmodifiableMap(failures); this.successes = Collections.unmodifiableMap(successes); } @@ -60,7 +60,7 @@ public BulkCacheLoadingException(final Map failures, final Map failures, final Map successes) { + public BulkCacheLoadingException(String message, final Map failures, Map successes) { super(message); this.failures = Collections.unmodifiableMap(failures); this.successes = Collections.unmodifiableMap(successes); diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheWritingException.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheWritingException.java similarity index 86% rename from api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheWritingException.java rename to ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheWritingException.java index 893a0993ad..38e591aa07 100644 --- a/api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheWritingException.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/BulkCacheWritingException.java @@ -43,7 +43,7 @@ public class BulkCacheWritingException extends CacheWritingException { * @param failures the map of keys to failure encountered while loading * @param successes the map of keys successfully loaded and their associated value */ - public BulkCacheWritingException(final Map failures, final Set successes) { + public BulkCacheWritingException(Map failures, Set successes) { this.failures = Collections.unmodifiableMap(failures); this.successes = Collections.unmodifiableSet(successes); } @@ -68,11 +68,9 @@ public Set getSuccesses() { @Override public String getMessage() { - StringBuilder sb = new StringBuilder(); - sb.append("Failed keys :"); - for (Map.Entry entry : failures.entrySet()) { - sb.append("\n ").append(entry.getKey()).append(" : ").append(entry.getValue()); - } + StringBuilder sb = new StringBuilder(13 + failures.size() * 20); // try to guess the final size + sb.append("Failed keys:"); + failures.forEach((k, v) -> sb.append("\n ").append(k).append(" : ").append(v)); return sb.toString(); } diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriter.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriter.java similarity index 87% rename from api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriter.java rename to ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriter.java index 599facaa75..d7c40c29e2 100644 --- a/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriter.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriter.java @@ -16,6 +16,7 @@ package org.ehcache.spi.loaderwriter; +import java.util.HashMap; import java.util.Map; /** @@ -75,7 +76,13 @@ public interface CacheLoaderWriter { * @throws BulkCacheLoadingException in case of partial success * @throws Exception in case no values could be loaded */ - Map loadAll(Iterable keys) throws BulkCacheLoadingException, Exception; + default Map loadAll(Iterable keys) throws BulkCacheLoadingException, Exception { + Map entries = new HashMap<>(); + for (K k : keys) { + entries.put(k, load(k)) ; + } + return entries; + } /** * Writes a single mapping. @@ -105,7 +112,11 @@ public interface CacheLoaderWriter { * @throws BulkCacheWritingException in case of partial success * @throws Exception in case no values could be written */ - void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception; + default void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { + for (Map.Entry entry : entries) { + write(entry.getKey(), entry.getValue()); + } + } /** * Deletes a single mapping. @@ -127,6 +138,10 @@ public interface CacheLoaderWriter { * @throws BulkCacheWritingException in case of partial success * @throws Exception in case no values can be loaded */ - void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception; + default void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { + for (K k : keys) { + delete(k); + } + } } diff --git a/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterConfiguration.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterConfiguration.java new file mode 100644 index 0000000000..dee84aec39 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterConfiguration.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.spi.loaderwriter; + +import org.ehcache.spi.service.ServiceConfiguration; + +/** + * {@link ServiceConfiguration} for the {@link CacheLoaderWriterProvider}. + *

+ * The {@code CacheLoaderWriterProvider} provides write-behind services to a + * {@link org.ehcache.Cache Cache}. + * + * @param representation type + */ +public interface CacheLoaderWriterConfiguration extends ServiceConfiguration { + /** + * {@inheritDoc} + */ + @Override + default Class getServiceType() { + return CacheLoaderWriterProvider.class; + } +} diff --git a/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java new file mode 100644 index 0000000000..cbef48f166 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java @@ -0,0 +1,78 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.spi.loaderwriter; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.spi.service.Service; + +/** + * A {@link Service} that creates {@link CacheLoaderWriter} instances. + *

+ * A {@code CacheManager} will use the {@link #createCacheLoaderWriter(java.lang.String, org.ehcache.config.CacheConfiguration)} + * method to create {@code CacheLoaderWriter} instances for each {@code Cache} it + * manages. + *

+ * For any non {@code null} value returned, the {@code Cache} will be configured to use the + * {@code CacheLoaderWriter} instance returned. + */ +public interface CacheLoaderWriterProvider extends Service { + + /** + * Creates a {@code CacheLoaderWriter} for use with the {@link org.ehcache.Cache Cache} + * of the given alias and configuration. + * + * @param alias the {@code Cache} alias in the {@code CacheManager} + * @param cacheConfiguration the configuration for the associated cache + * @param the loader-writer key type + * @param the loader-writer value type + * + * @return the {@code CacheLoaderWriter} to be used by the {@code Cache} or {@code null} if none + */ + CacheLoaderWriter createCacheLoaderWriter(String alias, CacheConfiguration cacheConfiguration); + + /** + * Releases a {@code CacheLoaderWriter} when the associated {@link org.ehcache.Cache Cache} + * is finished with it. + *

+ * If the {@code CacheLoaderWriter} instance was user provided {@link java.io.Closeable#close() close} + * will not be invoked. + * + * + * @param alias the {@code Cache} alias in the {@code CacheManager} + * @param cacheLoaderWriter the {@code CacheLoaderWriter} being released + * @throws Exception when the release fails + */ + void releaseCacheLoaderWriter(String alias, CacheLoaderWriter cacheLoaderWriter) throws Exception; + + /** + * Returns preconfigured {@link org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration} for the given alias + * + * @param alias the {@code Cache} alias in the {@code CacheManager} + * + * @return {@code CacheLoaderWriterConfiguration} configured for the {@code Cache}, otherwise null + */ + CacheLoaderWriterConfiguration getPreConfiguredCacheLoaderWriterConfig(String alias); + + /** + * Checks whether {@link org.ehcache.spi.loaderwriter.CacheLoaderWriter} was provided using jsr api + * + * @param alias the {@code Cache} alias in the {@code CacheManager} + * @return {@code true} if {@code CacheLoaderWriter} was provided using jsr api, otherwise false. + */ + boolean isLoaderJsrProvided(String alias); + +} diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoadingException.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoadingException.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoadingException.java rename to ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoadingException.java diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/CacheWritingException.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheWritingException.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/loaderwriter/CacheWritingException.java rename to ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/CacheWritingException.java diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindConfiguration.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindConfiguration.java similarity index 95% rename from api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindConfiguration.java rename to ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindConfiguration.java index b71622f044..b178e56575 100644 --- a/api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindConfiguration.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindConfiguration.java @@ -24,8 +24,10 @@ *

* The {@code WriteBehindProvider} provides write-behind services to a * {@link org.ehcache.Cache Cache}. + * + * @param representation type */ -public interface WriteBehindConfiguration extends ServiceConfiguration { +public interface WriteBehindConfiguration extends ServiceConfiguration { /** * The concurrency of the write behind engines queues. diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindProvider.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindProvider.java similarity index 98% rename from api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindProvider.java rename to ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindProvider.java index baa7d0b92a..905faf19a8 100644 --- a/api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindProvider.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/WriteBehindProvider.java @@ -37,7 +37,7 @@ public interface WriteBehindProvider extends Service { * * @return the write-behind decorated loader writer */ - CacheLoaderWriter createWriteBehindLoaderWriter(CacheLoaderWriter cacheLoaderWriter, WriteBehindConfiguration configuration); + CacheLoaderWriter createWriteBehindLoaderWriter(CacheLoaderWriter cacheLoaderWriter, WriteBehindConfiguration configuration); /** * Releases a write-behind decorator when the associated {@link org.ehcache.Cache Cache} diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/package-info.java b/ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/loaderwriter/package-info.java rename to ehcache-api/src/main/java/org/ehcache/spi/loaderwriter/package-info.java diff --git a/api/src/main/java/org/ehcache/spi/persistence/PersistableResourceService.java b/ehcache-api/src/main/java/org/ehcache/spi/persistence/PersistableResourceService.java similarity index 97% rename from api/src/main/java/org/ehcache/spi/persistence/PersistableResourceService.java rename to ehcache-api/src/main/java/org/ehcache/spi/persistence/PersistableResourceService.java index cede0787b1..b45f4c5516 100644 --- a/api/src/main/java/org/ehcache/spi/persistence/PersistableResourceService.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/persistence/PersistableResourceService.java @@ -16,11 +16,9 @@ package org.ehcache.spi.persistence; -import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; import org.ehcache.CachePersistenceException; -import java.util.Collection; import org.ehcache.config.CacheConfiguration; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.PluralService; @@ -106,6 +104,8 @@ public interface PersistableResourceService extends MaintainableService { /** * An identifier for an existing persistable resource. + * + * @param the associated persistence service type */ - interface PersistenceSpaceIdentifier extends ServiceConfiguration {} + interface PersistenceSpaceIdentifier extends ServiceConfiguration {} } diff --git a/api/src/main/java/org/ehcache/spi/persistence/StateHolder.java b/ehcache-api/src/main/java/org/ehcache/spi/persistence/StateHolder.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/persistence/StateHolder.java rename to ehcache-api/src/main/java/org/ehcache/spi/persistence/StateHolder.java diff --git a/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java b/ehcache-api/src/main/java/org/ehcache/spi/persistence/StateRepository.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/persistence/StateRepository.java rename to ehcache-api/src/main/java/org/ehcache/spi/persistence/StateRepository.java diff --git a/api/src/main/java/org/ehcache/spi/persistence/package-info.java b/ehcache-api/src/main/java/org/ehcache/spi/persistence/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/persistence/package-info.java rename to ehcache-api/src/main/java/org/ehcache/spi/persistence/package-info.java diff --git a/ehcache-api/src/main/java/org/ehcache/spi/resilience/RecoveryStore.java b/ehcache-api/src/main/java/org/ehcache/spi/resilience/RecoveryStore.java new file mode 100644 index 0000000000..ea8173a556 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/resilience/RecoveryStore.java @@ -0,0 +1,60 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.spi.resilience; + +import org.ehcache.spi.resilience.StoreAccessException; + +/** + * A recovery store is used during entry cleanup done by the {@link ResilienceStrategy}. It's called + * when a {@link org.ehcache.core.spi.store.Store} failed on an entry. Implementations will in general want to get rid + * of this entry which is what the recovery store is used for. + *

+ * Note that the methods on this call with tend to fail since the store already failed once and caused the resilience + * strategy to be called. + * + * @param store key type + */ +public interface RecoveryStore { + + /** + * Obliterate all keys in a store. + * + * @throws StoreAccessException in case of store failure + */ + void obliterate() throws StoreAccessException; + + /** + * Obliterate a given key. + * + * @param key the key to obliterate + * @throws StoreAccessException in case of store failure + */ + void obliterate(K key) throws StoreAccessException; + + /** + * Obliterate a list of keys. + * + * @param keys keys to obliterate + * @throws StoreAccessException in case of store failure + */ + default void obliterate(Iterable keys) throws StoreAccessException { + for (K key : keys) { + obliterate(key); + } + } + +} diff --git a/ehcache-api/src/main/java/org/ehcache/spi/resilience/ResilienceStrategy.java b/ehcache-api/src/main/java/org/ehcache/spi/resilience/ResilienceStrategy.java new file mode 100644 index 0000000000..a709db85b0 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/resilience/ResilienceStrategy.java @@ -0,0 +1,178 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.spi.resilience; + +import java.util.Map; + +import org.ehcache.Cache; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +/** + * A strategy for providing cache resilience in the face of failure. + *

+ * An implementation of this interface is used by a cache to decide how to + * recover after internal components of the cache fail. Implementations of + * these methods are expected to take suitable recovery steps. They can then + * choose between allowing the operation to terminate successfully, or throw an + * exception which will be propagated to the thread calling in to the cache. + *

+ * Resilience in this context refers only to resilience against cache failures + * and not to resilience against failures of any underlying + * {@link CacheLoaderWriter}. To this end writer or loader failures will only be + * reported to the strategy in the context of a coincident cache failure. + * Isolated writer and loader exceptions will be thrown directly. + * + * @param the type of the keys used to access data within the cache + * @param the type of the values held within the cache + * + * @author Chris Dennis + */ +public interface ResilienceStrategy { + + /** + * Called when a {@link Cache#get(java.lang.Object)} fails on a cache without + * a cache loader due to an underlying store failure. + * + * @param key the key being retrieved + * @param e the triggered failure + * @return the value to return from the operation + */ + V getFailure(K key, StoreAccessException e); + + /** + * Called when a {@link Cache#containsKey(java.lang.Object)} fails due to an + * underlying store failure, and the resultant cache load operation also fails. + * + * @param key the key being queried + * @param e the triggered failure + * @return the value to return from the operation + */ + boolean containsKeyFailure(K key, StoreAccessException e); + + /** + * Called when a {@link Cache#put(java.lang.Object, java.lang.Object)} fails + * due to an underlying store failure. + * + * @param key the key being put + * @param value the value being put + * @param e the triggered failure + */ + void putFailure(K key, V value, StoreAccessException e); + + /** + * Called when a {@link Cache#remove(java.lang.Object)} fails due to an + * underlying store failure. + * + * @param key the key being removed + * @param e the triggered failure + */ + void removeFailure(K key, StoreAccessException e); + + /** + * Called when a {@link Cache#clear()} fails due to an underlying store + * failure. + * + * @param e the triggered failure + */ + void clearFailure(StoreAccessException e); + + /** + * Called when a cache iterator advancement fails due to an underlying store + * failure. + * + * @param e the triggered failure + * @return an entry to return on a failed iteration + */ + Cache.Entry iteratorFailure(StoreAccessException e); + + /** + * Called when a {@link Cache#putIfAbsent(java.lang.Object, java.lang.Object)} + * fails due to an underlying store failure. + *

+ * If it is known at the time of calling that the key is absent from the cache + * (and the writer if one is present) then {@code knownToBeAbsent} will be + * {@code true}. + * + * @param key the key being put + * @param value the value being put + * @param e the triggered failure + * @return the value to return from the operation + */ + V putIfAbsentFailure(K key, V value, StoreAccessException e); + + /** + * Called when a {@link Cache#remove(Object, Object)} + * fails due to an underlying store failure. + * + * @param key the key being removed + * @param value the value being removed + * @param e the triggered failure + * @return the value to return from the operation + */ + boolean removeFailure(K key, V value, StoreAccessException e); + + /** + * Called when a {@link Cache#replace(java.lang.Object, java.lang.Object)} + * fails due to an underlying store failure. + * + * @param key the key being replaced + * @param value the value being replaced + * @param e the triggered failure + * @return the value to return from the operation + */ + V replaceFailure(K key, V value, StoreAccessException e); + + /** + * Called when a {@link Cache#replace(java.lang.Object, java.lang.Object, java.lang.Object)} + * fails due to an underlying store failure. + * + * @param key the key being replaced + * @param value the expected value + * @param newValue the replacement value + * @param e the triggered failure + * @return the value to return from the operation + */ + boolean replaceFailure(K key, V value, V newValue, StoreAccessException e); + + /** + * Called when a {@link Cache#getAll(java.util.Set)} fails on a cache + * without a cache loader due to an underlying store failure. + * + * @param keys the keys being retrieved + * @param e the triggered failure + * @return the value to return from the operation + */ + Map getAllFailure(Iterable keys, StoreAccessException e); + + /** + * Called when a {@link Cache#putAll(java.util.Map)} fails due to an + * underlying store failure. + * + * @param entries the entries being put + * @param e the triggered failure + */ + void putAllFailure(Map entries, StoreAccessException e); + + /** + * Called when a {@link Cache#removeAll(java.util.Set)} fails due to an + * underlying store failure. + * + * @param keys the keys being removed + * @param e the triggered failure + */ + void removeAllFailure(Iterable keys, StoreAccessException e); +} diff --git a/ehcache-api/src/main/java/org/ehcache/spi/resilience/ResilienceStrategyProvider.java b/ehcache-api/src/main/java/org/ehcache/spi/resilience/ResilienceStrategyProvider.java new file mode 100644 index 0000000000..4265bfdd31 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/resilience/ResilienceStrategyProvider.java @@ -0,0 +1,60 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.spi.resilience; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; + +/** + * A {@link Service} that creates {@link ResilienceStrategy} instances. + *

+ * A {@code CacheManager} will use the {@link #createResilienceStrategy(String, CacheConfiguration, RecoveryStore)} and + * {@link #createResilienceStrategy(String, CacheConfiguration, RecoveryStore, CacheLoaderWriter)} methods to create + * {@code ResilienceStrategy} instances for each {@code Cache} it manages. + */ +public interface ResilienceStrategyProvider extends Service { + + /** + * Creates a {@code ResilienceStrategy} for the {@link org.ehcache.Cache Cache} with the given alias and configuration + * using the given {@link RecoveryStore}. + * + * @param alias the {@code Cache} alias in the {@code CacheManager} + * @param configuration the configuration for the associated cache + * @param recoveryStore the associated recovery store + * @param the stores key type + * @param the stores value type + * @return the {@code ResilienceStrategy} to be used by the {@code Cache} + */ + ResilienceStrategy createResilienceStrategy(String alias, CacheConfiguration configuration, + RecoveryStore recoveryStore); + + /** + * Creates a {@code ResilienceStrategy} for the {@link org.ehcache.Cache Cache} with the given alias and configuration + * using the given {@link RecoveryStore} and {@link CacheLoaderWriter} + * + * @param alias the {@code Cache} alias in the {@code CacheManager} + * @param configuration the configuration for the associated cache + * @param recoveryStore the associated recovery store + * @param loaderWriter the associated loader-writer + * @param the stores key type + * @param the stores value type + * @return the {@code ResilienceStrategy} to be used by the {@code Cache} + */ + ResilienceStrategy createResilienceStrategy(String alias, CacheConfiguration configuration, + RecoveryStore recoveryStore, CacheLoaderWriter loaderWriter); +} diff --git a/core/src/main/java/org/ehcache/core/spi/store/StoreAccessException.java b/ehcache-api/src/main/java/org/ehcache/spi/resilience/StoreAccessException.java similarity index 97% rename from core/src/main/java/org/ehcache/core/spi/store/StoreAccessException.java rename to ehcache-api/src/main/java/org/ehcache/spi/resilience/StoreAccessException.java index 6c4cf9cbd7..f3eb3e6337 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/StoreAccessException.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/resilience/StoreAccessException.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.core.spi.store; +package org.ehcache.spi.resilience; /** * Generic exception used when an internal operation fails on a {@link org.ehcache.Cache}. diff --git a/ehcache-api/src/main/java/org/ehcache/spi/resilience/package-info.java b/ehcache-api/src/main/java/org/ehcache/spi/resilience/package-info.java new file mode 100644 index 0000000000..6e0322fec1 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/resilience/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * {@link org.ehcache.spi.resilience.ResilienceStrategy} API of a {@link org.ehcache.Cache Cache}. + */ +package org.ehcache.spi.resilience; diff --git a/api/src/main/java/org/ehcache/spi/serialization/SerializationProvider.java b/ehcache-api/src/main/java/org/ehcache/spi/serialization/SerializationProvider.java similarity index 95% rename from api/src/main/java/org/ehcache/spi/serialization/SerializationProvider.java rename to ehcache-api/src/main/java/org/ehcache/spi/serialization/SerializationProvider.java index a297362b08..05befb3695 100644 --- a/api/src/main/java/org/ehcache/spi/serialization/SerializationProvider.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/serialization/SerializationProvider.java @@ -50,7 +50,7 @@ public interface SerializationProvider extends Service { * * @throws UnsupportedTypeException if a serializer cannot be created for the given type */ - Serializer createKeySerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException; + Serializer createKeySerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException; /** * Creates a value {@link Serializer} with the given parameters. @@ -65,7 +65,7 @@ public interface SerializationProvider extends Service { * * @throws UnsupportedTypeException if a serializer cannot be created for the given type */ - Serializer createValueSerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException; + Serializer createValueSerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException; /** * Releases the given {@link Serializer} instance. diff --git a/api/src/main/java/org/ehcache/spi/serialization/Serializer.java b/ehcache-api/src/main/java/org/ehcache/spi/serialization/Serializer.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/serialization/Serializer.java rename to ehcache-api/src/main/java/org/ehcache/spi/serialization/Serializer.java diff --git a/api/src/main/java/org/ehcache/spi/serialization/SerializerException.java b/ehcache-api/src/main/java/org/ehcache/spi/serialization/SerializerException.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/serialization/SerializerException.java rename to ehcache-api/src/main/java/org/ehcache/spi/serialization/SerializerException.java diff --git a/api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java b/ehcache-api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java rename to ehcache-api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java diff --git a/api/src/main/java/org/ehcache/spi/serialization/UnsupportedTypeException.java b/ehcache-api/src/main/java/org/ehcache/spi/serialization/UnsupportedTypeException.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/serialization/UnsupportedTypeException.java rename to ehcache-api/src/main/java/org/ehcache/spi/serialization/UnsupportedTypeException.java diff --git a/api/src/main/java/org/ehcache/spi/serialization/package-info.java b/ehcache-api/src/main/java/org/ehcache/spi/serialization/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/serialization/package-info.java rename to ehcache-api/src/main/java/org/ehcache/spi/serialization/package-info.java diff --git a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java b/ehcache-api/src/main/java/org/ehcache/spi/service/MaintainableService.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/service/MaintainableService.java rename to ehcache-api/src/main/java/org/ehcache/spi/service/MaintainableService.java diff --git a/ehcache-api/src/main/java/org/ehcache/spi/service/OptionalServiceDependencies.java b/ehcache-api/src/main/java/org/ehcache/spi/service/OptionalServiceDependencies.java new file mode 100644 index 0000000000..1bfbb528f2 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/service/OptionalServiceDependencies.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.spi.service; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation that allows a {@link Service} implementation to declare an optional dependency on other {@code Service}s. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE) +@Inherited +public @interface OptionalServiceDependencies { + + /** + * Array of {@link Service} dependency classes + * + * @return the dependency class names + */ + String[] value(); +} diff --git a/api/src/main/java/org/ehcache/spi/service/PluralService.java b/ehcache-api/src/main/java/org/ehcache/spi/service/PluralService.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/service/PluralService.java rename to ehcache-api/src/main/java/org/ehcache/spi/service/PluralService.java diff --git a/api/src/main/java/org/ehcache/spi/service/Service.java b/ehcache-api/src/main/java/org/ehcache/spi/service/Service.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/service/Service.java rename to ehcache-api/src/main/java/org/ehcache/spi/service/Service.java diff --git a/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java b/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java new file mode 100644 index 0000000000..2358109150 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java @@ -0,0 +1,67 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.spi.service; + +/** + * A configuration type to be used when interacting with a {@link Service}. + * + * @param the service type this configuration works with + * @param the type of the detached representation + */ +public interface ServiceConfiguration { + + /** + * Indicates which service this configuration works with. + * + * @return the service type + */ + Class getServiceType(); + + /** + * Derive a detached representation from this configuration + * + * @return a detached representation + * @throws UnsupportedOperationException if the configuration has no representation + */ + default R derive() throws UnsupportedOperationException { + throw new UnsupportedOperationException(); + } + + /** + * Construct a new configuration from the given detached representation. + * + * @param representation a detached representation + * @return a new configuration + * @throws UnsupportedOperationException if the configuration has no representation + */ + default ServiceConfiguration build(R representation) throws UnsupportedOperationException { + throw new UnsupportedOperationException(); + } + + /** + * Returns true if this configuration can co-exist with {@code other} in the same cache configuration. + *

+ * The default implementation of {@code compatibleWith} (as used by many of the implementations) considers any + * instance of the same type (or a sub-type) to be incompatible with this instance. + * + * @param other other service configuration + * @return {@code true} if the two configurations are compatible + */ + default boolean compatibleWith(ServiceConfiguration other) { + return !getClass().isInstance(other); + }; +} diff --git a/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java b/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java new file mode 100644 index 0000000000..e2636e2263 --- /dev/null +++ b/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java @@ -0,0 +1,67 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.spi.service; + +/** + * A configuration type used when creating a {@link Service}. + * + * @param the service type this configuration works with + * @param the type of the detached representation + */ +public interface ServiceCreationConfiguration { + + /** + * Indicates which service consumes this configuration at creation. + * + * @return the service type + */ + Class getServiceType(); + + /** + * Derive a detached representation from this configuration + * + * @return a detached representation + * @throws UnsupportedOperationException if the configuration has no representation + */ + default R derive() throws UnsupportedOperationException { + throw new UnsupportedOperationException(); + } + + /** + * Construct a new configuration from the given detached representation. + * + * @param representation a detached representation + * @return a new configuration + * @throws UnsupportedOperationException if the configuration has no representation + */ + default ServiceCreationConfiguration build(R representation) throws UnsupportedOperationException { + throw new UnsupportedOperationException(); + } + + /** + * Returns true if this configuration can co-exist with {@code other} in the same manager configuration. + *

+ * The default implementation of {@code compatibleWith} (as used by many of the implementations) considers any + * instance of the same type (or a sub-type) to be incompatible with this instance. + * + * @param other other service creation configuration + * @return {@code true} if the two configurations are compatible + */ + default boolean compatibleWith(ServiceCreationConfiguration other) { + return !getClass().isInstance(other); + }; +} diff --git a/api/src/main/java/org/ehcache/spi/service/ServiceDependencies.java b/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceDependencies.java similarity index 92% rename from api/src/main/java/org/ehcache/spi/service/ServiceDependencies.java rename to ehcache-api/src/main/java/org/ehcache/spi/service/ServiceDependencies.java index 770e6103ee..8e582d38bd 100644 --- a/api/src/main/java/org/ehcache/spi/service/ServiceDependencies.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceDependencies.java @@ -17,6 +17,7 @@ package org.ehcache.spi.service; import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @@ -26,10 +27,13 @@ */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) +@Inherited public @interface ServiceDependencies { /** * Array of {@link Service} dependency classes + * + * @return the dependency class names */ Class[] value(); } diff --git a/api/src/main/java/org/ehcache/spi/service/ServiceProvider.java b/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceProvider.java similarity index 98% rename from api/src/main/java/org/ehcache/spi/service/ServiceProvider.java rename to ehcache-api/src/main/java/org/ehcache/spi/service/ServiceProvider.java index ffc4a8d9d3..d1f2b0c2d1 100644 --- a/api/src/main/java/org/ehcache/spi/service/ServiceProvider.java +++ b/ehcache-api/src/main/java/org/ehcache/spi/service/ServiceProvider.java @@ -17,6 +17,7 @@ package org.ehcache.spi.service; import java.util.Collection; +import java.util.Optional; /** * A repository of {@link Service} instances that can be used to look them up by type. diff --git a/api/src/main/java/org/ehcache/spi/service/package-info.java b/ehcache-api/src/main/java/org/ehcache/spi/service/package-info.java similarity index 100% rename from api/src/main/java/org/ehcache/spi/service/package-info.java rename to ehcache-api/src/main/java/org/ehcache/spi/service/package-info.java diff --git a/api/src/test/java/org/ehcache/CacheManagerTest.java b/ehcache-api/src/test/java/org/ehcache/CacheManagerTest.java similarity index 100% rename from api/src/test/java/org/ehcache/CacheManagerTest.java rename to ehcache-api/src/test/java/org/ehcache/CacheManagerTest.java diff --git a/api/src/test/java/org/ehcache/UserManagedCacheTest.java b/ehcache-api/src/test/java/org/ehcache/UserManagedCacheTest.java similarity index 100% rename from api/src/test/java/org/ehcache/UserManagedCacheTest.java rename to ehcache-api/src/test/java/org/ehcache/UserManagedCacheTest.java diff --git a/api/src/test/java/org/ehcache/config/units/MemoryUnitTest.java b/ehcache-api/src/test/java/org/ehcache/config/units/MemoryUnitTest.java similarity index 100% rename from api/src/test/java/org/ehcache/config/units/MemoryUnitTest.java rename to ehcache-api/src/test/java/org/ehcache/config/units/MemoryUnitTest.java diff --git a/api/src/test/java/org/ehcache/expiry/DurationTest.java b/ehcache-api/src/test/java/org/ehcache/expiry/DurationTest.java similarity index 98% rename from api/src/test/java/org/ehcache/expiry/DurationTest.java rename to ehcache-api/src/test/java/org/ehcache/expiry/DurationTest.java index 635a288b90..3b694d479e 100644 --- a/api/src/test/java/org/ehcache/expiry/DurationTest.java +++ b/ehcache-api/src/test/java/org/ehcache/expiry/DurationTest.java @@ -26,6 +26,7 @@ import org.junit.Test; +@SuppressWarnings("deprecation") public class DurationTest { @Test diff --git a/ehcache-api/src/test/java/org/ehcache/expiry/ExpirationsTest.java b/ehcache-api/src/test/java/org/ehcache/expiry/ExpirationsTest.java new file mode 100644 index 0000000000..223957c3c5 --- /dev/null +++ b/ehcache-api/src/test/java/org/ehcache/expiry/ExpirationsTest.java @@ -0,0 +1,66 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.expiry; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +import java.util.concurrent.TimeUnit; + +import org.junit.Test; + +@SuppressWarnings("deprecation") +public class ExpirationsTest { + + @Test + public void testNoExpiration() { + Expiry expiry = Expirations.noExpiration(); + assertThat(expiry.getExpiryForCreation(this, this), equalTo(Duration.INFINITE)); + assertThat(expiry.getExpiryForAccess(this, () -> this), nullValue()); + assertThat(expiry.getExpiryForUpdate(this, () -> this, this), nullValue()); + } + + @Test + public void testTTIExpiration() { + Duration duration = new Duration(1L, TimeUnit.SECONDS); + Expiry expiry = Expirations.timeToIdleExpiration(duration); + assertThat(expiry.getExpiryForCreation(this, this), equalTo(duration)); + assertThat(expiry.getExpiryForAccess(this, () -> this), equalTo(duration)); + assertThat(expiry.getExpiryForUpdate(this, () -> this, this), equalTo(duration)); + } + + @Test + public void testTTLExpiration() { + Duration duration = new Duration(1L, TimeUnit.SECONDS); + Expiry expiry = Expirations.timeToLiveExpiration(duration); + assertThat(expiry.getExpiryForCreation(this, this), equalTo(duration)); + assertThat(expiry.getExpiryForAccess(this, () -> this), nullValue()); + assertThat(expiry.getExpiryForUpdate(this, () -> this, this), equalTo(duration)); + } + + @Test + public void testExpiration() { + Duration creation = new Duration(1L, TimeUnit.SECONDS); + Duration access = new Duration(2L, TimeUnit.SECONDS); + Duration update = new Duration(3L, TimeUnit.SECONDS); + Expiry expiry = Expirations.builder().setCreate(creation).setAccess(access).setUpdate(update).build(); + assertThat(expiry.getExpiryForCreation(this, this), equalTo(creation)); + assertThat(expiry.getExpiryForAccess(this, () -> this), equalTo(access)); + assertThat(expiry.getExpiryForUpdate(this, () -> this,this), equalTo(update)); + } + +} diff --git a/ehcache-core/build.gradle b/ehcache-core/build.gradle new file mode 100644 index 0000000000..adf8ced16a --- /dev/null +++ b/ehcache-core/build.gradle @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.internal-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Core module' + description = 'The Core module of Ehcache 3' + } +} + +dependencies { + api project(':ehcache-api') + implementation "org.terracotta:statistics:$parent.statisticVersion" + compileOnly 'org.osgi:osgi.core:6.0.0' + compileOnly 'org.osgi:org.osgi.service.component.annotations:1.3.0' + testImplementation project(':spi-tester') +} + +jar { + bnd ( + 'Bundle-Activator': 'org.ehcache.core.osgi.EhcacheActivator', + 'Import-Package': '!javax.annotation, *', + 'Export-Package': '!org.ehcache.core.internal.*, org.ehcache.core.*' + ) +} diff --git a/core/config/checkstyle-suppressions.xml b/ehcache-core/config/checkstyle-suppressions.xml similarity index 100% rename from core/config/checkstyle-suppressions.xml rename to ehcache-core/config/checkstyle-suppressions.xml diff --git a/core/src/main/java/org/ehcache/core/CacheConfigurationChangeEvent.java b/ehcache-core/src/main/java/org/ehcache/core/CacheConfigurationChangeEvent.java similarity index 100% rename from core/src/main/java/org/ehcache/core/CacheConfigurationChangeEvent.java rename to ehcache-core/src/main/java/org/ehcache/core/CacheConfigurationChangeEvent.java diff --git a/core/src/main/java/org/ehcache/core/CacheConfigurationChangeListener.java b/ehcache-core/src/main/java/org/ehcache/core/CacheConfigurationChangeListener.java similarity index 100% rename from core/src/main/java/org/ehcache/core/CacheConfigurationChangeListener.java rename to ehcache-core/src/main/java/org/ehcache/core/CacheConfigurationChangeListener.java diff --git a/core/src/main/java/org/ehcache/core/CacheConfigurationProperty.java b/ehcache-core/src/main/java/org/ehcache/core/CacheConfigurationProperty.java similarity index 100% rename from core/src/main/java/org/ehcache/core/CacheConfigurationProperty.java rename to ehcache-core/src/main/java/org/ehcache/core/CacheConfigurationProperty.java diff --git a/core/src/main/java/org/ehcache/core/DefaultCacheManagerProviderService.java b/ehcache-core/src/main/java/org/ehcache/core/DefaultCacheManagerProviderService.java similarity index 100% rename from core/src/main/java/org/ehcache/core/DefaultCacheManagerProviderService.java rename to ehcache-core/src/main/java/org/ehcache/core/DefaultCacheManagerProviderService.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/Ehcache.java b/ehcache-core/src/main/java/org/ehcache/core/Ehcache.java new file mode 100644 index 0000000000..787b686e4d --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/Ehcache.java @@ -0,0 +1,361 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core; + +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.ehcache.Cache; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.core.events.CacheEventDispatcher; +import org.ehcache.core.util.CollectionUtil; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.Store.ValueHolder; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.core.statistics.BulkOps; +import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.PutOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveOutcome; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.loaderwriter.BulkCacheWritingException; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.slf4j.Logger; + +/** + * Implementation of the {@link Cache} interface when no {@link CacheLoaderWriter} is involved. + *

+ * {@code Ehcache} users should not have to depend on this type but rely exclusively on the api types in package + * {@code org.ehcache}. + * + */ +public class Ehcache extends EhcacheBase { + + private final CacheLoaderWriter cacheLoaderWriter; + + /** + * Creates a new {@code Ehcache} based on the provided parameters. + * + * @param configuration the cache configuration + * @param store the store to use + * @param eventDispatcher the event dispatcher + * @param logger the logger + */ + public Ehcache(CacheConfiguration configuration, final Store store, ResilienceStrategy resilienceStrategy, + CacheEventDispatcher eventDispatcher, Logger logger) { + this(new EhcacheRuntimeConfiguration<>(configuration), store, resilienceStrategy, eventDispatcher, logger, new StatusTransitioner(logger), null); + } + + Ehcache(EhcacheRuntimeConfiguration runtimeConfiguration, Store store, ResilienceStrategy resilienceStrategy, + CacheEventDispatcher eventDispatcher, Logger logger, StatusTransitioner statusTransitioner, CacheLoaderWriter cacheLoaderWriter) { + super(runtimeConfiguration, store, resilienceStrategy, eventDispatcher, logger, statusTransitioner); + this.cacheLoaderWriter = cacheLoaderWriter; + } + + public Ehcache(CacheConfiguration configuration, final Store store, ResilienceStrategy resilienceStrategy, + CacheEventDispatcher eventDispatcher, Logger logger, CacheLoaderWriter cacheLoaderWriter) { + super(new EhcacheRuntimeConfiguration<>(configuration), store, resilienceStrategy, eventDispatcher, logger, new StatusTransitioner(logger)); + this.cacheLoaderWriter = cacheLoaderWriter; + } + + /** + * {@inheritDoc} + */ + @Override + protected Store.ValueHolder doGet(K key) throws StoreAccessException { + return store.get(key); + } + + protected Store.PutStatus doPut(K key, V value) throws StoreAccessException { + return store.put(key, value); + } + + protected boolean doRemoveInternal(final K key) throws StoreAccessException { + return store.remove(key); + } + + protected Map doGetAllInternal(Set keys, boolean includeNulls) throws StoreAccessException { + Map> computedMap = store.bulkComputeIfAbsent(keys, new GetAllFunction<>()); + Map result = new HashMap<>(computedMap.size()); + + int hits = 0; + int keyCount = 0; + for (Map.Entry> entry : computedMap.entrySet()) { + keyCount++; + if (entry.getValue() != null) { + result.put(entry.getKey(), entry.getValue().get()); + hits++; + } else if (includeNulls) { + result.put(entry.getKey(), null); + } + } + + addBulkMethodEntriesCount(BulkOps.GET_ALL_HITS, hits); + addBulkMethodEntriesCount(BulkOps.GET_ALL_MISS, keyCount - hits); + return result; + } + + @Override + public void doPutAll(final Map entries) throws StoreAccessException { + // Copy all entries to write into a Map + Map entriesToRemap = CollectionUtil.copyMapButFailOnNull(entries); + + PutAllFunction putAllFunction = new PutAllFunction<>(logger, entriesToRemap, runtimeConfiguration.getExpiryPolicy()); + store.bulkCompute(entries.keySet(), putAllFunction); + addBulkMethodEntriesCount(BulkOps.PUT_ALL, putAllFunction.getActualPutCount().get()); + addBulkMethodEntriesCount(BulkOps.UPDATE_ALL, putAllFunction.getActualUpdateCount().get()); + } + + protected void doRemoveAll(final Set keys) throws BulkCacheWritingException, StoreAccessException { + RemoveAllFunction removeAllFunction = new RemoveAllFunction<>(); + store.bulkCompute(keys, removeAllFunction); + addBulkMethodEntriesCount(BulkOps.REMOVE_ALL, removeAllFunction.getActualRemoveCount().get()); + } + + @Override + public ValueHolder doPutIfAbsent(final K key, final V value, Consumer put) throws StoreAccessException { + ValueHolder result = store.putIfAbsent(key, value, put); + if(result == null) { + put.accept(true); + } + return result; + } + + @Override + protected Store.RemoveStatus doRemove(K key, V value) throws StoreAccessException { + return store.remove(key, value); + } + + @Override + protected V doReplace(K key, V value) throws StoreAccessException { + ValueHolder old = store.replace(key, value); + return old == null ? null : old.get(); + } + + @Override + protected Store.ReplaceStatus doReplace(final K key, final V oldValue, final V newValue) throws StoreAccessException { + return store.replace(key, oldValue, newValue); + } + + @Override + public Jsr107Cache createJsr107Cache() { + return new Jsr107CacheImpl(); + } + + /** + * {@inheritDoc} + */ + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return this.cacheLoaderWriter; + } + + private final class Jsr107CacheImpl extends Jsr107CacheBase { + + @Override + public void compute(K key, final BiFunction computeFunction, + final Supplier replaceEqual, final Supplier invokeWriter, final Supplier withStatsAndEvents) { + putObserver.begin(); + removeObserver.begin(); + getObserver.begin(); + + try { + BiFunction fn = (mappedKey, mappedValue) -> { + if (mappedValue == null) { + getObserver.end(GetOutcome.MISS); + } else { + getObserver.end(GetOutcome.HIT); + } + + return computeFunction.apply(mappedKey, mappedValue); + + }; + + ValueHolder compute = store.computeAndGet(key, fn, replaceEqual, invokeWriter); + V newValue = compute == null ? null : compute.get(); + if (withStatsAndEvents.get()) { + if (newValue == null) { + removeObserver.end(RemoveOutcome.SUCCESS); + } else { + putObserver.end(PutOutcome.PUT); + } + } + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + @Override + public V getAndRemove(K key) { + getObserver.begin(); + removeObserver.begin(); + + ValueHolder existingValue; + try { + existingValue = store.getAndRemove(key); + } catch (StoreAccessException e) { + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); + removeObserver.end(RemoveOutcome.FAILURE); + throw new RuntimeException(e); + } + + V returnValue = existingValue == null ? null : existingValue.get(); + if (returnValue != null) { + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); + removeObserver.end(RemoveOutcome.SUCCESS); + } else { + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); + } + return returnValue; + } + + @Override + public V getAndPut(K key, final V value) { + getObserver.begin(); + putObserver.begin(); + + ValueHolder existingValue; + try { + existingValue = store.getAndPut(key, value); + } catch (StoreAccessException e) { + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); + putObserver.end(PutOutcome.FAILURE); + throw new RuntimeException(e); + } + + V returnValue = existingValue == null ? null : existingValue.get(); + if (returnValue != null) { + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); + } else { + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); + } + putObserver.end(PutOutcome.PUT); + return returnValue; + } + } + + // The compute function that will return the keys to their NEW values, taking the keys to their old values as input; + // but this could happen in batches, i.e. not necessary containing all of the entries of the Iterable passed to this method + public static class PutAllFunction implements Function>, Iterable>> { + + private final Logger logger; + private final Map entriesToRemap; + private final ExpiryPolicy expiry; + private final AtomicInteger actualPutCount = new AtomicInteger(); + private final AtomicInteger actualUpdateCount = new AtomicInteger(); + + public PutAllFunction(Logger logger, Map entriesToRemap, ExpiryPolicy expiry) { + this.logger = logger; + this.entriesToRemap = entriesToRemap; + this.expiry = expiry; + } + + @Override + public Iterable> apply(final Iterable> entries) { + Map mutations = new LinkedHashMap<>(); + + // then record we handled these mappings + for (Map.Entry entry: entries) { + K key = entry.getKey(); + V existingValue = entry.getValue(); + V newValue = entriesToRemap.remove(key); + + if (newValueAlreadyExpired(key, existingValue, newValue)) { + mutations.put(key, null); + } else { + actualPutCount.incrementAndGet(); + if(existingValue != null) { + actualUpdateCount.incrementAndGet(); + } + mutations.put(key, newValue); + } + } + + // Finally return the values to be installed in the Cache's Store + return mutations.entrySet(); + } + + public Map getEntriesToRemap() { + return entriesToRemap; + } + + private boolean newValueAlreadyExpired(K key, V oldValue, V newValue) { + return EhcacheBase.newValueAlreadyExpired(logger, expiry, key, oldValue, newValue); + } + + public AtomicInteger getActualPutCount() { + return actualPutCount; + } + + public AtomicInteger getActualUpdateCount() { + return actualUpdateCount; + } + } + + public static class RemoveAllFunction implements Function>, Iterable>> { + + private final AtomicInteger actualRemoveCount = new AtomicInteger(); + + @Override + public Iterable> apply(final Iterable> entries) { + Map results = new LinkedHashMap<>(); + + for (Map.Entry entry : entries) { + K key = entry.getKey(); + V existingValue = entry.getValue(); + + if (existingValue != null) { + actualRemoveCount.incrementAndGet(); + } + results.put(key, null); + } + + return results.entrySet(); + } + + public AtomicInteger getActualRemoveCount() { + return actualRemoveCount; + } + } + + public static class GetAllFunction implements Function, Iterable>> { + + @Override + public Iterable> apply(final Iterable keys) { + int size = CollectionUtil.findBestCollectionSize(keys, 1); // in our current implementation, we have one entry all the time + + List> computeResult = new ArrayList<>(size); + + for (K key : keys) { + computeResult.add(new AbstractMap.SimpleImmutableEntry<>(key, null)); + } + + return computeResult; + } + } + +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/EhcacheBase.java b/ehcache-core/src/main/java/org/ehcache/core/EhcacheBase.java new file mode 100644 index 0000000000..aabae905a3 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/EhcacheBase.java @@ -0,0 +1,807 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core; + +import org.ehcache.Cache; +import org.ehcache.Status; +import org.ehcache.config.CacheRuntimeConfiguration; +import org.ehcache.core.events.CacheEventDispatcher; +import org.ehcache.core.spi.LifeCycled; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.Store.ValueHolder; +import org.ehcache.core.statistics.BulkOps; +import org.ehcache.core.statistics.CacheOperationOutcomes.ClearOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.ConditionalRemoveOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.GetAllOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.PutAllOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.PutIfAbsentOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.PutOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveAllOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.ReplaceOutcome; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; +import org.ehcache.spi.loaderwriter.BulkCacheWritingException; +import org.ehcache.spi.loaderwriter.CacheWritingException; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.StoreAccessException; +import org.slf4j.Logger; +import org.terracotta.statistics.StatisticsManager; +import org.terracotta.statistics.observer.OperationObserver; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.atomic.LongAdder; +import java.util.function.Consumer; +import java.util.function.Function; + +import static org.ehcache.core.exceptions.ExceptionFactory.newCacheLoadingException; +import static org.terracotta.statistics.StatisticBuilder.operation; + +/** + * Base implementation of the {@link Cache} interface that is common to all Ehcache implementation + */ +public abstract class EhcacheBase implements InternalCache { + + protected final Logger logger; + + protected final StatusTransitioner statusTransitioner; + + protected final Store store; + protected final ResilienceStrategy resilienceStrategy; + protected final EhcacheRuntimeConfiguration runtimeConfiguration; + + protected final OperationObserver getObserver = operation(GetOutcome.class).named("get").of(this).tag("cache").build(); + protected final OperationObserver getAllObserver = operation(GetAllOutcome.class).named("getAll").of(this).tag("cache").build(); + protected final OperationObserver putObserver = operation(PutOutcome.class).named("put").of(this).tag("cache").build(); + protected final OperationObserver putAllObserver = operation(PutAllOutcome.class).named("putAll").of(this).tag("cache").build(); + protected final OperationObserver removeObserver = operation(RemoveOutcome.class).named("remove").of(this).tag("cache").build(); + protected final OperationObserver removeAllObserver = operation(RemoveAllOutcome.class).named("removeAll").of(this).tag("cache").build(); + protected final OperationObserver conditionalRemoveObserver = operation(ConditionalRemoveOutcome.class).named("conditionalRemove").of(this).tag("cache").build(); + protected final OperationObserver putIfAbsentObserver = operation(PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag("cache").build(); + protected final OperationObserver replaceObserver = operation(ReplaceOutcome.class).named("replace").of(this).tag("cache").build(); + protected final OperationObserver clearObserver = operation(ClearOutcome.class).named("clear").of(this).tag("cache").build(); + + protected final Map bulkMethodEntries = new EnumMap<>(BulkOps.class); + + /** + * Creates a new {@code EhcacheBase} based on the provided parameters. + * + * @param runtimeConfiguration the cache configuration + * @param store the store to use + * @param eventDispatcher the event dispatcher + * @param logger the logger + */ + EhcacheBase(EhcacheRuntimeConfiguration runtimeConfiguration, Store store, ResilienceStrategy resilienceStrategy, + CacheEventDispatcher eventDispatcher, Logger logger, StatusTransitioner statusTransitioner) { + this.store = store; + runtimeConfiguration.addCacheConfigurationListener(store.getConfigurationChangeListeners()); + StatisticsManager.associate(store).withParent(this); + + this.resilienceStrategy = resilienceStrategy; + + this.runtimeConfiguration = runtimeConfiguration; + runtimeConfiguration.addCacheConfigurationListener(eventDispatcher.getConfigurationChangeListeners()); + + this.logger = logger; + this.statusTransitioner = statusTransitioner; + for (BulkOps bulkOp : BulkOps.values()) { + bulkMethodEntries.put(bulkOp, new LongAdder()); + } + } + + /** + * {@inheritDoc} + */ + @Override + public V get(K key) { + getObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(key); + + try { + Store.ValueHolder valueHolder = doGet(key); + + // Check for expiry first + if (valueHolder == null) { + getObserver.end(GetOutcome.MISS); + return null; + } else { + getObserver.end(GetOutcome.HIT); + return valueHolder.get(); + } + } catch (StoreAccessException e) { + V value = resilienceStrategy.getFailure(key, e); + getObserver.end(GetOutcome.FAILURE); + return value; + } + } catch (Throwable e) { + getObserver.end(GetOutcome.FAILURE); + throw e; + } + } + + protected abstract Store.ValueHolder doGet(K key) throws StoreAccessException; + + protected V getNoLoader(K key) { + getObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(key); + + try { + Store.ValueHolder valueHolder = store.get(key); + + // Check for expiry first + if (valueHolder == null) { + getObserver.end(GetOutcome.MISS); + return null; + } else { + getObserver.end(GetOutcome.HIT); + return valueHolder.get(); + } + } catch (StoreAccessException e) { + V value = resilienceStrategy.getFailure(key, e); + getObserver.end(GetOutcome.FAILURE); + return value; + } + } catch (Throwable e) { + getObserver.end(GetOutcome.FAILURE); + throw e; + } + } + + /** + * {@inheritDoc} + */ + @Override + public void put(K key, V value) { + putObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(key, value); + + try { + Store.PutStatus status = doPut(key, value); + switch (status) { + case PUT: + putObserver.end(PutOutcome.PUT); + break; + case NOOP: + putObserver.end(PutOutcome.NOOP); + break; + default: + throw new AssertionError("Invalid Status."); + } + } catch (StoreAccessException e) { + resilienceStrategy.putFailure(key, value, e); + putObserver.end(PutOutcome.FAILURE); + } + } catch (Throwable e) { + putObserver.end(PutOutcome.FAILURE); + throw e; + } + } + + protected abstract Store.PutStatus doPut(K key, V value) throws StoreAccessException; + + /** + * {@inheritDoc} + */ + @Override + public boolean containsKey(final K key) { + statusTransitioner.checkAvailable(); + checkNonNull(key); + try { + return store.containsKey(key); + } catch (StoreAccessException e) { + return resilienceStrategy.containsKeyFailure(key, e); + } + } + + /** + * {@inheritDoc} + */ + @Override + public void remove(K key) throws CacheWritingException { + removeInternal(key); // ignore return value; + } + + protected boolean removeInternal(final K key) { + removeObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(key); + + boolean removed = false; + try { + removed = doRemoveInternal(key); + if (removed) { + removeObserver.end(RemoveOutcome.SUCCESS); + } else { + removeObserver.end(RemoveOutcome.NOOP); + } + } catch (StoreAccessException e) { + resilienceStrategy.removeFailure(key, e); + removeObserver.end(RemoveOutcome.FAILURE); + } + + return removed; + } catch (Throwable e) { + removeObserver.end(RemoveOutcome.FAILURE); + throw e; + } + } + + protected abstract boolean doRemoveInternal(final K key) throws StoreAccessException; + + /** + * {@inheritDoc} + */ + @Override + public void clear() { + clearObserver.begin(); + try { + statusTransitioner.checkAvailable(); + try { + store.clear(); + clearObserver.end(ClearOutcome.SUCCESS); + } catch (StoreAccessException e) { + resilienceStrategy.clearFailure(e); + clearObserver.end(ClearOutcome.FAILURE); + } + } catch (Throwable e) { + clearObserver.end(ClearOutcome.FAILURE); + throw e; + } + } + + /** + * {@inheritDoc} + */ + @Override + public V putIfAbsent(final K key, final V value) { + putIfAbsentObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(key, value); + + boolean[] put = { false }; + + try { + ValueHolder inCache = doPutIfAbsent(key, value, b -> put[0] = b); + if (put[0]) { + putIfAbsentObserver.end(PutIfAbsentOutcome.PUT); + return null; + } else { + putIfAbsentObserver.end(PutIfAbsentOutcome.HIT); + return inCache.get(); + } + } catch (StoreAccessException e) { + V newValue = resilienceStrategy.putIfAbsentFailure(key, value, e); + putIfAbsentObserver.end(PutIfAbsentOutcome.FAILURE); + return newValue; + } + } catch (Throwable e) { + putIfAbsentObserver.end(PutIfAbsentOutcome.FAILURE); + throw e; + } + } + + protected abstract ValueHolder doPutIfAbsent(K key, V value, Consumer put) throws StoreAccessException; + + /** + * {@inheritDoc} + */ + @Override + public Iterator> iterator() { + statusTransitioner.checkAvailable(); + return new CacheEntryIterator(); + } + + /** + * {@inheritDoc} + */ + @Override + public Map getAll(Set keys) throws BulkCacheLoadingException { + return getAllInternal(keys, true); + } + + protected Map getAllInternal(Set keys, boolean includeNulls) { + getAllObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNullContent(keys); + if (keys.isEmpty()) { + getAllObserver.end(GetAllOutcome.SUCCESS); + return Collections.emptyMap(); + } + + try { + Map result = doGetAllInternal(keys, includeNulls); + getAllObserver.end(GetAllOutcome.SUCCESS); + return result; + } catch (StoreAccessException e) { + Map result = resilienceStrategy.getAllFailure(keys, e); + getAllObserver.end(GetAllOutcome.FAILURE); + return result; + } + } catch (Throwable e) { + getAllObserver.end(GetAllOutcome.FAILURE); + throw e; + } + } + + protected abstract Map doGetAllInternal(Set keys, boolean includeNulls) throws StoreAccessException; + + /** + * {@inheritDoc} + */ + @Override + public void putAll(Map entries) throws BulkCacheWritingException { + putAllObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(entries); + if(entries.isEmpty()) { + putAllObserver.end(PutAllOutcome.SUCCESS); + return; + } + + try { + doPutAll(entries); + putAllObserver.end(PutAllOutcome.SUCCESS); + } catch (StoreAccessException e) { + resilienceStrategy.putAllFailure(entries, e); + putAllObserver.end(PutAllOutcome.FAILURE); + } + } catch (Exception e) { + putAllObserver.end(PutAllOutcome.FAILURE); + throw e; + } + } + + protected abstract void doPutAll(Map entries) throws StoreAccessException, BulkCacheWritingException; + + protected boolean newValueAlreadyExpired(K key, V oldValue, V newValue) { + return newValueAlreadyExpired(logger, runtimeConfiguration.getExpiryPolicy(), key, oldValue, newValue); + } + + /** + * {@inheritDoc} + */ + @Override + public void removeAll(Set keys) throws BulkCacheWritingException { + removeAllObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(keys); + if (keys.isEmpty()) { + removeAllObserver.end(RemoveAllOutcome.SUCCESS); + return; + } + + for (K key : keys) { + if (key == null) { + throw new NullPointerException(); + } + } + + try { + doRemoveAll(keys); + removeAllObserver.end(RemoveAllOutcome.SUCCESS); + } catch (StoreAccessException e) { + resilienceStrategy.removeAllFailure(keys, e); + removeAllObserver.end(RemoveAllOutcome.FAILURE); + } + } catch (Throwable e) { + removeAllObserver.end(RemoveAllOutcome.FAILURE); + throw e; + } + } + + protected abstract void doRemoveAll(Set keys) throws BulkCacheWritingException, StoreAccessException; + + protected static boolean newValueAlreadyExpired(Logger logger, ExpiryPolicy expiry, K key, V oldValue, V newValue) { + if (newValue == null) { + return false; + } + + Duration duration; + try { + if (oldValue == null) { + duration = expiry.getExpiryForCreation(key, newValue); + } else { + duration = expiry.getExpiryForUpdate(key, () -> oldValue, newValue); + } + } catch (RuntimeException re) { + logger.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); + return true; + } + + return Duration.ZERO.equals(duration); + } + + /** + * {@inheritDoc} + */ + @Override + public boolean remove(K key, V value) { + conditionalRemoveObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(key, value); + + try { + Store.RemoveStatus status = doRemove(key, value); + switch (status) { + case REMOVED: + conditionalRemoveObserver.end(ConditionalRemoveOutcome.SUCCESS); + return true; + case KEY_MISSING: + conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE_KEY_MISSING); + return false; + case KEY_PRESENT: + conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE_KEY_PRESENT); + return false; + default: + throw new AssertionError("Invalid Status: " + status); + } + } catch (StoreAccessException e) { + boolean removed = resilienceStrategy.removeFailure(key, value, e); + conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE); + return removed; + } + } catch (Throwable e) { + conditionalRemoveObserver.end(ConditionalRemoveOutcome.FAILURE); + throw e; + } + } + + protected abstract Store.RemoveStatus doRemove(final K key, final V value) throws StoreAccessException; + + /** + * {@inheritDoc} + */ + @Override + public V replace(K key, V value) { + replaceObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(key, value); + + try { + V result = doReplace(key, value); + if(result == null) { + replaceObserver.end(ReplaceOutcome.MISS_NOT_PRESENT); + } else { + replaceObserver.end(ReplaceOutcome.HIT); + } + return result; + } catch (StoreAccessException e) { + V result = resilienceStrategy.replaceFailure(key, value, e); + replaceObserver.end(ReplaceOutcome.FAILURE); + return result; + } + } catch (Throwable e) { + replaceObserver.end(ReplaceOutcome.FAILURE); + throw e; + } + } + + protected abstract V doReplace(final K key, final V value) throws StoreAccessException; + + /** + * {@inheritDoc} + */ + @Override + public boolean replace(final K key, final V oldValue, final V newValue) { + replaceObserver.begin(); + try { + statusTransitioner.checkAvailable(); + checkNonNull(key, oldValue, newValue); + + try { + Store.ReplaceStatus status = doReplace(key, oldValue, newValue); + switch (status) { + case HIT: + replaceObserver.end(ReplaceOutcome.HIT); + return true; + case MISS_PRESENT: + replaceObserver.end(ReplaceOutcome.MISS_PRESENT); + return false; + case MISS_NOT_PRESENT: + replaceObserver.end(ReplaceOutcome.MISS_NOT_PRESENT); + return false; + default: + throw new AssertionError("Invalid Status:" + status); + } + } catch (StoreAccessException e) { + boolean success = resilienceStrategy.replaceFailure(key, oldValue, newValue, e); + replaceObserver.end(ReplaceOutcome.FAILURE); + return success; + } + } catch (Throwable e) { + replaceObserver.end(ReplaceOutcome.FAILURE); + throw e; + } + } + + protected abstract Store.ReplaceStatus doReplace(K key, V oldValue, V newValue) throws StoreAccessException; + + /** + * {@inheritDoc} + */ + @Override + public CacheRuntimeConfiguration getRuntimeConfiguration() { + return runtimeConfiguration; + } + + /** + * {@inheritDoc} + */ + @Override + public void init() { + statusTransitioner.init().succeeded(); + } + + /** + * {@inheritDoc} + */ + @Override + public void close() { + statusTransitioner.close().succeeded(); + } + + /** + * {@inheritDoc} + */ + @Override + public Status getStatus() { + return statusTransitioner.currentStatus(); + } + + /** + * {@inheritDoc} + */ + @Override + public void addHook(LifeCycled hook) { + statusTransitioner.addHook(hook); + } + + void removeHook(LifeCycled hook) { + statusTransitioner.removeHook(hook); + } + + protected void addBulkMethodEntriesCount(BulkOps op, long count) { + bulkMethodEntries.get(op).add(count); + } + + /** + * {@inheritDoc} + */ + @Override + public Map getBulkMethodEntries() { + return bulkMethodEntries; + } + + protected static void checkNonNull(Object thing) { + Objects.requireNonNull(thing); + } + + protected static void checkNonNull(Object... things) { + for (Object thing : things) { + checkNonNull(thing); + } + } + + protected void checkNonNullContent(Collection collectionOfThings) { + checkNonNull(collectionOfThings); + for (Object thing : collectionOfThings) { + checkNonNull(thing); + } + } + + protected abstract class Jsr107CacheBase implements Jsr107Cache { + + @Override + public void loadAll(Set keys, boolean replaceExistingValues, Function, Map> loadFunction) { + if(keys.isEmpty()) { + return ; + } + if (replaceExistingValues) { + loadAllReplace(keys, loadFunction); + } else { + loadAllAbsent(keys, loadFunction); + } + } + + @Override + public Iterator> specIterator() { + return new SpecIterator<>(this, store); + } + + @Override + public V getNoLoader(K key) { + return EhcacheBase.this.getNoLoader(key); + } + + @Override + public Map getAll(Set keys) { + return getAllInternal(keys, false); + } + + private void loadAllAbsent(Set keys, final Function, Map> loadFunction) { + try { + store.bulkComputeIfAbsent(keys, absentKeys -> cacheLoaderWriterLoadAllForKeys(absentKeys, loadFunction).entrySet()); + } catch (StoreAccessException e) { + throw newCacheLoadingException(e); + } + } + + Map cacheLoaderWriterLoadAllForKeys(Iterable keys, Function, Map> loadFunction) { + try { + Map loaded = loadFunction.apply(keys); + + // put into a new map since we can't assume the 107 cache loader returns things ordered, or necessarily with all the desired keys + Map rv = new LinkedHashMap<>(); + for (K key : keys) { + rv.put(key, loaded.get(key)); + } + return rv; + } catch (Exception e) { + throw newCacheLoadingException(e); + } + } + + private void loadAllReplace(Set keys, final Function, Map> loadFunction) { + try { + store.bulkCompute(keys, entries -> { + Collection keys1 = new ArrayList<>(); + for (Map.Entry entry : entries) { + keys1.add(entry.getKey()); + } + return cacheLoaderWriterLoadAllForKeys(keys1, loadFunction).entrySet(); + }); + } catch (StoreAccessException e) { + throw newCacheLoadingException(e); + } + } + + @Override + public boolean remove(K key) { + return EhcacheBase.this.removeInternal(key); + } + + @Override + public void removeAll() { + Collection failures = new ArrayList<>(); + Store.Iterator>> iterator = store.iterator(); + while (iterator.hasNext()) { + try { + Entry> next = iterator.next(); + remove(next.getKey()); + } catch (StoreAccessException cae) { + failures.add(cae); + } + } + if (!failures.isEmpty()) { + StoreAccessException removeAllFailure = new StoreAccessException("Iteration failures may have prevented a complete removal"); + failures.forEach(removeAllFailure::addSuppressed); + resilienceStrategy.clearFailure(removeAllFailure); + } + } + + } + + private class CacheEntryIterator implements Iterator> { + + private final Store.Iterator>> iterator; + private Cache.Entry> current; + private Cache.Entry> next; + private StoreAccessException nextException; + + public CacheEntryIterator() { + this.iterator = store.iterator(); + advance(); + } + + private void advance() { + try { + while (iterator.hasNext()) { + next = iterator.next(); + if (next != null) { + return; + } + } + next = null; + } catch (RuntimeException re) { + nextException = new StoreAccessException(re); + next = null; + } catch (StoreAccessException cae) { + nextException = cae; + next = null; + } + } + + @Override + public boolean hasNext() { + statusTransitioner.checkAvailable(); + return nextException != null || next != null; + } + + @Override + public Entry next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + + getObserver.begin(); + if (nextException == null) { + getObserver.end(GetOutcome.HIT); + current = next; + advance(); + return new ValueHolderBasedEntry<>(current); + } else { + getObserver.end(GetOutcome.FAILURE); + StoreAccessException cae = nextException; + nextException = null; + return resilienceStrategy.iteratorFailure(cae); + } + } + + @Override + public void remove() { + statusTransitioner.checkAvailable(); + if (current == null) { + throw new IllegalStateException("No current element"); + } + EhcacheBase.this.remove(current.getKey(), current.getValue().get()); + current = null; + } + } + + private static class ValueHolderBasedEntry implements Cache.Entry { + private final Cache.Entry> storeEntry; + + ValueHolderBasedEntry(Cache.Entry> storeEntry) { + this.storeEntry = storeEntry; + } + + @Override + public K getKey() { + return storeEntry.getKey(); + } + + @Override + public V getValue() { + return storeEntry.getValue().get(); + } + + } +} + diff --git a/core/src/main/java/org/ehcache/core/EhcacheManager.java b/ehcache-core/src/main/java/org/ehcache/core/EhcacheManager.java similarity index 76% rename from core/src/main/java/org/ehcache/core/EhcacheManager.java rename to ehcache-core/src/main/java/org/ehcache/core/EhcacheManager.java index 8288a34bf3..b4a05b71a4 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheManager.java +++ b/ehcache-core/src/main/java/org/ehcache/core/EhcacheManager.java @@ -25,30 +25,33 @@ import org.ehcache.config.Configuration; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; -import org.ehcache.core.config.BaseCacheConfiguration; import org.ehcache.core.config.DefaultConfiguration; import org.ehcache.core.config.store.StoreEventSourceConfiguration; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEventDispatcherFactory; import org.ehcache.core.events.CacheEventListenerConfiguration; import org.ehcache.core.events.CacheEventListenerProvider; import org.ehcache.core.events.CacheManagerListener; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.core.internal.store.StoreSupport; -import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.resilience.DefaultRecoveryStore; import org.ehcache.core.spi.LifeCycled; import org.ehcache.core.spi.LifeCycledAdapter; import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.service.ServiceUtils; import org.ehcache.core.spi.store.InternalCacheManager; import org.ehcache.core.spi.store.Store; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.core.store.StoreSupport; +import org.ehcache.core.util.ClassLoading; import org.ehcache.event.CacheEventListener; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; -import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; import org.ehcache.spi.loaderwriter.WriteBehindProvider; import org.ehcache.spi.persistence.PersistableResourceService; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.ResilienceStrategyProvider; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; @@ -72,8 +75,12 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.UnaryOperator; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.service.ServiceUtils.findOptionalAmongst; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; /** * Implementation class for the {@link org.ehcache.CacheManager} and {@link PersistentCacheManager} @@ -97,7 +104,7 @@ public class EhcacheManager implements PersistentCacheManager, InternalCacheMana protected final ServiceLocator serviceLocator; public EhcacheManager(Configuration config) { - this(config, Collections.emptyList(), true); + this(config, Collections.emptyList(), true); } public EhcacheManager(Configuration config, Collection services) { @@ -105,36 +112,43 @@ public EhcacheManager(Configuration config, Collection services) { } public EhcacheManager(Configuration config, Collection services, boolean useLoaderInAtomics) { + this(config, dependencies -> dependencies.with(services), useLoaderInAtomics); + } + + public EhcacheManager(Configuration config, UnaryOperator customization, boolean useLoaderInAtomics) { final String simpleName = this.getClass().getSimpleName(); this.simpleName = (simpleName.isEmpty() ? this.getClass().getName() : simpleName); this.configuration = new DefaultConfiguration(config); this.cacheManagerClassLoader = config.getClassLoader() != null ? config.getClassLoader() : ClassLoading.getDefaultClassLoader(); this.useLoaderInAtomics = useLoaderInAtomics; validateServicesConfigs(); - this.serviceLocator = resolveServices(services); + this.serviceLocator = resolveServices(customization); } private void validateServicesConfigs() { - HashSet classes = new HashSet<>(); - for (ServiceCreationConfiguration service : configuration.getServiceCreationConfigurations()) { + Set> classes = new HashSet<>(); + for (ServiceCreationConfiguration service : configuration.getServiceCreationConfigurations()) { if (!classes.add(service.getServiceType())) { throw new IllegalStateException("Duplicate creation configuration for service " + service.getServiceType()); } } } - private ServiceLocator resolveServices(Collection services) { + private ServiceLocator resolveServices(UnaryOperator customization) { ServiceLocator.DependencySet builder = dependencySet() .with(Store.Provider.class) .with(CacheLoaderWriterProvider.class) .with(WriteBehindProvider.class) .with(CacheEventDispatcherFactory.class) .with(CacheEventListenerProvider.class) - .with(services); + .with(ResilienceStrategyProvider.class); + + builder = customization.apply(builder); + if (!builder.contains(CacheManagerProviderService.class)) { builder = builder.with(new DefaultCacheManagerProviderService(this)); } - for (ServiceCreationConfiguration serviceConfig : configuration.getServiceCreationConfigurations()) { + for (ServiceCreationConfiguration serviceConfig : configuration.getServiceCreationConfigurations()) { builder = builder.with(serviceConfig); } return builder.build(); @@ -238,16 +252,16 @@ public Cache createCache(final String alias, CacheConfiguration Cache createCache(final String alias, CacheConfiguration originalConfig, boolean addToConfig) throws IllegalArgumentException { + private Cache createCache(String alias, CacheConfiguration originalConfig, boolean addToConfig) throws IllegalArgumentException { statusTransitioner.checkAvailable(); LOGGER.debug("Creating Cache '{}' in {}.", alias, simpleName); - CacheConfiguration config = adjustConfigurationWithCacheManagerDefaults(originalConfig); + CacheConfiguration config = adjustConfigurationWithCacheManagerDefaults(alias, originalConfig); Class keyType = config.getKeyType(); Class valueType = config.getValueType(); - final CacheHolder value = new CacheHolder(keyType, valueType, null); + CacheHolder value = new CacheHolder(keyType, valueType); if (caches.putIfAbsent(alias, value) != null) { throw new IllegalArgumentException("Cache '" + alias +"' already exists"); } @@ -292,12 +306,12 @@ private Cache createCache(final String alias, CacheConfiguration InternalCache createNewEhcache(final String alias, final CacheConfiguration config, - final Class keyType, final Class valueType) { - Collection> adjustedServiceConfigs = new ArrayList<>(config.getServiceConfigurations()); + InternalCache createNewEhcache(String alias, CacheConfiguration config, + Class keyType, Class valueType) { + Collection> adjustedServiceConfigs = new ArrayList<>(config.getServiceConfigurations()); - List unknownServiceConfigs = new ArrayList<>(); - for (ServiceConfiguration serviceConfig : adjustedServiceConfigs) { + List> unknownServiceConfigs = new ArrayList<>(); + for (ServiceConfiguration serviceConfig : adjustedServiceConfigs) { if (!serviceLocator.knowsServiceFor(serviceConfig)) { unknownServiceConfigs.add(serviceConfig); } @@ -308,45 +322,29 @@ InternalCache createNewEhcache(final String alias, final CacheConfi List lifeCycledList = new ArrayList<>(); - final Store store = getStore(alias, config, keyType, valueType, adjustedServiceConfigs, lifeCycledList); - - final CacheLoaderWriterProvider cacheLoaderWriterProvider = serviceLocator.getService(CacheLoaderWriterProvider.class); - final CacheLoaderWriter decorator ; + CacheLoaderWriterProvider cacheLoaderWriterProvider = serviceLocator.getService(CacheLoaderWriterProvider.class); + CacheLoaderWriter loaderWriter; if(cacheLoaderWriterProvider != null) { - final CacheLoaderWriter loaderWriter; loaderWriter = cacheLoaderWriterProvider.createCacheLoaderWriter(alias, config); - WriteBehindConfiguration writeBehindConfiguration = - ServiceUtils.findSingletonAmongst(WriteBehindConfiguration.class, config.getServiceConfigurations().toArray()); - if(writeBehindConfiguration == null) { - decorator = loaderWriter; - } else { - final WriteBehindProvider factory = serviceLocator.getService(WriteBehindProvider.class); - decorator = factory.createWriteBehindLoaderWriter(loaderWriter, writeBehindConfiguration); - if(decorator != null) { - lifeCycledList.add(new LifeCycledAdapter() { - @Override - public void close() { - factory.releaseWriteBehindLoaderWriter(decorator); - } - }); - } - } if (loaderWriter != null) { lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() throws Exception { - cacheLoaderWriterProvider.releaseCacheLoaderWriter(loaderWriter); + cacheLoaderWriterProvider.releaseCacheLoaderWriter(alias, loaderWriter); } }); } } else { - decorator = null; + loaderWriter = null; } - final CacheEventDispatcherFactory cenlProvider = serviceLocator.getService(CacheEventDispatcherFactory.class); - final CacheEventDispatcher evtService = - cenlProvider.createCacheEventDispatcher(store, adjustedServiceConfigs.toArray(new ServiceConfiguration[adjustedServiceConfigs.size()])); + Store store = getStore(alias, config, keyType, valueType, adjustedServiceConfigs, lifeCycledList, loaderWriter); + + + CacheEventDispatcherFactory cenlProvider = serviceLocator.getService(CacheEventDispatcherFactory.class); + CacheEventDispatcher evtService = + cenlProvider.createCacheEventDispatcher(store, adjustedServiceConfigs.toArray(new ServiceConfiguration[adjustedServiceConfigs.size()])); lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() { @@ -355,26 +353,28 @@ public void close() { }); evtService.setStoreEventSource(store.getStoreEventSource()); - final InternalCache cache; - if (decorator == null) { - cache = new Ehcache<>(config, store, evtService, LoggerFactory.getLogger(Ehcache.class + "-" + alias)); + ResilienceStrategyProvider resilienceProvider = serviceLocator.getService(ResilienceStrategyProvider.class); + ResilienceStrategy resilienceStrategy; + if (loaderWriter == null) { + resilienceStrategy = resilienceProvider.createResilienceStrategy(alias, config, new DefaultRecoveryStore<>(store)); } else { - cache = new EhcacheWithLoaderWriter<>(config, store, decorator, evtService, - useLoaderInAtomics, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + alias)); + resilienceStrategy = resilienceProvider.createResilienceStrategy(alias, config, new DefaultRecoveryStore<>(store), loaderWriter); } + InternalCache cache = new Ehcache<>(config, store, resilienceStrategy, evtService, LoggerFactory.getLogger(Ehcache.class + "-" + alias), loaderWriter); - final CacheEventListenerProvider evntLsnrFactory = serviceLocator.getService(CacheEventListenerProvider.class); + CacheEventListenerProvider evntLsnrFactory = serviceLocator.getService(CacheEventListenerProvider.class); if (evntLsnrFactory != null) { - Collection evtLsnrConfigs = - ServiceUtils.findAmongst(CacheEventListenerConfiguration.class, config.getServiceConfigurations()); - for (CacheEventListenerConfiguration lsnrConfig: evtLsnrConfigs) { - final CacheEventListener lsnr = evntLsnrFactory.createEventListener(alias, lsnrConfig); + @SuppressWarnings("unchecked") + Collection> evtLsnrConfigs = + ServiceUtils.>>findAmongst((Class) CacheEventListenerConfiguration.class, config.getServiceConfigurations()); + for (CacheEventListenerConfiguration lsnrConfig: evtLsnrConfigs) { + CacheEventListener lsnr = evntLsnrFactory.createEventListener(alias, lsnrConfig); if (lsnr != null) { cache.getRuntimeConfiguration().registerCacheEventListener(lsnr, lsnrConfig.orderingMode(), lsnrConfig.firingMode(), lsnrConfig.fireOn()); lifeCycledList.add(new LifeCycled() { @Override - public void init() throws Exception { + public void init() { // no-op for now } @@ -406,15 +406,16 @@ public void close() throws Exception { * this list may be augmented by the implementation of this method * @param lifeCycledList the {@code List} of {@code LifeCycled} instances used to manage components of the * cache; this list may be augmented by the implementation of this method + * @param loaderWriter the {@code CacheLoaderWriter} to be used by the {@code Cache} * @param the cache key type * @param the cache value type * * @return the {@code Store} instance used to create the cache */ - protected Store getStore(final String alias, final CacheConfiguration config, - final Class keyType, final Class valueType, - final Collection> serviceConfigs, - final List lifeCycledList) { + protected Store getStore(String alias, CacheConfiguration config, + Class keyType, Class valueType, + Collection> serviceConfigs, + List lifeCycledList, CacheLoaderWriter loaderWriter) { final Set> resourceTypes = config.getResourcePools().getResourceTypeSet(); for (ResourceType resourceType : resourceTypes) { @@ -437,12 +438,10 @@ public void close() throws Exception { } } - final Store.Provider storeProvider = StoreSupport.selectStoreProvider(serviceLocator, resourceTypes, serviceConfigs); - Serializer keySerializer = null; Serializer valueSerializer = null; final SerializationProvider serialization = serviceLocator.getService(SerializationProvider.class); - ServiceConfiguration[] serviceConfigArray = serviceConfigs.toArray(new ServiceConfiguration[serviceConfigs.size()]); + ServiceConfiguration[] serviceConfigArray = serviceConfigs.toArray(new ServiceConfiguration[serviceConfigs.size()]); if (serialization != null) { try { final Serializer keySer = serialization.createKeySerializer(keyType, config.getClassLoader(), serviceConfigArray); @@ -480,28 +479,39 @@ public void close() throws Exception { } } - int dispatcherConcurrency; - StoreEventSourceConfiguration eventSourceConfiguration = ServiceUtils.findSingletonAmongst(StoreEventSourceConfiguration.class, config - .getServiceConfigurations() - .toArray()); - if (eventSourceConfiguration != null) { - dispatcherConcurrency = eventSourceConfiguration.getDispatcherConcurrency(); - } else { - dispatcherConcurrency = StoreEventSourceConfiguration.DEFAULT_DISPATCHER_CONCURRENCY; + Collection> serviceConfigurations = config.getServiceConfigurations(); + + @SuppressWarnings("unchecked") + int dispatcherConcurrency = findOptionalAmongst((Class>) (Class) StoreEventSourceConfiguration.class, serviceConfigurations) + .map(StoreEventSourceConfiguration::getDispatcherConcurrency) + .orElse(StoreEventSourceConfiguration.DEFAULT_DISPATCHER_CONCURRENCY); + + boolean operationStatisticsEnabled = findOptionalAmongst(StoreStatisticsConfiguration.class, serviceConfigurations) + .map(StoreStatisticsConfiguration::isOperationStatisticsEnabled) + // By default, we enable statistics only in a tiered environment + .orElseGet(() -> config.getResourcePools().getResourceTypeSet().size() > 1); + + Store.Configuration storeConfiguration = new StoreConfigurationImpl<>(config, dispatcherConcurrency, + operationStatisticsEnabled, keySerializer, valueSerializer, loaderWriter, useLoaderInAtomics); + + Store.Provider storeProvider = StoreSupport.selectWrapperStoreProvider(serviceLocator, serviceConfigs); + if (storeProvider == null) { + storeProvider = StoreSupport.selectStoreProvider(serviceLocator, resourceTypes, serviceConfigs); } - Store.Configuration storeConfiguration = new StoreConfigurationImpl<>(config, dispatcherConcurrency, keySerializer, valueSerializer); - final Store store = storeProvider.createStore(storeConfiguration, serviceConfigArray); + Store store = storeProvider.createStore(storeConfiguration, serviceConfigArray); + + AtomicReference storeProviderRef = new AtomicReference<>(storeProvider); lifeCycledList.add(new LifeCycled() { @Override - public void init() throws Exception { - storeProvider.initStore(store); + public void init() { + storeProviderRef.get().initStore(store); } @Override public void close() { - storeProvider.releaseStore(store); + storeProviderRef.get().releaseStore(store); } }); @@ -521,17 +531,24 @@ private PersistableResourceService getPersistableResourceService(ResourceType /** * adjusts the config to reflect new classloader & serialization provider */ - private CacheConfiguration adjustConfigurationWithCacheManagerDefaults(CacheConfiguration config) { - ClassLoader cacheClassLoader = config.getClassLoader(); - if (cacheClassLoader == null) { - cacheClassLoader = cacheManagerClassLoader; + private CacheConfiguration adjustConfigurationWithCacheManagerDefaults(String alias, CacheConfiguration config) { + if (config.getClassLoader() == null && cacheManagerClassLoader != null) { + config = config.derive().withClassLoader(cacheManagerClassLoader).build(); } - if (cacheClassLoader != config.getClassLoader() ) { - config = new BaseCacheConfiguration<>(config.getKeyType(), config.getValueType(), - config.getEvictionAdvisor(), cacheClassLoader, config.getExpiry(), - config.getResourcePools(), config.getServiceConfigurations().toArray( - new ServiceConfiguration[config.getServiceConfigurations().size()])); + + + CacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(CacheLoaderWriterConfiguration.class, config.getServiceConfigurations()); + if (loaderWriterConfiguration == null) { + CacheLoaderWriterProvider loaderWriterProvider = serviceLocator.getService(CacheLoaderWriterProvider.class); + CacheLoaderWriterConfiguration preConfiguredCacheLoaderWriterConfig = loaderWriterProvider.getPreConfiguredCacheLoaderWriterConfig(alias); + if (preConfiguredCacheLoaderWriterConfig != null) { + config = config.derive().withService(preConfiguredCacheLoaderWriterConfig).build(); + } + if (loaderWriterProvider.isLoaderJsrProvided(alias)) { + config = config.derive().withService(new CacheLoaderWriterConfiguration() {}).build(); + } } + return config; } @@ -756,10 +773,9 @@ private static final class CacheHolder { private volatile InternalCache cache; private volatile boolean isValueSet = false; - CacheHolder(Class keyType, Class valueType, InternalCache cache) { + CacheHolder(Class keyType, Class valueType) { this.keyType = keyType; this.valueType = valueType; - this.cache = cache; } InternalCache retrieve(Class refKeyType, Class refValueType) { diff --git a/ehcache-core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java b/ehcache-core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java new file mode 100644 index 0000000000..e4e326ad47 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java @@ -0,0 +1,191 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.CacheRuntimeConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.FluentCacheConfigurationBuilder; +import org.ehcache.core.events.EventListenerWrapper; +import org.ehcache.event.CacheEventListener; +import org.ehcache.event.EventFiring; +import org.ehcache.event.EventOrdering; +import org.ehcache.event.EventType; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.service.ServiceConfiguration; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.EnumSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; + +import static java.util.Collections.unmodifiableCollection; + +class EhcacheRuntimeConfiguration implements CacheRuntimeConfiguration, InternalRuntimeConfiguration, HumanReadable { + + private final CacheConfiguration config; + + private final Collection> addedServiceConfigurations = new ArrayList<>(); + private volatile ResourcePools resourcePools; + + private final List cacheConfigurationListenerList + = new CopyOnWriteArrayList<>(); + + EhcacheRuntimeConfiguration(CacheConfiguration config) { + this.config = config; + this.resourcePools = config.getResourcePools(); + } + + @Override + public synchronized void updateResourcePools(ResourcePools pools) { + + if(pools == null) { + throw new NullPointerException("Pools to be updated cannot be null"); + } + + ResourcePools updatedResourcePools = config.getResourcePools().validateAndMerge(pools); + fireCacheConfigurationChange(CacheConfigurationProperty.UPDATE_SIZE, config.getResourcePools(), updatedResourcePools); + this.resourcePools = updatedResourcePools; + } + + @Override + public Collection> getServiceConfigurations() { + Collection> configurations = new ArrayList<>(config.getServiceConfigurations()); + configurations.addAll(addedServiceConfigurations); + return unmodifiableCollection(configurations); + } + + @Override + public Class getKeyType() { + return config.getKeyType(); + } + + @Override + public Class getValueType() { + return config.getValueType(); + } + + @Override + public EvictionAdvisor getEvictionAdvisor() { + return config.getEvictionAdvisor(); + } + + @Override + public ClassLoader getClassLoader() { + return config.getClassLoader(); + } + + @SuppressWarnings("deprecation") + @Override + public org.ehcache.expiry.Expiry getExpiry() { + return config.getExpiry(); + } + + @Override + public ExpiryPolicy getExpiryPolicy() { + return config.getExpiryPolicy(); + } + + @Override + public ResourcePools getResourcePools() { + return this.resourcePools; + } + + @Override + public FluentCacheConfigurationBuilder derive() { + FluentCacheConfigurationBuilder builder = config.derive(); + for (ServiceConfiguration service : addedServiceConfigurations) { + builder = builder.withService(service); + } + return builder.updateResourcePools(existing -> resourcePools); + } + + @Override + public boolean addCacheConfigurationListener(List listeners) { + return this.cacheConfigurationListenerList.addAll(listeners); + } + + @Override + public boolean removeCacheConfigurationListener(CacheConfigurationChangeListener listener) { + return this.cacheConfigurationListenerList.remove(listener); + } + + @Override + public synchronized void deregisterCacheEventListener(CacheEventListener listener) { + fireCacheConfigurationChange(CacheConfigurationProperty.REMOVE_LISTENER, listener, listener); + } + + @Override + public synchronized void registerCacheEventListener(CacheEventListener listener, EventOrdering ordering, + EventFiring firing, Set forEventTypes) { + EventListenerWrapper listenerWrapper = new EventListenerWrapper<>(listener, firing, ordering, EnumSet.copyOf(forEventTypes)); + fireCacheConfigurationChange(CacheConfigurationProperty.ADD_LISTENER, listenerWrapper, listenerWrapper); + } + + private Collection copy(Collection collection) { + if (collection == null) { + return null; + } + + return unmodifiableCollection(new ArrayList<>(collection)); + } + + @SuppressWarnings("unchecked") + private void fireCacheConfigurationChange(CacheConfigurationProperty prop, final T oldValue, final T newValue) { + if ((oldValue != null && !oldValue.equals(newValue)) || newValue != null) { + for (CacheConfigurationChangeListener cacheConfigurationListener : cacheConfigurationListenerList) { + cacheConfigurationListener.cacheConfigurationChange(new CacheConfigurationChangeEvent(prop, oldValue, newValue)); + } + } + } + + @Override + public String readableString() { + StringBuilder serviceConfigurationsToStringBuilder = new StringBuilder(); + for (ServiceConfiguration serviceConfiguration : getServiceConfigurations()) { + serviceConfigurationsToStringBuilder + .append("\n ") + .append("- "); + if(serviceConfiguration instanceof HumanReadable) { + serviceConfigurationsToStringBuilder + .append(((HumanReadable)serviceConfiguration).readableString()) + .append("\n"); + } else { + serviceConfigurationsToStringBuilder + .append(serviceConfiguration.getClass().getName()) + .append("\n"); + } + } + + if(serviceConfigurationsToStringBuilder.length() > 0) { + serviceConfigurationsToStringBuilder.deleteCharAt(serviceConfigurationsToStringBuilder.length() -1); + } else { + serviceConfigurationsToStringBuilder.append(" None"); + } + + return + "keyType: " + getKeyType().getName() + "\n" + + "valueType: " + getValueType().getName() + "\n" + + "serviceConfigurations:" + serviceConfigurationsToStringBuilder.toString().replace("\n", "\n ") + "\n" + + "evictionAdvisor: " + ((getEvictionAdvisor() != null) ? getEvictionAdvisor().getClass().getName() : "None") + "\n" + + "expiry: " + getExpiryPolicy() + "\n" + + "resourcePools: " + "\n " + ((resourcePools instanceof HumanReadable) ? ((HumanReadable)resourcePools).readableString() : "").replace("\n", "\n "); + } +} diff --git a/core/src/main/java/org/ehcache/core/HumanReadable.java b/ehcache-core/src/main/java/org/ehcache/core/HumanReadable.java similarity index 100% rename from core/src/main/java/org/ehcache/core/HumanReadable.java rename to ehcache-core/src/main/java/org/ehcache/core/HumanReadable.java diff --git a/core/src/main/java/org/ehcache/core/InternalCache.java b/ehcache-core/src/main/java/org/ehcache/core/InternalCache.java similarity index 97% rename from core/src/main/java/org/ehcache/core/InternalCache.java rename to ehcache-core/src/main/java/org/ehcache/core/InternalCache.java index 462999b604..f05edd63e8 100644 --- a/core/src/main/java/org/ehcache/core/InternalCache.java +++ b/ehcache-core/src/main/java/org/ehcache/core/InternalCache.java @@ -44,7 +44,7 @@ public interface InternalCache extends UserManagedCache { * * @return Jsr107Cache */ - Jsr107Cache getJsr107Cache(); + Jsr107Cache createJsr107Cache(); /** * CacheLoaderWriter diff --git a/core/src/main/java/org/ehcache/core/InternalRuntimeConfiguration.java b/ehcache-core/src/main/java/org/ehcache/core/InternalRuntimeConfiguration.java similarity index 100% rename from core/src/main/java/org/ehcache/core/InternalRuntimeConfiguration.java rename to ehcache-core/src/main/java/org/ehcache/core/InternalRuntimeConfiguration.java diff --git a/core/src/main/java/org/ehcache/core/InternalStatus.java b/ehcache-core/src/main/java/org/ehcache/core/InternalStatus.java similarity index 100% rename from core/src/main/java/org/ehcache/core/InternalStatus.java rename to ehcache-core/src/main/java/org/ehcache/core/InternalStatus.java diff --git a/core/src/main/java/org/ehcache/core/Jsr107Cache.java b/ehcache-core/src/main/java/org/ehcache/core/Jsr107Cache.java similarity index 100% rename from core/src/main/java/org/ehcache/core/Jsr107Cache.java rename to ehcache-core/src/main/java/org/ehcache/core/Jsr107Cache.java diff --git a/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java b/ehcache-core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java similarity index 92% rename from core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java rename to ehcache-core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java index c2108c46ae..8b54fa9768 100644 --- a/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java +++ b/ehcache-core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java @@ -31,6 +31,7 @@ import org.ehcache.spi.loaderwriter.CacheWritingException; import org.ehcache.core.spi.LifeCycled; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.ResilienceStrategy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -63,14 +64,10 @@ public class PersistentUserManagedEhcache implements PersistentUserManaged * @param eventDispatcher the event dispatcher * @param id an id for this cache */ - public PersistentUserManagedEhcache(CacheConfiguration configuration, Store store, DiskResourceService diskPersistenceService, CacheLoaderWriter cacheLoaderWriter, CacheEventDispatcher eventDispatcher, String id) { + public PersistentUserManagedEhcache(CacheConfiguration configuration, Store store, ResilienceStrategy resilienceStrategy, DiskResourceService diskPersistenceService, CacheLoaderWriter cacheLoaderWriter, CacheEventDispatcher eventDispatcher, String id) { this.logger = LoggerFactory.getLogger(PersistentUserManagedEhcache.class.getName() + "-" + id); this.statusTransitioner = new StatusTransitioner(logger); - if (cacheLoaderWriter == null) { - this.cache = new Ehcache<>(new EhcacheRuntimeConfiguration<>(configuration), store, eventDispatcher, logger, statusTransitioner); - } else { - this.cache = new EhcacheWithLoaderWriter<>(new EhcacheRuntimeConfiguration<>(configuration), store, cacheLoaderWriter, eventDispatcher, true, logger, statusTransitioner); - } + this.cache = new Ehcache<>(new EhcacheRuntimeConfiguration<>(configuration), store, resilienceStrategy, eventDispatcher, logger, statusTransitioner, cacheLoaderWriter); this.diskPersistenceService = diskPersistenceService; this.id = id; } diff --git a/core/src/main/java/org/ehcache/core/SpecIterator.java b/ehcache-core/src/main/java/org/ehcache/core/SpecIterator.java similarity index 95% rename from core/src/main/java/org/ehcache/core/SpecIterator.java rename to ehcache-core/src/main/java/org/ehcache/core/SpecIterator.java index b772a6fa88..2bf64c7444 100644 --- a/core/src/main/java/org/ehcache/core/SpecIterator.java +++ b/ehcache-core/src/main/java/org/ehcache/core/SpecIterator.java @@ -17,7 +17,7 @@ import org.ehcache.Cache; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import java.util.Iterator; @@ -59,7 +59,7 @@ public Cache.Entry next() { current = next; - final V nextValue = nextValueHolder.value(); + final V nextValue = nextValueHolder.get(); return new Cache.Entry() { @Override public K getKey() { diff --git a/core/src/main/java/org/ehcache/core/StatusTransitioner.java b/ehcache-core/src/main/java/org/ehcache/core/StatusTransitioner.java similarity index 98% rename from core/src/main/java/org/ehcache/core/StatusTransitioner.java rename to ehcache-core/src/main/java/org/ehcache/core/StatusTransitioner.java index 6994d4ab9e..1727920038 100644 --- a/core/src/main/java/org/ehcache/core/StatusTransitioner.java +++ b/ehcache-core/src/main/java/org/ehcache/core/StatusTransitioner.java @@ -220,7 +220,7 @@ public void succeeded() { public StateTransitionException failed(Throwable t) { if (st.done()) { if (t != null) { - throw (AssertionError) new AssertionError("Throwable cannot be thrown if Transition is done.").initCause(t); + throw new AssertionError("Throwable cannot be thrown if Transition is done.", t); } return null; } diff --git a/core/src/main/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMap.java b/ehcache-core/src/main/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMap.java similarity index 100% rename from core/src/main/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMap.java rename to ehcache-core/src/main/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMap.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/config/CoreConfigurationBuilder.java b/ehcache-core/src/main/java/org/ehcache/core/config/CoreConfigurationBuilder.java new file mode 100644 index 0000000000..10fc4fa125 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/config/CoreConfigurationBuilder.java @@ -0,0 +1,210 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.config; + +import org.ehcache.Cache; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.FluentCacheConfigurationBuilder; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; +import java.util.function.UnaryOperator; +import java.util.stream.Collectors; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.unmodifiableCollection; +import static java.util.Collections.unmodifiableMap; +import static java.util.Objects.requireNonNull; +import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toMap; + +public class CoreConfigurationBuilder> implements FluentConfigurationBuilder { + + private final Map> caches; + private final Collection> serviceConfigurations; + private final ClassLoader classLoader; + + /** + * Create a configuration builder seeded from the given configuration. + *

+ * Calling {@link #build()} on the returned builder will produce a functionally equivalent configuration to + * {@code seed}. + * + * @param seed configuration to duplicate + * @return a new configuration builder + */ + protected static CoreConfigurationBuilder newConfigurationBuilder(Configuration seed) { + return new CoreConfigurationBuilder<>(new CoreConfigurationBuilder<>(new CoreConfigurationBuilder<>(new CoreConfigurationBuilder<>(), + seed.getCacheConfigurations()), seed.getServiceCreationConfigurations()), seed.getClassLoader()); + } + + protected CoreConfigurationBuilder() { + this.caches = emptyMap(); + this.serviceConfigurations = emptyList(); + this.classLoader = null; + } + + protected CoreConfigurationBuilder(CoreConfigurationBuilder builder, Map> caches) { + this.caches = unmodifiableMap(caches); + this.serviceConfigurations = builder.serviceConfigurations; + this.classLoader = builder.classLoader; + } + + protected CoreConfigurationBuilder(CoreConfigurationBuilder builder, Collection> serviceConfigurations) { + this.caches = builder.caches; + this.serviceConfigurations = unmodifiableCollection(serviceConfigurations); + this.classLoader = builder.classLoader; + } + + protected CoreConfigurationBuilder(CoreConfigurationBuilder builder, ClassLoader classLoader) { + this.caches = builder.caches; + this.serviceConfigurations = builder.serviceConfigurations; + this.classLoader = classLoader; + } + + @Override + public Configuration build() { + return new DefaultConfiguration(caches, classLoader, serviceConfigurations.toArray(new ServiceCreationConfiguration[serviceConfigurations.size()])); + } + + @Override + public CacheConfiguration getCache(String alias) { + return caches.get(alias); + } + + @Override + public B withCache(String alias, CacheConfiguration config) { + Map> newCaches = new HashMap<>(caches); + newCaches.put(alias, config); + return newBuilderWith(newCaches); + } + + @Override + public B withoutCache(String alias) { + Map> newCaches = new HashMap<>(caches); + newCaches.remove(alias); + return newBuilderWith(newCaches); + } + + @Override + public B updateCache(String alias, UnaryOperator> update) { + CacheConfiguration existing = getCache(alias); + if (existing == null) { + throw new IllegalArgumentException("Cache does not exist"); + } else { + return withCache(alias, update.apply(existing.derive()).build()); + } + } + + @Override + public B updateCaches(UnaryOperator> update) { + return newBuilderWith(caches.entrySet().stream().collect( + toMap(Map.Entry::getKey, e -> update.apply(e.getValue().derive()).build()) + )); + } + + @Override + public > Collection getServices(Class configurationType) { + return serviceConfigurations.stream().filter(service -> configurationType.isAssignableFrom(service.getClass())).map(configurationType::cast).collect(toList()); + } + + @Override + public B withService(ServiceCreationConfiguration config) { + List> newServiceConfigurations = new ArrayList<>(serviceConfigurations); + newServiceConfigurations.removeIf(other -> !other.compatibleWith(config) || !config.compatibleWith(other)); + newServiceConfigurations.add(config); + return newBuilderWith(newServiceConfigurations); + } + + @Override + public > B withoutServices(Class clazz, Predicate predicate) { + List> newServiceConfigurations = new ArrayList<>(serviceConfigurations); + newServiceConfigurations.removeIf(c -> clazz.isInstance(c) && predicate.test(clazz.cast(c))); + return newBuilderWith(newServiceConfigurations); + } + + @Override + public > B updateServices(Class clazz, UnaryOperator update) { + @SuppressWarnings("unchecked") + Collection> existing = getServices(clazz); + + if (existing.isEmpty()) { + throw new IllegalStateException("Cannot updates service configurations. No services exist"); + } else { + B otherBuilder = withoutServices(clazz); + for (ServiceCreationConfiguration configuration : existing) { + ServiceCreationConfiguration replacement = configuration.build(update.apply(configuration.derive())); + if (replacement == null) { + throw new NullPointerException(configuration.getClass().getSimpleName() + ".build(...) returned a null configuration instance"); + } else { + otherBuilder = otherBuilder.withService(replacement); + } + } + return otherBuilder; + } + } + + @Override + public ClassLoader getClassLoader() { + return classLoader; + } + + @Override + public B withClassLoader(ClassLoader classLoader) { + return newBuilderWith(requireNonNull(classLoader)); + } + + @Override + public B withDefaultClassLoader() { + return newBuilderWith((ClassLoader) null); + } + + @SuppressWarnings("unchecked") + protected B newBuilderWith(Map> caches) { + if (getClass().equals(CoreConfigurationBuilder.class)) { + return (B) new CoreConfigurationBuilder<>(this, caches); + } else { + throw new AssertionError(); + } + } + + @SuppressWarnings("unchecked") + protected B newBuilderWith(Collection> serviceConfigurations) { + if (getClass().equals(CoreConfigurationBuilder.class)) { + return (B) new CoreConfigurationBuilder<>(this, serviceConfigurations); + } else { + throw new AssertionError(); + } + } + + @SuppressWarnings("unchecked") + protected B newBuilderWith(ClassLoader classLoader) { + if (getClass().equals(CoreConfigurationBuilder.class)) { + return (B) new CoreConfigurationBuilder<>(this, classLoader); + } else { + throw new AssertionError(); + } + } + +} diff --git a/core/src/main/java/org/ehcache/core/config/DefaultConfiguration.java b/ehcache-core/src/main/java/org/ehcache/core/config/DefaultConfiguration.java similarity index 91% rename from core/src/main/java/org/ehcache/core/config/DefaultConfiguration.java rename to ehcache-core/src/main/java/org/ehcache/core/config/DefaultConfiguration.java index 8c733026be..835f08b477 100644 --- a/core/src/main/java/org/ehcache/core/config/DefaultConfiguration.java +++ b/ehcache-core/src/main/java/org/ehcache/core/config/DefaultConfiguration.java @@ -26,12 +26,14 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.CacheRuntimeConfiguration; import org.ehcache.config.Configuration; +import org.ehcache.config.FluentConfigurationBuilder; import org.ehcache.core.HumanReadable; -import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.core.util.ClassLoading; import org.ehcache.spi.service.ServiceCreationConfiguration; import static java.util.Collections.unmodifiableCollection; import static java.util.Collections.unmodifiableMap; +import static org.ehcache.core.config.CoreConfigurationBuilder.newConfigurationBuilder; /** * Base implementation of {@link Configuration}. @@ -39,7 +41,7 @@ public final class DefaultConfiguration implements Configuration, HumanReadable { private final ConcurrentMap> caches; - private final Collection> services; + private final Collection> services; private final ClassLoader classLoader; /** @@ -66,7 +68,7 @@ public DefaultConfiguration(Configuration cfg) { * * @see #addCacheConfiguration(String, CacheConfiguration) */ - public DefaultConfiguration(ClassLoader classLoader, ServiceCreationConfiguration... services) { + public DefaultConfiguration(ClassLoader classLoader, ServiceCreationConfiguration... services) { this(emptyCacheMap(), classLoader, services); } @@ -78,7 +80,7 @@ public DefaultConfiguration(ClassLoader classLoader, ServiceCreationConfiguratio * @param classLoader the class loader to use for user types * @param services an array of service configurations */ - public DefaultConfiguration(Map> caches, ClassLoader classLoader, ServiceCreationConfiguration... services) { + public DefaultConfiguration(Map> caches, ClassLoader classLoader, ServiceCreationConfiguration... services) { this.services = unmodifiableCollection(Arrays.asList(services)); this.caches = new ConcurrentHashMap<>(caches); this.classLoader = classLoader == null ? ClassLoading.getDefaultClassLoader() : classLoader; @@ -96,7 +98,7 @@ public DefaultConfiguration(Map> caches, ClassL * {@inheritDoc} */ @Override - public Collection> getServiceCreationConfigurations() { + public Collection> getServiceCreationConfigurations() { return services; } @@ -108,6 +110,11 @@ public ClassLoader getClassLoader() { return classLoader; } + @Override + public FluentConfigurationBuilder derive() { + return newConfigurationBuilder(this); + } + private static Map> emptyCacheMap() { return Collections.emptyMap(); } @@ -168,7 +175,7 @@ public String readableString() { } StringBuilder serviceCreationConfigurationsToStringBuilder = new StringBuilder(); - for (ServiceCreationConfiguration serviceCreationConfiguration : services) { + for (ServiceCreationConfiguration serviceCreationConfiguration : services) { serviceCreationConfigurationsToStringBuilder.append("- "); if(serviceCreationConfiguration instanceof HumanReadable) { serviceCreationConfigurationsToStringBuilder diff --git a/ehcache-core/src/main/java/org/ehcache/core/config/ExpiryUtils.java b/ehcache-core/src/main/java/org/ehcache/core/config/ExpiryUtils.java new file mode 100644 index 0000000000..407b76ee57 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/config/ExpiryUtils.java @@ -0,0 +1,215 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.config; + +import org.ehcache.expiry.ExpiryPolicy; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.time.temporal.TemporalUnit; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +/** + * ExpiryUtils + */ +@SuppressWarnings("deprecation") +public class ExpiryUtils { + + private static final Logger LOG = LoggerFactory.getLogger(ExpiryUtils.class); + + public static boolean isExpiryDurationInfinite(Duration duration) { + return duration.compareTo(ExpiryPolicy.INFINITE) >= 0; + } + + public static org.ehcache.expiry.Expiry convertToExpiry(ExpiryPolicy expiryPolicy) { + + if (expiryPolicy == ExpiryPolicy.NO_EXPIRY) { + @SuppressWarnings("unchecked") + org.ehcache.expiry.Expiry expiry = (org.ehcache.expiry.Expiry) org.ehcache.expiry.Expirations.noExpiration(); + return expiry; + } + + return new org.ehcache.expiry.Expiry() { + + @Override + public org.ehcache.expiry.Duration getExpiryForCreation(K key, V value) { + return convertDuration(expiryPolicy.getExpiryForCreation(key, value)); + } + + @Override + public org.ehcache.expiry.Duration getExpiryForAccess(K key, org.ehcache.ValueSupplier value) { + return convertDuration(expiryPolicy.getExpiryForAccess(key, value::value)); + } + + @Override + public org.ehcache.expiry.Duration getExpiryForUpdate(K key, org.ehcache.ValueSupplier oldValue, V newValue) { + return convertDuration(expiryPolicy.getExpiryForUpdate(key, oldValue::value, newValue)); + } + + @Override + public String toString() { + return "Expiry wrapper of {" + expiryPolicy + " }"; + } + }; + } + + private static org.ehcache.expiry.Duration convertDuration(Duration duration) { + if (duration == null) { + return null; + } + if (duration.isNegative()) { + throw new IllegalArgumentException("Ehcache duration cannot be negative and so does not accept negative java.time.Duration: " + duration); + } + if (duration.isZero()) { + return org.ehcache.expiry.Duration.ZERO; + } else { + long nanos = duration.getNano(); + if (nanos == 0) { + return org.ehcache.expiry.Duration.of(duration.getSeconds(), TimeUnit.SECONDS); + } + long seconds = duration.getSeconds(); + long secondsInNanos = TimeUnit.SECONDS.toNanos(seconds); + if (secondsInNanos != Long.MAX_VALUE && Long.MAX_VALUE - secondsInNanos > nanos) { + return org.ehcache.expiry.Duration.of(duration.toNanos(), TimeUnit.NANOSECONDS); + } else { + long secondsInMicros = TimeUnit.SECONDS.toMicros(seconds); + if (secondsInMicros != Long.MAX_VALUE && Long.MAX_VALUE - secondsInMicros > nanos / 1_000) { + return org.ehcache.expiry.Duration.of(secondsInMicros + nanos / 1_000, TimeUnit.MICROSECONDS); + } else { + long secondsInMillis = TimeUnit.SECONDS.toMillis(seconds); + if (secondsInMillis != Long.MAX_VALUE && Long.MAX_VALUE - secondsInMillis > nanos / 1_000_000) { + return org.ehcache.expiry.Duration.of(duration.toMillis(), TimeUnit.MILLISECONDS); + } + } + } + return org.ehcache.expiry.Duration.of(seconds, TimeUnit.SECONDS); + } + } + + public static ExpiryPolicy convertToExpiryPolicy(org.ehcache.expiry.Expiry expiry) { + if (expiry == org.ehcache.expiry.Expirations.noExpiration()) { + @SuppressWarnings("unchecked") + ExpiryPolicy expiryPolicy = (ExpiryPolicy) ExpiryPolicy.NO_EXPIRY; + return expiryPolicy; + } + + return new ExpiryPolicy() { + @Override + public Duration getExpiryForCreation(K key, V value) { + org.ehcache.expiry.Duration duration = expiry.getExpiryForCreation(key, value); + return convertDuration(duration); + } + + @Override + public Duration getExpiryForAccess(K key, Supplier value) { + org.ehcache.expiry.Duration duration = expiry.getExpiryForAccess(key, value::get); + return convertDuration(duration); + } + + @Override + public Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue) { + org.ehcache.expiry.Duration duration = expiry.getExpiryForUpdate(key, oldValue::get, newValue); + return convertDuration(duration); + } + + @Override + public String toString() { + return "Expiry wrapper of {" + expiry + " }"; + } + + private Duration convertDuration(org.ehcache.expiry.Duration duration) { + if (duration == null) { + return null; + } + if (duration.isInfinite()) { + return ExpiryPolicy.INFINITE; + } + try { + return Duration.of(duration.getLength(), jucTimeUnitToTemporalUnit(duration.getTimeUnit())); + } catch (ArithmeticException e) { + return ExpiryPolicy.INFINITE; + } + } + }; + } + + public static TemporalUnit jucTimeUnitToTemporalUnit(TimeUnit timeUnit) { + switch (timeUnit) { + case NANOSECONDS: + return ChronoUnit.NANOS; + case MICROSECONDS: + return ChronoUnit.MICROS; + case MILLISECONDS: + return ChronoUnit.MILLIS; + case SECONDS: + return ChronoUnit.SECONDS; + case MINUTES: + return ChronoUnit.MINUTES; + case HOURS: + return ChronoUnit.HOURS; + case DAYS: + return ChronoUnit.DAYS; + default: + throw new AssertionError("Unkown TimeUnit: " + timeUnit); + } + } + + public static long getExpirationMillis(long now, Duration duration) { + try { + return duration.plusMillis(now).toMillis(); + } catch (ArithmeticException e) { + return Long.MAX_VALUE; + } + + } + + /** + * Returns the expiry for creation duration returned by the provided {@link ExpiryPolicy} but checks for immediate + * expiry, null expiry and exceptions. In all those cases, {@code null} will be returned. + * + * @param key key to pass to {@link ExpiryPolicy#getExpiryForCreation(Object, Object)} + * @param value value to pass to to pass to {@link ExpiryPolicy#getExpiryForCreation(Object, Object)} + * @param expiry expiry queried + * @param type of key + * @param type of value + * @return the duration returned by to pass to {@link ExpiryPolicy#getExpiryForCreation(Object, Object)}, {@code null} + * if the call throws an exception, if the returned duration is {@code null} or if it is lower or equal to 0 + */ + public static Duration getExpiryForCreation(K key, V value, ExpiryPolicy expiry) { + Duration duration; + try { + duration = expiry.getExpiryForCreation(key, value); + } catch (RuntimeException e) { + LOG.error("Expiry computation caused an exception - Expiry duration will be 0", e); + return Duration.ZERO; + } + + if (duration == null) { + LOG.error("Expiry for creation can't be null - Expiry duration will be 0"); + return Duration.ZERO; + } + + if (Duration.ZERO.compareTo(duration) >= 0) { + return Duration.ZERO; + } + + return duration; + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/config/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/config/package-info.java new file mode 100644 index 0000000000..205a0afd5d --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/config/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package holding core configuration implementations and utilities. + */ +package org.ehcache.core.config; diff --git a/core/src/main/java/org/ehcache/core/config/store/StoreEventSourceConfiguration.java b/ehcache-core/src/main/java/org/ehcache/core/config/store/StoreEventSourceConfiguration.java similarity index 83% rename from core/src/main/java/org/ehcache/core/config/store/StoreEventSourceConfiguration.java rename to ehcache-core/src/main/java/org/ehcache/core/config/store/StoreEventSourceConfiguration.java index 6625eaab5d..1e814f673a 100644 --- a/core/src/main/java/org/ehcache/core/config/store/StoreEventSourceConfiguration.java +++ b/ehcache-core/src/main/java/org/ehcache/core/config/store/StoreEventSourceConfiguration.java @@ -16,15 +16,14 @@ package org.ehcache.core.config.store; -import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.service.ServiceConfiguration; /** * {@link ServiceConfiguration} used by the {@link org.ehcache.core.EhcacheManager} to populate the dispatcher - * concurrency in the {@link StoreConfigurationImpl}. + * concurrency in the {@link org.ehcache.core.store.StoreConfigurationImpl}. */ -public interface StoreEventSourceConfiguration extends ServiceConfiguration { +public interface StoreEventSourceConfiguration extends ServiceConfiguration { /** * Default dispatcher concurrency diff --git a/ehcache-core/src/main/java/org/ehcache/core/config/store/StoreStatisticsConfiguration.java b/ehcache-core/src/main/java/org/ehcache/core/config/store/StoreStatisticsConfiguration.java new file mode 100644 index 0000000000..834ffa6cc6 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/config/store/StoreStatisticsConfiguration.java @@ -0,0 +1,56 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.config.store; + +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.service.ServiceConfiguration; + +/** + * Configure if statistics are unable on stores. By default they are enabled in a tiered + * configuration to accurately track the usage of each tier. If a store is + * standing alone, then they will be disabled by default since they are a mirror + * of the cache statistics. + *

+ * Note that statistics about the store size, mapping and so on are not affected + * by this configuration. Only operation statistics (e.g. get/put counts) are disabled. + */ +public class StoreStatisticsConfiguration implements ServiceConfiguration { + + private final boolean operationStatisticsEnabled; + + public StoreStatisticsConfiguration(boolean operationStatisticsEnabled) { + this.operationStatisticsEnabled = operationStatisticsEnabled; + } + + public boolean isOperationStatisticsEnabled() { + return operationStatisticsEnabled; + } + + @Override + public Class getServiceType() { + return Store.Provider.class; + } + + @Override + public Boolean derive() { + return isOperationStatisticsEnabled(); + } + + @Override + public StoreStatisticsConfiguration build(Boolean enabled) { + return new StoreStatisticsConfiguration(enabled); + } +} diff --git a/core/src/main/java/org/ehcache/core/config/store/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/config/store/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/config/store/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/config/store/package-info.java diff --git a/core/src/main/java/org/ehcache/core/events/CacheEventDispatcher.java b/ehcache-core/src/main/java/org/ehcache/core/events/CacheEventDispatcher.java similarity index 100% rename from core/src/main/java/org/ehcache/core/events/CacheEventDispatcher.java rename to ehcache-core/src/main/java/org/ehcache/core/events/CacheEventDispatcher.java diff --git a/core/src/main/java/org/ehcache/core/events/CacheEventDispatcherFactory.java b/ehcache-core/src/main/java/org/ehcache/core/events/CacheEventDispatcherFactory.java similarity index 96% rename from core/src/main/java/org/ehcache/core/events/CacheEventDispatcherFactory.java rename to ehcache-core/src/main/java/org/ehcache/core/events/CacheEventDispatcherFactory.java index 8bf715ed09..a6781f5216 100644 --- a/core/src/main/java/org/ehcache/core/events/CacheEventDispatcherFactory.java +++ b/ehcache-core/src/main/java/org/ehcache/core/events/CacheEventDispatcherFactory.java @@ -38,7 +38,7 @@ public interface CacheEventDispatcherFactory extends Service { * * @return the {@link CacheEventDispatcher} */ - CacheEventDispatcher createCacheEventDispatcher(Store store, ServiceConfiguration... serviceConfigs); + CacheEventDispatcher createCacheEventDispatcher(Store store, ServiceConfiguration... serviceConfigs); /** * Releases an instance of {@link CacheEventDispatcher}, causing it to shutdown and release all diff --git a/core/src/main/java/org/ehcache/core/events/CacheEventListenerConfiguration.java b/ehcache-core/src/main/java/org/ehcache/core/events/CacheEventListenerConfiguration.java similarity index 92% rename from core/src/main/java/org/ehcache/core/events/CacheEventListenerConfiguration.java rename to ehcache-core/src/main/java/org/ehcache/core/events/CacheEventListenerConfiguration.java index 445c84ebba..18bc84296d 100644 --- a/core/src/main/java/org/ehcache/core/events/CacheEventListenerConfiguration.java +++ b/ehcache-core/src/main/java/org/ehcache/core/events/CacheEventListenerConfiguration.java @@ -26,7 +26,7 @@ /** * Configuration contract for setting up {@link org.ehcache.event.CacheEvent} system in a cache. */ -public interface CacheEventListenerConfiguration extends ServiceConfiguration { +public interface CacheEventListenerConfiguration extends ServiceConfiguration { /** * Indicates which {@link EventFiring firing mode} to use diff --git a/core/src/main/java/org/ehcache/core/events/CacheEventListenerProvider.java b/ehcache-core/src/main/java/org/ehcache/core/events/CacheEventListenerProvider.java similarity index 96% rename from core/src/main/java/org/ehcache/core/events/CacheEventListenerProvider.java rename to ehcache-core/src/main/java/org/ehcache/core/events/CacheEventListenerProvider.java index dda77e64fa..4c90b67b63 100644 --- a/core/src/main/java/org/ehcache/core/events/CacheEventListenerProvider.java +++ b/ehcache-core/src/main/java/org/ehcache/core/events/CacheEventListenerProvider.java @@ -35,7 +35,7 @@ public interface CacheEventListenerProvider extends Service { * * @return the CacheEventListener to be registered with the given {@link org.ehcache.Cache} */ - CacheEventListener createEventListener(String alias, ServiceConfiguration serviceConfiguration); + CacheEventListener createEventListener(String alias, ServiceConfiguration serviceConfiguration); /** * Releases a given {@link org.ehcache.event.CacheEventListener} diff --git a/core/src/main/java/org/ehcache/core/events/CacheEvents.java b/ehcache-core/src/main/java/org/ehcache/core/events/CacheEvents.java similarity index 100% rename from core/src/main/java/org/ehcache/core/events/CacheEvents.java rename to ehcache-core/src/main/java/org/ehcache/core/events/CacheEvents.java diff --git a/core/src/main/java/org/ehcache/core/events/CacheManagerListener.java b/ehcache-core/src/main/java/org/ehcache/core/events/CacheManagerListener.java similarity index 100% rename from core/src/main/java/org/ehcache/core/events/CacheManagerListener.java rename to ehcache-core/src/main/java/org/ehcache/core/events/CacheManagerListener.java diff --git a/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java b/ehcache-core/src/main/java/org/ehcache/core/events/EventListenerWrapper.java similarity index 94% rename from core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java rename to ehcache-core/src/main/java/org/ehcache/core/events/EventListenerWrapper.java index e8474f8e1f..2076b68d81 100644 --- a/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java +++ b/ehcache-core/src/main/java/org/ehcache/core/events/EventListenerWrapper.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.core.internal.events; +package org.ehcache.core.events; import org.ehcache.event.CacheEvent; import org.ehcache.event.CacheEventListener; @@ -73,7 +73,7 @@ public boolean equals(Object other) { if (!(other instanceof EventListenerWrapper)) { return false; } - EventListenerWrapper l2 = (EventListenerWrapper)other; + EventListenerWrapper l2 = (EventListenerWrapper)other; return listener.equals(l2.listener); } @@ -82,7 +82,7 @@ public void onEvent(CacheEvent event) { listener.onEvent(event); } - public CacheEventListener getListener() { + public CacheEventListener getListener() { return listener; } diff --git a/core/src/main/java/org/ehcache/core/events/NullStoreEventDispatcher.java b/ehcache-core/src/main/java/org/ehcache/core/events/NullStoreEventDispatcher.java similarity index 86% rename from core/src/main/java/org/ehcache/core/events/NullStoreEventDispatcher.java rename to ehcache-core/src/main/java/org/ehcache/core/events/NullStoreEventDispatcher.java index 8723d1b365..ff26a97fd7 100644 --- a/core/src/main/java/org/ehcache/core/events/NullStoreEventDispatcher.java +++ b/ehcache-core/src/main/java/org/ehcache/core/events/NullStoreEventDispatcher.java @@ -16,10 +16,11 @@ package org.ehcache.core.events; -import org.ehcache.ValueSupplier; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; +import java.util.function.Supplier; + /** * NullStoreEventDispatcher */ @@ -31,12 +32,12 @@ public static StoreEventDispatcher nullStoreEventDispatcher() { private final StoreEventSink storeEventSink = new StoreEventSink() { @Override - public void evicted(K key, ValueSupplier value) { + public void evicted(K key, Supplier value) { // Do nothing } @Override - public void expired(K key, ValueSupplier value) { + public void expired(K key, Supplier value) { // Do nothing } @@ -46,12 +47,12 @@ public void created(K key, V value) { } @Override - public void updated(K key, ValueSupplier previousValue, V newValue) { + public void updated(K key, Supplier previousValue, V newValue) { // Do nothing } @Override - public void removed(K key, ValueSupplier removed) { + public void removed(K key, Supplier removed) { // Do nothing } }; @@ -96,6 +97,11 @@ public void setEventOrdering(boolean ordering) { // Do nothing } + @Override + public void setSynchronous(boolean synchronous) { + // Do nothing + } + @Override public boolean isEventOrdering() { return false; diff --git a/core/src/main/java/org/ehcache/core/events/StateChangeListener.java b/ehcache-core/src/main/java/org/ehcache/core/events/StateChangeListener.java similarity index 100% rename from core/src/main/java/org/ehcache/core/events/StateChangeListener.java rename to ehcache-core/src/main/java/org/ehcache/core/events/StateChangeListener.java diff --git a/core/src/main/java/org/ehcache/core/events/StoreEventDispatcher.java b/ehcache-core/src/main/java/org/ehcache/core/events/StoreEventDispatcher.java similarity index 100% rename from core/src/main/java/org/ehcache/core/events/StoreEventDispatcher.java rename to ehcache-core/src/main/java/org/ehcache/core/events/StoreEventDispatcher.java diff --git a/core/src/main/java/org/ehcache/core/events/StoreEventSink.java b/ehcache-core/src/main/java/org/ehcache/core/events/StoreEventSink.java similarity index 86% rename from core/src/main/java/org/ehcache/core/events/StoreEventSink.java rename to ehcache-core/src/main/java/org/ehcache/core/events/StoreEventSink.java index 4dbe7426ad..f817573a1b 100644 --- a/core/src/main/java/org/ehcache/core/events/StoreEventSink.java +++ b/ehcache-core/src/main/java/org/ehcache/core/events/StoreEventSink.java @@ -16,7 +16,7 @@ package org.ehcache.core.events; -import org.ehcache.ValueSupplier; +import java.util.function.Supplier; /** * Interface on which {@link org.ehcache.core.spi.store.Store} operations are to record events. @@ -29,7 +29,7 @@ public interface StoreEventSink { * @param key removed key * @param value value supplier of removed value */ - void removed(K key, ValueSupplier value); + void removed(K key, Supplier value); /** * Indicates the mapping was updated. @@ -38,7 +38,7 @@ public interface StoreEventSink { * @param oldValue value supplier of old value * @param newValue the new value */ - void updated(K key, ValueSupplier oldValue, V newValue); + void updated(K key, Supplier oldValue, V newValue); /** * Indicates the mapping was expired. @@ -46,7 +46,7 @@ public interface StoreEventSink { * @param key the expired key * @param value value supplier of expired value */ - void expired(K key, ValueSupplier value); + void expired(K key, Supplier value); /** * Indicates a mapping was created. @@ -62,5 +62,5 @@ public interface StoreEventSink { * @param key the evicted key * @param value value supplier of evicted value */ - void evicted(K key, ValueSupplier value); + void evicted(K key, Supplier value); } diff --git a/core/src/main/java/org/ehcache/core/events/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/events/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/events/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/events/package-info.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/exceptions/ExceptionFactory.java b/ehcache-core/src/main/java/org/ehcache/core/exceptions/ExceptionFactory.java new file mode 100644 index 0000000000..520ddc9ba7 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/exceptions/ExceptionFactory.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.exceptions; + +import org.ehcache.spi.loaderwriter.CacheLoadingException; +import org.ehcache.spi.loaderwriter.CacheWritingException; + +/** + * Factory to help creation of {@link CacheLoadingException} and {@link CacheWritingException}. + */ +public final class ExceptionFactory { + + private ExceptionFactory() { + throw new UnsupportedOperationException("Thou shalt not instantiate me!"); + } + + /** + * Creates a new {@code CacheWritingException} with the provided exception as cause. + * + * @param e the cause + * @return a cache writing exception + */ + public static CacheWritingException newCacheWritingException(Exception e) { + return new CacheWritingException(e); + } + + /** + * Creates a new {@code CacheLoadingException} with the provided exception as cause. + * + * @param e the cause + * @return a cache loading exception + */ + public static CacheLoadingException newCacheLoadingException(Exception e) { + return new CacheLoadingException(e); + } + + /** + * Creates a new {@code CacheWritingException} with the provided exception as cause and a suppressed one. + * + * @param e the cause + * @param suppressed the suppressed exception to add to the new exception + * @return a cache writing exception + */ + public static CacheWritingException newCacheWritingException(Exception e, Exception suppressed) { + CacheWritingException ne = new CacheWritingException(e); + ne.addSuppressed(suppressed); + return ne; + } + + /** + * Creates a new {@code CacheLoadingException} with the provided exception as cause and a suppressed one. + * + * @param e the cause + * @param suppressed the suppressed exception to add to the new exception + * @return a cache loading exception + */ + public static CacheLoadingException newCacheLoadingException(Exception e, Exception suppressed) { + CacheLoadingException ne = new CacheLoadingException(e); + ne.addSuppressed(e); + return ne; + } +} diff --git a/core/src/main/java/org/ehcache/core/exceptions/StorePassThroughException.java b/ehcache-core/src/main/java/org/ehcache/core/exceptions/StorePassThroughException.java similarity index 85% rename from core/src/main/java/org/ehcache/core/exceptions/StorePassThroughException.java rename to ehcache-core/src/main/java/org/ehcache/core/exceptions/StorePassThroughException.java index 219ca9787b..9bcc5beff6 100644 --- a/core/src/main/java/org/ehcache/core/exceptions/StorePassThroughException.java +++ b/ehcache-core/src/main/java/org/ehcache/core/exceptions/StorePassThroughException.java @@ -16,7 +16,7 @@ package org.ehcache.core.exceptions; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; /** * A generic wrapper runtime exception that will not be caught and @@ -48,6 +48,13 @@ public StorePassThroughException(final Throwable cause) { super(cause); } + @Override + public synchronized Throwable fillInStackTrace() { + // skip the stack trace filling because this exception is just a placeholder and won't ever be caught outside of + // a store + return this; + } + /** * Helper method for handling runtime exceptions. *

@@ -59,11 +66,11 @@ public StorePassThroughException(final Throwable cause) { * @return StoreAccessException to be thrown * @throws RuntimeException if {@code re} is a {@code StorePassThroughException} containing a {@code RuntimeException} */ - public static StoreAccessException handleRuntimeException(RuntimeException re) { + public static StoreAccessException handleException(Exception re) { if(re instanceof StorePassThroughException) { Throwable cause = re.getCause(); if(cause instanceof RuntimeException) { - throw (RuntimeException) cause; + throw (RuntimeException) cause; } else { return new StoreAccessException(cause); } diff --git a/core/src/main/java/org/ehcache/core/exceptions/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/exceptions/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/exceptions/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/exceptions/package-info.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/resilience/ThrowingResilienceStrategy.java b/ehcache-core/src/main/java/org/ehcache/core/internal/resilience/ThrowingResilienceStrategy.java new file mode 100644 index 0000000000..b8a6b0cae9 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/resilience/ThrowingResilienceStrategy.java @@ -0,0 +1,89 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.internal.resilience; + +import org.ehcache.Cache; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.StoreAccessException; + +import java.util.Map; + +public class ThrowingResilienceStrategy implements ResilienceStrategy { + @Override + public V getFailure(K key, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public boolean containsKeyFailure(K key, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public void putFailure(K key, V value, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public void removeFailure(K key, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public void clearFailure(StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public Cache.Entry iteratorFailure(StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public V putIfAbsentFailure(K key, V value, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public boolean removeFailure(K key, V value, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public V replaceFailure(K key, V value, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public boolean replaceFailure(K key, V value, V newValue, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public Map getAllFailure(Iterable keys, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public void putAllFailure(Map entries, StoreAccessException e) { + throw new RuntimeException(e); + } + + @Override + public void removeAllFailure(Iterable keys, StoreAccessException e) { + throw new RuntimeException(e); + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultCacheStatistics.java b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultCacheStatistics.java new file mode 100644 index 0000000000..294d8c81f2 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultCacheStatistics.java @@ -0,0 +1,243 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.internal.statistics; + +import org.ehcache.core.InternalCache; +import org.ehcache.core.statistics.BulkOps; +import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.PutOutcome; +import org.ehcache.core.statistics.CacheStatistics; +import org.ehcache.core.statistics.ChainedOperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.ehcache.core.statistics.TierStatistics; +import org.ehcache.core.statistics.ValueStatistic; +import org.terracotta.statistics.ValueStatistics; + +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import static org.ehcache.core.statistics.CacheOperationOutcomes.ConditionalRemoveOutcome; +import static org.ehcache.core.statistics.CacheOperationOutcomes.PutIfAbsentOutcome; +import static org.ehcache.core.statistics.CacheOperationOutcomes.RemoveOutcome; +import static org.ehcache.core.statistics.CacheOperationOutcomes.ReplaceOutcome; +import static org.ehcache.core.internal.statistics.StatsUtils.findLowestTier; +import static org.ehcache.core.internal.statistics.StatsUtils.findOperationStatisticOnChildren; +import static org.ehcache.core.internal.statistics.StatsUtils.findTiers; +import static org.ehcache.core.statistics.SuppliedValueStatistic.counter; + +/** + * Contains usage statistics relative to a given cache. + */ +public class DefaultCacheStatistics implements CacheStatistics { + + private volatile CompensatingCounters compensatingCounters = CompensatingCounters.empty(); + + private final org.terracotta.statistics.OperationStatistic get; + private final org.terracotta.statistics.OperationStatistic put; + private final org.terracotta.statistics.OperationStatistic remove; + private final org.terracotta.statistics.OperationStatistic putIfAbsent; + private final org.terracotta.statistics.OperationStatistic replace; + private final org.terracotta.statistics.OperationStatistic conditionalRemove; + + private final InternalCache cache; + + private final Map tierStatistics; + private final TierStatistics lowestTier; + + private final Map> knownStatistics; + + public DefaultCacheStatistics(InternalCache cache) { + this.cache = cache; + + get = findOperationStatisticOnChildren(cache, GetOutcome.class, "get"); + put = findOperationStatisticOnChildren(cache, PutOutcome.class, "put"); + remove = findOperationStatisticOnChildren(cache, RemoveOutcome.class, "remove"); + putIfAbsent = findOperationStatisticOnChildren(cache, PutIfAbsentOutcome.class, "putIfAbsent"); + replace = findOperationStatisticOnChildren(cache, ReplaceOutcome.class, "replace"); + conditionalRemove = findOperationStatisticOnChildren(cache, ConditionalRemoveOutcome.class, "conditionalRemove"); + + String[] tierNames = findTiers(cache); + + String lowestTierName = findLowestTier(tierNames); + TierStatistics lowestTier = null; + + tierStatistics = new HashMap<>(tierNames.length); + for (String tierName : tierNames) { + DefaultTierStatistics tierStatistics = new DefaultTierStatistics(cache, tierName); + this.tierStatistics.put(tierName, tierStatistics); + if (lowestTierName.equals(tierName)) { + lowestTier = tierStatistics; + } + } + this.lowestTier = lowestTier; + + knownStatistics = createKnownStatistics(); + } + + @Override + public , S extends ChainedOperationObserver> void registerDerivedStatistic(Class outcomeClass, String statName, S derivedStatistic) { + OperationStatistic stat = new DelegatingOperationStatistic<>(findOperationStatisticOnChildren(cache, outcomeClass, statName)); + stat.addDerivedStatistic(derivedStatistic); + } + + private Map> createKnownStatistics() { + Map> knownStatistics = new HashMap<>(30); + knownStatistics.put("Cache:HitCount", ValueStatistics.counter(this::getCacheHits)); + knownStatistics.put("Cache:MissCount", ValueStatistics.counter(this::getCacheMisses)); + knownStatistics.put("Cache:PutCount", ValueStatistics.counter(this::getCachePuts)); + knownStatistics.put("Cache:RemovalCount", ValueStatistics.counter(this::getCacheRemovals)); + knownStatistics.put("Cache:EvictionCount", ValueStatistics.counter(this::getCacheEvictions)); + knownStatistics.put("Cache:ExpirationCount", ValueStatistics.counter(this::getCacheExpirations)); + + for (DefaultTierStatistics tier : tierStatistics.values()) { + knownStatistics.putAll(tier.getKnownStatistics()); + } + + return Collections.unmodifiableMap(knownStatistics); + } + + public Map> getKnownStatistics() { + return knownStatistics; + } + + @Override + public Map getTierStatistics() { + return Collections.unmodifiableMap(tierStatistics); + } + + @Override + public void clear() { + compensatingCounters = compensatingCounters.snapshot(this); + } + + @Override + public long getCacheHits() { + return normalize(getHits() - compensatingCounters.cacheHits); + } + + @Override + public float getCacheHitPercentage() { + long cacheHits = getCacheHits(); + return normalize((float) cacheHits / (cacheHits + getCacheMisses())) * 100.0f; + } + + @Override + public long getCacheMisses() { + return normalize(getMisses() - compensatingCounters.cacheMisses); + } + + @Override + public float getCacheMissPercentage() { + long cacheMisses = getCacheMisses(); + return normalize((float) cacheMisses / (getCacheHits() + cacheMisses)) * 100.0f; + } + + @Override + public long getCacheGets() { + return normalize(getHits() + getMisses() - compensatingCounters.cacheGets); + } + + @Override + public long getCachePuts() { + return normalize(getBulkCount(BulkOps.PUT_ALL) + + put.sum(EnumSet.of(PutOutcome.PUT)) + + putIfAbsent.sum(EnumSet.of(PutIfAbsentOutcome.PUT)) + + replace.sum(EnumSet.of(ReplaceOutcome.HIT)) - + compensatingCounters.cachePuts); + } + + @Override + public long getCacheRemovals() { + return normalize(getBulkCount(BulkOps.REMOVE_ALL) + + remove.sum(EnumSet.of(RemoveOutcome.SUCCESS)) + + conditionalRemove.sum(EnumSet.of(ConditionalRemoveOutcome.SUCCESS)) - + compensatingCounters.cacheRemovals); + } + + @Override + public long getCacheEvictions() { + return normalize(lowestTier.getEvictions()); + } + + @Override + public long getCacheExpirations() { + return normalize(lowestTier.getExpirations()); + } + + private long getMisses() { + return getBulkCount(BulkOps.GET_ALL_MISS) + + get.sum(EnumSet.of(GetOutcome.MISS)) + + putIfAbsent.sum(EnumSet.of(PutIfAbsentOutcome.PUT)) + + replace.sum(EnumSet.of(ReplaceOutcome.MISS_NOT_PRESENT)) + + conditionalRemove.sum(EnumSet.of(ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); + } + + private long getHits() { + return getBulkCount(BulkOps.GET_ALL_HITS) + + get.sum(EnumSet.of(GetOutcome.HIT)) + + putIfAbsent.sum(EnumSet.of(PutIfAbsentOutcome.HIT)) + + replace.sum(EnumSet.of(ReplaceOutcome.HIT, ReplaceOutcome.MISS_PRESENT)) + + conditionalRemove.sum(EnumSet.of(ConditionalRemoveOutcome.SUCCESS, ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); + } + + private long getBulkCount(BulkOps bulkOps) { + return cache.getBulkMethodEntries().get(bulkOps).longValue(); + } + + private static long normalize(long value) { + return Math.max(0, value); + } + + private static float normalize(float value) { + if (Float.isNaN(value)) { + return 0.0f; + } + return Math.min(1.0f, Math.max(0.0f, value)); + } + + private static class CompensatingCounters { + final long cacheHits; + final long cacheMisses; + final long cacheGets; + final long cachePuts; + final long cacheRemovals; + + private CompensatingCounters(long cacheHits, long cacheMisses, long cacheGets, long cachePuts, long cacheRemovals) { + this.cacheHits = cacheHits; + this.cacheMisses = cacheMisses; + this.cacheGets = cacheGets; + this.cachePuts = cachePuts; + this.cacheRemovals = cacheRemovals; + } + + static CompensatingCounters empty() { + return new CompensatingCounters(0, 0, 0, 0, 0); + } + + CompensatingCounters snapshot(DefaultCacheStatistics statistics) { + return new CompensatingCounters( + cacheHits + statistics.getHits(), + cacheMisses + statistics.getMisses(), + cacheGets + statistics.getCacheGets(), + cachePuts + statistics.getCachePuts(), + cacheRemovals + statistics.getCacheRemovals()); + } + } + +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultStatisticsService.java b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultStatisticsService.java new file mode 100644 index 0000000000..45c5edcd38 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultStatisticsService.java @@ -0,0 +1,190 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.internal.statistics; + +import org.ehcache.Cache; +import org.ehcache.Status; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.core.InternalCache; +import org.ehcache.core.events.CacheManagerListener; +import org.ehcache.core.spi.service.CacheManagerProviderService; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.spi.store.InternalCacheManager; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.statistics.CacheStatistics; +import org.ehcache.core.statistics.OperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.ehcache.core.statistics.StatisticType; +import org.ehcache.core.statistics.ZeroOperationStatistic; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.statistics.MappedOperationStatistic; +import org.terracotta.statistics.StatisticsManager; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.function.Supplier; + +import static org.terracotta.statistics.StatisticBuilder.operation; + +/** + * Default implementation using the statistics calculated by the observers set on the caches. + */ +@ServiceDependencies(CacheManagerProviderService.class) +public class DefaultStatisticsService implements StatisticsService, CacheManagerListener { + + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultStatisticsService.class); + + private final ConcurrentMap cacheStatistics = new ConcurrentHashMap<>(); + + private volatile InternalCacheManager cacheManager; + + @Override + public CacheStatistics getCacheStatistics(String cacheName) { + CacheStatistics stats = cacheStatistics.get(cacheName); + if (stats == null) { + throw new IllegalArgumentException("Unknown cache: " + cacheName); + } + return stats; + } + + @Override + public void registerWithParent(Object toAssociate, Object parent) { + StatisticsManager.associate(toAssociate).withParent(parent); + } + + @Override + public , T extends Enum> OperationStatistic registerStoreStatistics(Store store, String targetName, int tierHeight, String tag, Map> translation, String statisticName) { + + Class outcomeType = getOutcomeType(translation); + + // If the original stat doesn't exist, we do not need to translate it + if (StatsUtils.hasOperationStat(store, outcomeType, targetName)) { + + MappedOperationStatistic operationStatistic = new MappedOperationStatistic<>(store, translation, statisticName, tierHeight, targetName, tag); + StatisticsManager.associate(operationStatistic).withParent(store); + return new DelegatedMappedOperationStatistics<>(operationStatistic); + } else { + return ZeroOperationStatistic.get(); + } + } + + /** + * From the Map of translation, we extract one of the items to get the declaring class of the enum. + * + * @param translation translation map + * @param type of the outcome + * @param type of the possible translations + * @return the outcome type + */ + private static , T extends Enum> Class getOutcomeType(Map> translation) { + Map.Entry> first = translation.entrySet().iterator().next(); + return first.getValue().iterator().next().getDeclaringClass(); + } + + @Override + public void deRegisterFromParent(Object toDisassociate, Object parent) { + StatisticsManager.dissociate(toDisassociate).fromParent(parent); + } + + @Override + public void cleanForNode(Object node) { + StatisticsManager.nodeFor(node).clean(); + } + + @Override + public void registerStatistic(Object context, String name, StatisticType type, Set tags, Supplier valueSupplier) { + StatisticsManager.createPassThroughStatistic(context, name, tags, convert(type), valueSupplier); + } + + @Override + public > OperationObserver createOperationStatistics(String name, Class outcome, String tag, Object context) { + return new DelegatingOperationObserver<>(operation(outcome).named(name).of(context).tag(tag).build()); + } + + @Override + public void start(ServiceProvider serviceProvider) { + LOGGER.debug("Starting service"); + + CacheManagerProviderService cacheManagerProviderService = serviceProvider.getService(CacheManagerProviderService.class); + cacheManager = cacheManagerProviderService.getCacheManager(); + cacheManager.registerListener(this); + } + + @Override + public void stop() { + LOGGER.debug("Stopping service"); + cacheManager.deregisterListener(this); + cacheStatistics.clear(); + } + + @Override + public void stateTransition(Status from, Status to) { + LOGGER.debug("Moving from " + from + " to " + to); + switch (to) { + case AVAILABLE: + registerAllCaches(); + break; + case UNINITIALIZED: + cacheManager.deregisterListener(this); + cacheStatistics.clear(); + break; + case MAINTENANCE: + throw new IllegalStateException("Should not be started in maintenance mode"); + default: + throw new AssertionError("Unsupported state: " + to); + } + } + + private void registerAllCaches() { + for (Map.Entry> entry : cacheManager.getRuntimeConfiguration().getCacheConfigurations().entrySet()) { + String alias = entry.getKey(); + CacheConfiguration configuration = entry.getValue(); + Cache cache = cacheManager.getCache(alias, configuration.getKeyType(), configuration.getValueType()); + cacheAdded(alias, cache); + } + } + + @Override + public void cacheAdded(String alias, Cache cache) { + LOGGER.debug("Cache added " + alias); + cacheStatistics.put(alias, new DefaultCacheStatistics((InternalCache) cache)); + } + + @Override + public void cacheRemoved(String alias, Cache cache) { + LOGGER.debug("Cache removed " + alias); + cacheStatistics.remove(alias); + } + + private static org.terracotta.statistics.StatisticType convert(StatisticType type) { + switch (type) { + case COUNTER: + return org.terracotta.statistics.StatisticType.COUNTER; + case GAUGE: + return org.terracotta.statistics.StatisticType.GAUGE; + default: + throw new IllegalArgumentException("Untranslatable statistic type : " + type); + } + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceFactory.java b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultStatisticsServiceFactory.java similarity index 79% rename from impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceFactory.java rename to ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultStatisticsServiceFactory.java index 2e8aaf561d..c88dbdd6c9 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceFactory.java +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultStatisticsServiceFactory.java @@ -14,21 +14,23 @@ * limitations under the License. */ -package org.ehcache.impl.internal.statistics; +package org.ehcache.core.internal.statistics; import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; +@Component public class DefaultStatisticsServiceFactory implements ServiceFactory { @Override - public StatisticsService create(ServiceCreationConfiguration serviceConfiguration) { + public StatisticsService create(ServiceCreationConfiguration serviceConfiguration) { return new DefaultStatisticsService(); } @Override - public Class getServiceType() { - return StatisticsService.class; + public Class getServiceType() { + return DefaultStatisticsService.class; } } diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultTierStatistics.java b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultTierStatistics.java new file mode 100755 index 0000000000..3a5bcf2c84 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DefaultTierStatistics.java @@ -0,0 +1,246 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.internal.statistics; + +import org.ehcache.Cache; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.core.statistics.TierOperationOutcomes; +import org.ehcache.core.statistics.TierStatistics; +import org.ehcache.core.statistics.ValueStatistic; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.ValueStatistics; +import org.terracotta.statistics.ZeroOperationStatistic; + +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.function.Supplier; + +import static org.ehcache.core.internal.statistics.StatsUtils.findStatisticOnDescendants; +import static org.ehcache.core.statistics.SuppliedValueStatistic.counter; +import static org.ehcache.core.statistics.SuppliedValueStatistic.gauge; + +/** + * Contains usage statistics relative to a given tier. + */ +public class DefaultTierStatistics implements TierStatistics { + + private volatile CompensatingCounters compensatingCounters = CompensatingCounters.empty(); + + private final Map> knownStatistics; + + private final OperationStatistic get; + private final OperationStatistic put; + private final OperationStatistic putIfAbsent; + private final OperationStatistic replace; + private final OperationStatistic conditionalReplace; + private final OperationStatistic remove; + private final OperationStatistic conditionalRemove; + private final OperationStatistic eviction; + private final OperationStatistic expiration; + private final OperationStatistic compute; + private final OperationStatistic computeIfAbsent; + + //Ehcache default to -1 if unavailable, but the management layer needs optional or null + // (since -1 can be a normal value for a stat). + private final Optional> mapping; + private final Optional> allocatedMemory; + private final Optional> occupiedMemory; + + public DefaultTierStatistics(Cache cache, String tierName) { + + get = findOperationStatistic(cache, tierName, "tier", "get"); + put = findOperationStatistic(cache, tierName, "put"); + putIfAbsent = findOperationStatistic(cache, tierName, "putIfAbsent"); + replace = findOperationStatistic(cache, tierName, "replace"); + conditionalReplace = findOperationStatistic(cache, tierName, "conditionalReplace"); + remove = findOperationStatistic(cache, tierName, "remove"); + conditionalRemove = findOperationStatistic(cache, tierName, "conditionalRemove"); + eviction = findOperationStatistic(cache, tierName, "tier", "eviction"); + expiration = findOperationStatistic(cache, tierName, "expiration"); + compute = findOperationStatistic(cache, tierName, "compute"); + computeIfAbsent = findOperationStatistic(cache, tierName, "computeIfAbsent"); + + mapping = findValueStatistics(cache, tierName, "mappings"); + allocatedMemory = findValueStatistics(cache, tierName, "allocatedMemory"); + occupiedMemory = findValueStatistics(cache, tierName, "occupiedMemory"); + + Map> knownStatistics = createKnownStatistics(tierName); + this.knownStatistics = Collections.unmodifiableMap(knownStatistics); + } + + private Map> createKnownStatistics(String tierName) { + Map> knownStatistics = new HashMap<>(7); + addIfPresent(knownStatistics, tierName + ":HitCount", get, this::getHits); + addIfPresent(knownStatistics, tierName + ":MissCount", get, this::getMisses); + addIfPresent(knownStatistics, tierName + ":PutCount", put, this::getPuts); + addIfPresent(knownStatistics, tierName + ":RemovalCount", remove, this::getRemovals); + + // These two a special because they are used by the cache so they should always be there + knownStatistics.put(tierName + ":EvictionCount", ValueStatistics.counter(this::getEvictions)); + knownStatistics.put(tierName + ":ExpirationCount", ValueStatistics.counter(this::getExpirations)); + + mapping.ifPresent(longValueStatistic -> knownStatistics.put(tierName + ":MappingCount", ValueStatistics.gauge(this::getMappings))); + allocatedMemory.ifPresent(longValueStatistic -> knownStatistics.put(tierName + ":AllocatedByteSize", ValueStatistics.gauge(this::getAllocatedByteSize))); + occupiedMemory.ifPresent(longValueStatistic -> knownStatistics.put(tierName + ":OccupiedByteSize", ValueStatistics.gauge(this::getOccupiedByteSize))); + return knownStatistics; + } + + /** + * Add the statistic as a known statistic only if the reference statistic is available. We consider that the reference statistic can only be + * an instance of {@code ZeroOperationStatistic} when statistics are disabled. + * + * @param knownStatistics map of known statistics + * @param name the name of the statistic to add + * @param reference the reference statistic that should be available for the statistic to be added + * @param valueSupplier the supplier that will provide the current value for the statistic + * @param type of the supplied value + */ + private static void addIfPresent(Map> knownStatistics, String name, OperationStatistic reference, Supplier valueSupplier) { + if(!(reference instanceof ZeroOperationStatistic)) { + knownStatistics.put(name, ValueStatistics.counter(valueSupplier)); + } + } + + public Map> getKnownStatistics() { + return knownStatistics; + } + + private > OperationStatistic findOperationStatistic(Cache cache, String tierName, String tag, String stat) { + return StatsUtils.>findStatisticOnDescendants(cache, tierName, tag, stat).orElse(ZeroOperationStatistic.get()); + } + + private > OperationStatistic findOperationStatistic(Cache cache, String tierName, String stat) { + return StatsUtils.>findStatisticOnDescendants(cache, tierName, stat).orElse(ZeroOperationStatistic.get()); + } + + private Optional> findValueStatistics(Cache cache, String tierName, String statName) { + return findStatisticOnDescendants(cache, tierName, statName); + } + + /** + * Reset the values for this tier. However, note that {@code mapping, allocatedMemory, occupiedMemory} + * but be reset since it doesn't make sense. + */ + @Override + public void clear() { + compensatingCounters = compensatingCounters.snapshot(this); + } + + @Override + public long getHits() { + return get.sum(EnumSet.of(TierOperationOutcomes.GetOutcome.HIT)) + + putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.HIT)) + + replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.REPLACED)) + + compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.HIT)) + + computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.HIT)) + + conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED)) + + conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED)) - + compensatingCounters.hits; + } + + @Override + public long getMisses() { + return get.sum(EnumSet.of(TierOperationOutcomes.GetOutcome.MISS)) + + putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)) + + replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.MISS)) + + computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.NOOP)) + + conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS)) + + conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS)) - + compensatingCounters.misses; + } + + @Override + public long getPuts() { + return put.sum(EnumSet.of(StoreOperationOutcomes.PutOutcome.PUT)) + + putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)) + + compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.PUT)) + + computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.PUT)) + + replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.REPLACED)) + + conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED)) - + compensatingCounters.puts; + } + + @Override + public long getRemovals() { + return remove.sum(EnumSet.of(StoreOperationOutcomes.RemoveOutcome.REMOVED)) + + compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.REMOVED)) + + conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED)) - + compensatingCounters.removals; + } + + @Override + public long getEvictions() { + return eviction.sum(EnumSet.of(TierOperationOutcomes.EvictionOutcome.SUCCESS)) - + compensatingCounters.evictions; + } + + @Override + public long getExpirations() { + return expiration.sum() - compensatingCounters.expirations; + } + + @Override + public long getMappings() { + return mapping.map(org.terracotta.statistics.ValueStatistic::value).orElse(-1L); + } + + @Override + public long getAllocatedByteSize() { + return allocatedMemory.map(org.terracotta.statistics.ValueStatistic::value).orElse(-1L); + } + + @Override + public long getOccupiedByteSize() { + return occupiedMemory.map(org.terracotta.statistics.ValueStatistic::value).orElse(-1L); + } + + private static class CompensatingCounters { + final long hits; + final long misses; + final long puts; + final long removals; + final long evictions; + final long expirations; + + private CompensatingCounters(long hits, long misses, long puts, long removals, long evictions, long expirations) { + this.hits = hits; + this.misses = misses; + this.puts = puts; + this.removals = removals; + this.evictions = evictions; + this.expirations = expirations; + } + + static CompensatingCounters empty() { + return new CompensatingCounters(0, 0, 0, 0, 0, 0); + } + + CompensatingCounters snapshot(DefaultTierStatistics statistics) { + return new CompensatingCounters( + statistics.getHits() + hits, + statistics.getMisses() + misses, + statistics.getPuts() + puts, + statistics.getRemovals() + removals, + statistics.getEvictions() + evictions, + statistics.getExpirations() + expirations + ); + } + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatedMappedOperationStatistics.java b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatedMappedOperationStatistics.java new file mode 100644 index 0000000000..65e8c624f2 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatedMappedOperationStatistics.java @@ -0,0 +1,109 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.internal.statistics; + +import org.ehcache.core.statistics.ChainedOperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.terracotta.statistics.MappedOperationStatistic; + +import java.util.Collection; +import java.util.Set; +import java.util.stream.Collectors; + +public class DelegatedMappedOperationStatistics, D extends Enum> implements OperationStatistic { + + private final MappedOperationStatistic delegate; + + public DelegatedMappedOperationStatistics(MappedOperationStatistic operationStatistic) { + this.delegate = operationStatistic; + } + + @Override + public Class type() { + return delegate.type(); + } + + @Override + public long count(D type) { + return delegate.count(type); + } + + @Override + public long sum(Set types) { + return delegate.sum(types); + } + + @Override + public long sum() { + return delegate.sum(); + } + + @Override + public void begin() { + delegate.begin(); + } + + @Override + public void end(D result) { + delegate.end(result); + } + + @Override + public void addDerivedStatistic(ChainedOperationObserver derived) { + delegate.addDerivedStatistic(convert(derived)); + } + + @Override + public void removeDerivedStatistic(ChainedOperationObserver derived) { + delegate.removeDerivedStatistic(convert(derived)); + } + + @Override + public Collection> getDerivedStatistics() { + Collection> derivedStatistics = delegate.getDerivedStatistics(); + return derivedStatistics.stream().map(this::revert).collect(Collectors.toSet()); + } + + private ChainedOperationObserver revert(org.terracotta.statistics.observer.ChainedOperationObserver observer) { + return new ChainedOperationObserver() { + @Override + public void begin(long time) { + observer.begin(time); + } + + @Override + public void end(long time, long latency, D result) { + observer.end(time, latency, result); + } + }; + } + + private org.terracotta.statistics.observer.ChainedOperationObserver convert(ChainedOperationObserver observer) { + return new org.terracotta.statistics.observer.ChainedOperationObserver() { + @Override + public void begin(long time) { + observer.begin(time); + } + + @Override + public void end(long time, long latency, D result) { + observer.end(time, latency, result); + } + }; + } + + +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatingOperationObserver.java b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatingOperationObserver.java new file mode 100644 index 0000000000..a6b8e9223f --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatingOperationObserver.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.internal.statistics; + +import org.ehcache.core.statistics.OperationObserver; + +public class DelegatingOperationObserver> implements OperationObserver { + + private final org.terracotta.statistics.observer.OperationObserver observer; + + public DelegatingOperationObserver(org.terracotta.statistics.observer.OperationObserver operationObserver) { + this.observer = operationObserver; + } + + @Override + public void begin() { + this.observer.begin(); + } + + @Override + public void end(T result) { + this.observer.end(result); + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatingOperationStatistic.java b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatingOperationStatistic.java new file mode 100644 index 0000000000..0eb00e07df --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/DelegatingOperationStatistic.java @@ -0,0 +1,106 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.internal.statistics; + +import org.ehcache.core.statistics.ChainedOperationObserver; +import org.ehcache.core.statistics.OperationStatistic; + +import java.util.Collection; +import java.util.Set; +import java.util.stream.Collectors; + +public class DelegatingOperationStatistic> implements OperationStatistic { + + private final org.terracotta.statistics.OperationStatistic delegate; + + public DelegatingOperationStatistic(org.terracotta.statistics.OperationStatistic statistic) { + this.delegate = statistic; + } + + @Override + public Class type() { + return delegate.type(); + } + + @Override + public long count(T type) { + return delegate.count(type); + } + + @Override + public long sum(Set types) { + return delegate.sum(types); + } + + @Override + public long sum() { + return delegate.sum(); + } + + @Override + public void begin() { + delegate.begin(); + } + + @Override + public void end(T result) { + delegate.end(result); + } + + @Override + public void addDerivedStatistic(ChainedOperationObserver derived) { + delegate.addDerivedStatistic(convert(derived)); + } + + @Override + public void removeDerivedStatistic(ChainedOperationObserver derived) { + delegate.removeDerivedStatistic(convert(derived)); + } + + @Override + public Collection> getDerivedStatistics() { + Collection> derivedStatistics = delegate.getDerivedStatistics(); + return derivedStatistics.stream().map(this::revert).collect(Collectors.toSet()); + } + + private ChainedOperationObserver revert(org.terracotta.statistics.observer.ChainedOperationObserver observer) { + return new ChainedOperationObserver() { + @Override + public void begin(long time) { + observer.begin(time); + } + + @Override + public void end(long time, long latency, T result) { + observer.end(time, latency, result); + } + }; + } + + private org.terracotta.statistics.observer.ChainedOperationObserver convert(ChainedOperationObserver observer) { + return new org.terracotta.statistics.observer.ChainedOperationObserver() { + @Override + public void begin(long time) { + observer.begin(time); + } + + @Override + public void end(long time, long latency, T result) { + observer.end(time, latency, result); + } + }; + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/StatsUtils.java b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/StatsUtils.java new file mode 100644 index 0000000000..ed0a424911 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/statistics/StatsUtils.java @@ -0,0 +1,266 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.internal.statistics; + +import java.util.Collections; +import java.util.EnumSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.Consumer; + +import org.ehcache.Cache; +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.terracotta.context.ContextManager; +import org.terracotta.context.TreeNode; +import org.terracotta.context.query.Matcher; +import org.terracotta.context.query.Matchers; +import org.terracotta.context.query.Query; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.derived.OperationResultFilter; + +import static org.terracotta.context.query.Matchers.*; +import static org.terracotta.context.query.QueryBuilder.queryBuilder; + +/** + * Class allowing to query cache and tier statistics + */ +public final class StatsUtils { + + private StatsUtils() {} + + public static Matcher> hasTag(final String tag) { + return hasAttribute("tags", new Matcher>() { + @Override + protected boolean matchesSafely(Set object) { + return object.contains(tag); + } + }); + } + + public static Matcher> hasProperty(final String key, final String value) { + return hasAttribute("properties", new Matcher>() { + @Override + protected boolean matchesSafely(Map properties) { + Object val = properties.get(key); + return val != null && value.equals(val); + } + }); + } + + /** + * Search for a statistic on the descendant of the context that matches the tag and statistic name. + * + * @param context the context of the query + * @param discriminator a filter on the discriminator property + * @param tag the tag we are looking for + * @param statName statistic name + * @param type of the statistic that will be returned + * @return the wanted statistic or null if no such statistic is found + * @throws RuntimeException when more than one matching statistic is found + */ + public static Optional findStatisticOnDescendants(Object context, String discriminator, String tag, String statName) { + + @SuppressWarnings("unchecked") + Set statResult = queryBuilder() + .descendants() + .filter(context(attributes(Matchers.allOf( + hasAttribute("name", statName), + hasProperty("discriminator", discriminator), + hasTag(tag))))) + .build().execute(Collections.singleton(ContextManager.nodeFor(context))); + + if (statResult.size() > 1) { + throw new RuntimeException("One stat expected for " + statName + " but found " + statResult.size()); + } + + if (statResult.size() == 1) { + @SuppressWarnings("unchecked") + T result = (T) statResult.iterator().next().getContext().attributes().get("this"); + return Optional.ofNullable(result); + } + + // No such stat in this context + return Optional.empty(); + } + + /** + * Search for a statistic on the descendant of the context that matches the tag and statistic name. + * + * @param context the context of the query + * @param tag the tag we are looking for + * @param statName statistic name + * @param type of the statistic that will be returned + * @return the wanted statistic or null if no such statistic is found + * @throws RuntimeException when more than one matching statistic is found + */ + public static Optional findStatisticOnDescendants(Object context, String tag, String statName) { + + @SuppressWarnings("unchecked") + Set statResult = queryBuilder() + .descendants() + .filter(context(attributes(Matchers.allOf( + hasAttribute("name", statName), + hasTag(tag))))) + .build().execute(Collections.singleton(ContextManager.nodeFor(context))); + + if (statResult.size() > 1) { + throw new RuntimeException("One stat expected for " + statName + " but found " + statResult.size()); + } + + if (statResult.size() == 1) { + @SuppressWarnings("unchecked") + T result = (T) statResult.iterator().next().getContext().attributes().get("this"); + return Optional.ofNullable(result); + } + + // No such stat in this context + return Optional.empty(); + } + + /** + * Find an operation statistic attached (as a children) to this context that matches the statistic name and type + * + * @param context the context of the query + * @param type type of the operation statistic + * @param statName statistic name + * @param type of the operation statistic content + * @return the operation statistic searched for + * @throws RuntimeException if 0 or more than 1 result is found + */ + public static > OperationStatistic findOperationStatisticOnChildren(Object context, Class type, String statName) { + @SuppressWarnings("unchecked") + Query query = queryBuilder() + .children() + .filter(context(attributes(Matchers.allOf(hasAttribute("name", statName), hasAttribute("type", type))))) + .build(); + + Set result = query.execute(Collections.singleton(ContextManager.nodeFor(context))); + if (result.size() > 1) { + throw new RuntimeException("result must be unique"); + } + if (result.isEmpty()) { + throw new RuntimeException("result must not be null"); + } + @SuppressWarnings("unchecked") + OperationStatistic statistic = (OperationStatistic) result.iterator().next().getContext().attributes().get("this"); + return statistic; + } + + /** + * Find the list of tiers of a cache. We assume a lot of things here. + *

    + *
  • The "eviction" statistic is available on the tier
  • + *
  • That the tiers have only one tag attribute
  • + *
  • That this tag contains the tier name
  • + *
  • That the only descendants having an "eviction" statistic are the tiers
  • + *
+ * + * @param cache the context for looking for tiers + * @return an array of tier names + * @throws RuntimeException if not tiers are found or if tiers have multiple tags + */ + public static String[] findTiers(Cache cache) { + // Here I'm randomly taking the eviction observer because it exists on all tiers + @SuppressWarnings("unchecked") + Query statQuery = queryBuilder() + .descendants() + .filter(context(attributes(Matchers.allOf(hasAttribute("name", "eviction"), hasAttribute("type", StoreOperationOutcomes.EvictionOutcome.class))))) + .build(); + + Set statResult = statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); + + if (statResult.isEmpty()) { + throw new RuntimeException("Failed to find tiers using the eviction observer, valid result Set sizes must 1 or more"); + } + + String[] tiers = new String[statResult.size()]; + + int i = 0; + for (TreeNode treeNode : statResult) { + Set tags = (Set) treeNode.getContext().attributes().get("tags"); + if (tags.size() != 1) { + throw new RuntimeException("We expect tiers to have only one tag"); + } + + String storeType = tags.iterator().next().toString(); + tiers[i++] = storeType; + } + return tiers; + } + + /** + * Find the lowest tier from a list of tier. We assume a lot of things here that the tiers depth + * magically matches the alphabetical order. + * + * @param tiers all tiers + * @return the lowest tier + */ + public static String findLowestTier(String[] tiers) { + //if only 1 store then you don't need to find the lowest tier + if (tiers.length == 1) { + return tiers[0]; + } + + //we expect at least one tier + if (tiers.length == 0) { + throw new RuntimeException("No existing tier"); + } + + // We rely here on the alphabetical order matching the depth order so from highest to lowest we have + // OnHeap, OffHeap, Disk, Clustered + String lowestTier = tiers[0]; + for (int i = 1; i < tiers.length; i++) { + if (tiers[i].compareTo(lowestTier) < 0) { + lowestTier = tiers[i]; + } + } + + return lowestTier; + } + + public static > boolean hasOperationStat(Object rootNode, Class statisticType, String statName) { + Query q = queryBuilder().descendants() + .filter(context(identifier(subclassOf(OperationStatistic.class)))) + .filter(context(attributes(Matchers.allOf( + hasAttribute("name", statName), + hasAttribute("this", new Matcher>() { + @Override + protected boolean matchesSafely(OperationStatistic object) { + return object.type().equals(statisticType); + } + }) + )))) + .build(); + + Set result = q.execute(Collections.singleton(ContextManager.nodeFor(rootNode))); + + if (result.size() > 1) { + throw new RuntimeException("a zero or a single stat was expected; found " + result.size()); + } + + return !result.isEmpty(); + } + + public static void registerClearNotification(String alias, Cache cache, Consumer cacheClear) { + OperationStatistic clear = StatsUtils.findOperationStatisticOnChildren(cache, + CacheOperationOutcomes.ClearOutcome.class, "clear"); + clear.addDerivedStatistic(new OperationResultFilter<>(EnumSet.of(CacheOperationOutcomes.ClearOutcome.SUCCESS), + (time, latency) -> cacheClear.accept(alias))); + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/internal/util/ValueSuppliers.java b/ehcache-core/src/main/java/org/ehcache/core/internal/util/ValueSuppliers.java new file mode 100644 index 0000000000..3ddf09f839 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/internal/util/ValueSuppliers.java @@ -0,0 +1,41 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.internal.util; + +/** + * Utility for creating basic {@link org.ehcache.ValueSupplier} instances + * + * @deprecated Now using {@code Supplier} for {@link org.ehcache.expiry.ExpiryPolicy} + */ +@Deprecated +public final class ValueSuppliers { + + /** + * Returns a basic {@link org.ehcache.ValueSupplier} that serves the value passed in + * + * @param value the value to hold + * @param the value type + * @return a value supplier with the given value + */ + public static org.ehcache.ValueSupplier supplierOf(final V value) { + return () -> value; + } + + private ValueSuppliers() { + // Not instantiable + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/osgi/EhcacheActivator.java b/ehcache-core/src/main/java/org/ehcache/core/osgi/EhcacheActivator.java new file mode 100644 index 0000000000..485af41100 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/osgi/EhcacheActivator.java @@ -0,0 +1,68 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.osgi; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.core.util.ClassLoading; +import org.osgi.framework.BundleActivator; +import org.osgi.framework.BundleContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.atomic.AtomicReference; + +import static java.util.Spliterators.spliterator; +import static java.util.stream.Collectors.joining; +import static java.util.stream.StreamSupport.stream; + +public class EhcacheActivator implements BundleActivator { + + public static final String OSGI_LOADING = "org.ehcache.core.osgi"; + + private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheActivator.class); + + private static final AtomicReference CORE_BUNDLE = new AtomicReference<>(); + + @Override + public void start(BundleContext context) throws Exception { + BundleContext currentContext = CORE_BUNDLE.getAndUpdate(current -> current == null ? context : current); + if (currentContext == null) { + String greeting = "Detected OSGi Environment (core is in bundle: " + context.getBundle() + ")"; + if ("false".equalsIgnoreCase(context.getProperty(OSGI_LOADING))) { + SafeOsgi.disableOSGiServiceLoading(); + LOGGER.info(greeting + ": OSGi Based Service Loading Disabled Via System/Framework Property - Extensions Outside This Bundle Will Not Be Detected"); + LOGGER.debug("JDK Service Loading Sees:\n\t" + stream(spliterator(ClassLoading.servicesOfType(ServiceFactory.class).iterator(), Long.MAX_VALUE, 0), false) + .map(sf -> sf.getServiceType().getName()).collect(joining("\n\t"))); + } else { + SafeOsgi.enableOSGiServiceLoading(); + LOGGER.info(greeting + ": Using OSGi Based Service Loading"); + } + } else { + throw new IllegalStateException("Multiple bundle instances running against the same core classes: existing bundle: " + currentContext.getBundle() + " new bundle: " + context.getBundle()); + } + } + + @Override + public void stop(BundleContext context) throws Exception { + SafeOsgi.disableOSGiServiceLoading(); + CORE_BUNDLE.set(null); + } + + public static BundleContext getCoreBundle() { + return CORE_BUNDLE.get(); + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/osgi/OsgiServiceLoader.java b/ehcache-core/src/main/java/org/ehcache/core/osgi/OsgiServiceLoader.java new file mode 100644 index 0000000000..9dac70dd91 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/osgi/OsgiServiceLoader.java @@ -0,0 +1,53 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.osgi; + +import org.osgi.framework.BundleContext; +import org.osgi.framework.InvalidSyntaxException; +import org.osgi.framework.ServiceReference; + +import java.util.Collection; +import java.util.List; +import java.util.stream.Stream; + +import static java.util.stream.Collectors.toList; + +/** + * An OSGi service based equivalent to {@link java.util.ServiceLoader}. + *

+ * This class is used by the {@link org.ehcache.core.spi.ServiceLocator ServiceLocator} (via + * {@link org.ehcache.core.util.ClassLoading#servicesOfType(Class) ClassLoading.servicesOfType(Class)}) to discover services when running inside an OSGi + * environment. This is needed when the required Ehcache services are split across multiple OSGi bundles. + */ +public class OsgiServiceLoader { + + /** + * Locate all services of type {@code T}. + * + * @param serviceType concrete service class + * @param service type + * @return an iterable of {@code T} services + */ + public static Iterable load(Class serviceType) { + try { + BundleContext coreBundle = EhcacheActivator.getCoreBundle(); + return coreBundle.getServiceReferences(serviceType, null).stream().map(coreBundle::getService).collect(toList()); + } catch (InvalidSyntaxException e) { + throw new AssertionError(e); + } + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/osgi/SafeOsgi.java b/ehcache-core/src/main/java/org/ehcache/core/osgi/SafeOsgi.java new file mode 100644 index 0000000000..f36e3a32ad --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/osgi/SafeOsgi.java @@ -0,0 +1,63 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.osgi; + +/** + * A classpath-safe decoupler for the OSGi service loading status. + *

+ * This class provides an OSGi class-decoupled way of checking whether OSGi service loading should be used. It is safe + * to load and call methods on this class when OSGi classes are not present. + */ +public final class SafeOsgi { + + private static volatile boolean OSGI_SERVICE_LOADING; + + /** + * Returns {@code true} if OSGi based service loading should be used. + *

+ * A {@code true} return indicates that Ehcache is running in an OSGi environment and that the user has enabled OSGi + * based service loading. + * + * @return {@code true} if OSGi service loading is enabled. + */ + public static boolean useOSGiServiceLoading() { + return OSGI_SERVICE_LOADING; + } + + /** + * Marks OSGi service loading as enabled. + *

+ * This is called by the {@link EhcacheActivator} when the user has enabled OSGi service loading. + */ + static void enableOSGiServiceLoading() { + OSGI_SERVICE_LOADING = true; + } + + /** + * Marks OSGi service loading as enabled. + *

+ * This is called by the {@link EhcacheActivator} when the user has not enabled OSGi service loading, and also when + * the Ehcache core bundle is stopped. + */ + static void disableOSGiServiceLoading() { + OSGI_SERVICE_LOADING = false; + } + + private SafeOsgi() { + //static holder + } +} diff --git a/core/src/main/java/org/ehcache/core/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/package-info.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/resilience/DefaultRecoveryStore.java b/ehcache-core/src/main/java/org/ehcache/core/resilience/DefaultRecoveryStore.java new file mode 100644 index 0000000000..c7276d3c94 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/resilience/DefaultRecoveryStore.java @@ -0,0 +1,45 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.resilience; + +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.StoreAccessException; + +/** + * Default implementation of the {@link RecoveryStore}. + * + * It maps each obliterate operation to the equivalent remove operation. + */ +public class DefaultRecoveryStore implements RecoveryStore { + + private final Store store; + + public DefaultRecoveryStore(Store store) { + this.store = store; + } + + @Override + public void obliterate() throws StoreAccessException { + store.clear(); + } + + @Override + public void obliterate(K key) throws StoreAccessException { + store.remove(key); + } + +} diff --git a/core/src/main/java/org/ehcache/core/spi/LifeCycled.java b/ehcache-core/src/main/java/org/ehcache/core/spi/LifeCycled.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/LifeCycled.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/LifeCycled.java diff --git a/core/src/main/java/org/ehcache/core/spi/LifeCycledAdapter.java b/ehcache-core/src/main/java/org/ehcache/core/spi/LifeCycledAdapter.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/LifeCycledAdapter.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/LifeCycledAdapter.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/spi/ServiceLocator.java b/ehcache-core/src/main/java/org/ehcache/core/spi/ServiceLocator.java new file mode 100644 index 0000000000..97bf72ed8e --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/ServiceLocator.java @@ -0,0 +1,630 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi; + +import org.ehcache.config.Builder; +import org.ehcache.spi.service.OptionalServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.spi.service.PluralService; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.core.spi.service.ServiceFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Deque; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.OptionalInt; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptySet; +import static java.util.Collections.newSetFromMap; +import static java.util.Collections.singleton; +import static java.util.Collections.unmodifiableSet; +import static java.util.stream.Collectors.groupingBy; +import static java.util.stream.Collectors.toList; +import static java.util.stream.StreamSupport.stream; +import static org.ehcache.core.util.ClassLoading.delegationChain; +import static org.ehcache.core.util.ClassLoading.getDefaultClassLoader; +import static org.ehcache.core.util.ClassLoading.servicesOfType; + +/** + * Provides discovery and tracking services for {@link Service} implementations. + */ +public final class ServiceLocator implements ServiceProvider { + + private static final Logger LOGGER = LoggerFactory.getLogger(ServiceLocator.class); + private final ServiceMap services; + + private final ReadWriteLock runningLock = new ReentrantReadWriteLock(); + + private final AtomicBoolean running = new AtomicBoolean(false); + + public static DependencySet dependencySet() { + return new DependencySet(); + } + + private ServiceLocator(ServiceMap services) { + this.services = services; + } + + @Override + public T getService(Class serviceType) { + if (serviceType.isAnnotationPresent(PluralService.class)) { + throw new IllegalArgumentException(serviceType.getName() + " is marked as a PluralService"); + } + final Collection registeredServices = getServicesOfType(serviceType); + if (registeredServices.size() > 1) { + throw new AssertionError("The non-PluralService type" + serviceType.getName() + + " has more than one service registered"); + } + return (registeredServices.isEmpty() ? null : registeredServices.iterator().next()); + } + + @Override + public Collection getServicesOfType(Class serviceType) { + return services.get(serviceType); + } + + public boolean knowsServiceFor(ServiceConfiguration serviceConfig) { + return services.contains(serviceConfig.getServiceType()); + } + + public void startAllServices() { + Deque started = new LinkedList<>(); + final Lock lock = runningLock.writeLock(); + lock.lock(); + try { + if (!running.compareAndSet(false, true)) { + throw new IllegalStateException("Already started!"); + } + + /* + * This ensures that we start services in dependency order + */ + LinkedList unstarted = new LinkedList<>(services.all()); + int totalServices = unstarted.size(); + long start = System.currentTimeMillis(); + LOGGER.debug("Starting {} Services...", totalServices); + while (!unstarted.isEmpty()) { + boolean startedSomething = false; + for (Iterator it = unstarted.iterator(); it.hasNext(); ) { + Service s = it.next(); + if (hasUnstartedDependencies(s, unstarted)) { + LOGGER.trace("Delaying starting {}", s); + } else { + LOGGER.trace("Starting {}", s); + s.start(this); + started.push(s); + it.remove(); + startedSomething = true; + } + } + if (startedSomething) { + LOGGER.trace("Cycle complete: " + unstarted.size() + " Services remaining"); + } else { + throw new IllegalStateException("Cyclic dependency in Service set: " + unstarted); + } + } + LOGGER.debug("All Services successfully started, {} Services in {}ms", totalServices, System.currentTimeMillis() - start); + } catch (Exception e) { + while(!started.isEmpty()) { + Service toBeStopped = started.pop(); + try { + toBeStopped.stop(); + } catch (Exception e1) { + LOGGER.error("Stopping Service failed due to ", e1); + } + } + throw e; + } finally { + lock.unlock(); + } + } + + public void stopAllServices() throws Exception { + Exception firstException = null; + Lock lock = runningLock.writeLock(); + lock.lock(); + try { + if(!running.compareAndSet(true, false)) { + throw new IllegalStateException("Already stopped!"); + } + + /* + * This ensures that we stop services in dependency order + */ + Collection running = new LinkedList<>(services.all()); + int totalServices = running.size(); + long start = System.currentTimeMillis(); + LOGGER.debug("Stopping {} Services...", totalServices); + while (!running.isEmpty()) { + boolean stoppedSomething = false; + for (Iterator it = running.iterator(); it.hasNext(); ) { + Service s = it.next(); + if (hasRunningDependents(s, running)) { + LOGGER.trace("Delaying stopping {}", s); + } else { + LOGGER.trace("Stopping {}", s); + try { + s.stop(); + } catch (Exception e) { + if (firstException == null) { + firstException = e; + } else { + LOGGER.error("Stopping Service failed due to ", e); + } + } + it.remove(); + stoppedSomething = true; + } + } + if (stoppedSomething) { + LOGGER.trace("Cycle complete: " + running.size() + " Services remaining"); + } else { + throw new AssertionError("Cyclic dependency in Service set: " + running); + } + } + LOGGER.debug("All Services successfully stopped, {} Services in {}ms", totalServices, System.currentTimeMillis() - start); + } finally { + lock.unlock(); + } + if(firstException != null) { + throw firstException; + } + } + + private boolean hasUnstartedDependencies(Service service, Iterable unstarted) { + for (Class dep : identifyTransitiveDependenciesOf(service.getClass())) { + for (Service s : unstarted) { + if (dep.isInstance(s)) { + return true; + } + } + } + return false; + } + + private boolean hasRunningDependents(Service service, Iterable running) { + for (Service runningService : running) { + Set> dependencyClasses = identifyTransitiveDependenciesOf(runningService.getClass()); + for (Class dependencyClass : dependencyClasses) { + if (dependencyClass.isInstance(service)) { + return true; + } + } + } + return false; + } + + public static class DependencySet implements Builder { + + @SuppressWarnings({"rawtypes", "unchecked"}) + private final Iterable> serviceFactories = (Iterable) servicesOfType(ServiceFactory.class); + + private final ServiceMap provided = new ServiceMap(); + private final Set> requested = new HashSet<>(); + private boolean includeMandatoryServices = true; + + public DependencySet with(Service service) { + provided.add(service); + return this; + } + + public DependencySet with(Iterable services) { + for (Service s : services) { + with(s); + } + return this; + } + + public DependencySet with(ServiceCreationConfiguration config) { + Class serviceType = config.getServiceType(); + + //TODO : This stanza is due to the way we use configure the JSR-107 service + if (provided.contains(serviceType) && !serviceType.isAnnotationPresent(PluralService.class)) { + return this; + } + + @SuppressWarnings("unchecked") + Collection> typedServiceFactories = stream(serviceFactories.spliterator(), false) + .filter(f -> serviceType.isAssignableFrom(f.getServiceType())).map(f -> (ServiceFactory) f) + .collect(toList()); + + OptionalInt highestRank = typedServiceFactories.stream().mapToInt(ServiceFactory::rank).max(); + + if (highestRank.isPresent()) { + typedServiceFactories.stream().filter(f -> highestRank.getAsInt() == f.rank()).forEach(f -> with(f.create(config))); + return this; + } else { + throw new IllegalStateException("No factories exist for " + serviceType); + } + } + + public DependencySet with(Class clazz) { + requested.add(clazz); + return this; + } + + + public DependencySet withoutMandatoryServices() { + includeMandatoryServices = false; + return this; + } + + public boolean contains(Class serviceClass) { + return provided.contains(serviceClass); + } + + public T providerOf(Class serviceClass) { + if (serviceClass.isAnnotationPresent(PluralService.class)) { + throw new IllegalArgumentException("Cannot retrieve single provider for plural service"); + } else { + Collection providers = providersOf(serviceClass); + switch (providers.size()) { + case 0: + return null; + case 1: + return providers.iterator().next(); + default: + throw new AssertionError(); + } + } + } + + public Collection providersOf(Class serviceClass) { + return provided.get(serviceClass); + } + + @Override + public ServiceLocator build() { + try { + ServiceMap resolvedServices = new ServiceMap(); + + for (Service service : provided.all()) { + resolvedServices = lookupDependenciesOf(resolvedServices, service.getClass()).add(service); + } + + for (Class request : requested) { + if (request.isAnnotationPresent(PluralService.class)) { + try { + resolvedServices = lookupService(resolvedServices, request); + } catch (DependencyException e) { + if (!resolvedServices.contains(request)) { + throw e; + } + } + } else if (!resolvedServices.contains(request)) { + resolvedServices = lookupService(resolvedServices, request); + } + } + + if (includeMandatoryServices) { + for (List> factories : stream(serviceFactories.spliterator(), false).collect(groupingBy(ServiceFactory::getServiceType)).values()) { + OptionalInt highestRank = factories.stream().mapToInt(ServiceFactory::rank).max(); + + if (highestRank.isPresent()) { + for (ServiceFactory manadatory : factories.stream().filter(ServiceFactory::isMandatory).filter(f -> highestRank.getAsInt() == f.rank()).collect(toList())) { + if (!resolvedServices.contains(manadatory.getServiceType())) { + Service service = manadatory.create(null); + resolvedServices = lookupDependenciesOf(resolvedServices, service.getClass()).add(service); + } + } + } + } + } + + return new ServiceLocator(resolvedServices); + } catch (DependencyException e) { + throw new IllegalStateException(e); + } + } + + ServiceMap lookupDependenciesOf(ServiceMap resolved, Class requested) throws DependencyException { + for (Class dependency : identifyImmediateDependenciesOf(requested)) { + try { + resolved = lookupService(resolved, dependency); + } catch (DependencyException de) { + OptionalServiceDependencies optionalAnnotation = requested.getAnnotation(OptionalServiceDependencies.class); + if (optionalAnnotation != null && Arrays.asList(optionalAnnotation.value()).contains(dependency.getName())) { + LOGGER.debug("Skipping optional dependency of {} that cannot be looked up: {}", requested, dependency); + continue; + } + throw de; + } + } + return resolved; + } + + private ServiceMap lookupService(ServiceMap resolved, Class requested) throws DependencyException { + //Have we already resolved this dependency? + if (resolved.contains(requested) && !requested.isAnnotationPresent(PluralService.class)) { + return resolved; + } + //Attempt resolution from the provided services + resolved = new ServiceMap(resolved).addAll(provided.get(requested)); + if (resolved.contains(requested) && !requested.isAnnotationPresent(PluralService.class)) { + return resolved; + } + Collection> serviceFactories = discoverServices(resolved, requested); + if (serviceFactories.size() > 1 && !requested.isAnnotationPresent(PluralService.class)) { + throw new DependencyException("Multiple factories for non-plural service"); + } + for(ServiceFactory factory : serviceFactories) { + if (!resolved.contains(factory.getServiceType())) { + try { + resolved = lookupDependenciesOf(resolved, factory.getServiceType()); + } catch (DependencyException e) { + continue; + } + + T service = factory.create(null); + + //we copy the service map so that if upstream dependency resolution fails we don't pollute the real resolved set + resolved = new ServiceMap(resolved).add(service); + } + } + if (resolved.contains(requested)) { + return resolved; + } else { + throw new DependencyException("Failed to find provider with satisfied dependency set for " + requested + " [candidates " + serviceFactories + "]"); + } + } + + /** + * For the {@link Service} class specified, attempt to instantiate the service using the + * {@link ServiceFactory} infrastructure. + * + * @param serviceClass the {@code Service} type to create + * @param the type of the {@code Service} + * + * @return the collection of created services; may be empty + * + * @throws IllegalStateException if the configured service is already registered or the configured service + * implements a {@code Service} subtype that is not marked with the {@link PluralService} annotation + * but is already registered + */ + private Collection> discoverServices(ServiceMap resolved, Class serviceClass) { + @SuppressWarnings("unchecked") + Collection> typedServiceFactories = stream(serviceFactories.spliterator(), false) + .filter(f -> serviceClass.isAssignableFrom(f.getServiceType())).map(f -> (ServiceFactory) f) + .filter(f -> !f.getClass().isAnnotationPresent(ServiceFactory.RequiresConfiguration.class)) + .filter(f -> !provided.contains(f.getServiceType())) + .filter(f -> !resolved.contains(f.getServiceType())) + .collect(toList()); + + OptionalInt highestRank = typedServiceFactories.stream().mapToInt(ServiceFactory::rank).max(); + + if (highestRank.isPresent()) { + return typedServiceFactories.stream().filter(f -> highestRank.getAsInt() == f.rank()).collect(toList()); + } else { + return emptyList(); + } + } + } + + private static Collection> getAllInterfaces(final Class clazz) { + ArrayList> interfaces = new ArrayList<>(); + for (Class c = clazz; c != null; c = c.getSuperclass()) { + for (Class i : c.getInterfaces()) { + interfaces.add(i); + interfaces.addAll(getAllInterfaces(i)); + } + } + return interfaces; + } + + private static Set> identifyImmediateDependenciesOf(final Class clazz) { + if (clazz == null) { + return emptySet(); + } + + Set> dependencies = new HashSet<>(); + ServiceDependencies annotation = clazz.getAnnotation(ServiceDependencies.class); + if (annotation != null) { + for (final Class dependency : annotation.value()) { + if (Service.class.isAssignableFrom(dependency)) { + @SuppressWarnings("unchecked") + Class serviceDependency = (Class) dependency; + dependencies.add(serviceDependency); + } else { + throw new IllegalStateException("Service dependency declared by " + clazz.getName() + + " is not a Service: " + dependency.getName()); + } + } + } + OptionalServiceDependencies optionalAnnotation = clazz.getAnnotation(OptionalServiceDependencies.class); + if (optionalAnnotation != null) { + for (String className : optionalAnnotation.value()) { + try { + Class dependencyClass = delegationChain(getDefaultClassLoader(), clazz.getClassLoader()).loadClass(className); + if (Service.class.isAssignableFrom(dependencyClass)) { + @SuppressWarnings("unchecked") + Class serviceDependency = (Class) dependencyClass; + dependencies.add(serviceDependency); + } else { + throw new IllegalStateException("Service dependency declared by " + className + " is not a Service: " + dependencyClass.getName()); + } + } catch (ClassNotFoundException ignored) { + // dependency is optional so we ignore it + } + } + } + + for (Class interfaceClazz : clazz.getInterfaces()) { + if (Service.class.isAssignableFrom(interfaceClazz)) { + dependencies.addAll(identifyImmediateDependenciesOf(Service.class.getClass().cast(interfaceClazz))); + } + } + + dependencies.addAll(identifyImmediateDependenciesOf(clazz.getSuperclass())); + + return dependencies; + } + + private static Set> identifyTransitiveDependenciesOf(final Class clazz) { + + Set> dependencies = identifyImmediateDependenciesOf(clazz); + for (Class dependencyClass : dependencies) { + if (dependencyClass == clazz) { + throw new IllegalStateException("Circular dependency found. Service " + clazz.getName() + " cannot depend on itself."); + } + } + Set> transitive = new HashSet<>(dependencies.size() * 3); // 3 is my feeling of how many there should be per class at most + transitive.addAll(dependencies); + + for (Class klazz : dependencies) { + Set> identified = identifyTransitiveDependenciesOf(klazz); + for (Class dep : identified) { + if(dep == clazz) { + throw new IllegalStateException("Circular dependency found. A dependency of service " + clazz.getName() + " depends on it."); + } + } + transitive.addAll(identified); + } + + return transitive; + } + + private static class DependencyException extends Exception { + private static final long serialVersionUID = -5269926129639323941L; + + public DependencyException(String s) { + super(s); + } + } + + private static class ServiceMap { + + private final Map, Set> services; + + public ServiceMap(ServiceMap resolved) { + this.services = new HashMap<>(); + for (Map.Entry, Set> e : resolved.services.entrySet()) { + Set copy = newSetFromMap(new IdentityHashMap()); + copy.addAll(e.getValue()); + this.services.put(e.getKey(), copy); + } + } + + public ServiceMap() { + this.services = new HashMap<>(); + } + + public Set get(Class serviceType) { + @SuppressWarnings("unchecked") + Set s = (Set) services.get(serviceType); + if (s == null) { + return emptySet(); + } else { + return unmodifiableSet(s); + } + } + + public ServiceMap addAll(Iterable services) { + for (Service s : services) { + add(s); + } + return this; + } + + public ServiceMap add(Service service) { + Set> serviceClazzes = new HashSet<>(); + + serviceClazzes.add(service.getClass()); + for (Class i : getAllInterfaces(service.getClass())) { + if (Service.class != i && Service.class.isAssignableFrom(i)) { + + @SuppressWarnings("unchecked") + Class serviceClass = (Class) i; + + serviceClazzes.add(serviceClass); + } + } + + /* + * Register the concrete service under all Service subtypes it implements. If + * the Service subtype is annotated with @PluralService, permit multiple registrations; + * otherwise, fail the registration, + */ + for (Class serviceClazz : serviceClazzes) { + if (serviceClazz.isAnnotationPresent(PluralService.class)) { + // Permit multiple registrations + Set registeredServices = services.get(serviceClazz); + if (registeredServices == null) { + registeredServices = new LinkedHashSet<>(); + services.put(serviceClazz, registeredServices); + } + registeredServices.add(service); + } else { + // Only a single registration permitted + Set registeredServices = services.get(serviceClazz); + if (registeredServices == null || registeredServices.isEmpty()) { + services.put(serviceClazz, singleton(service)); + } else if (!registeredServices.contains(service)) { + final StringBuilder message = new StringBuilder("Duplicate service implementation(s) found for ") + .append(service.getClass()); + for (Class serviceClass : serviceClazzes) { + if (!serviceClass.isAnnotationPresent(PluralService.class)) { + Set s = this.services.get(serviceClass); + final Service declaredService = s == null ? null : s.iterator().next(); + if (declaredService != null) { + message + .append("\n\t\t- ") + .append(serviceClass) + .append(" already has ") + .append(declaredService.getClass()); + } + } + } + throw new IllegalStateException(message.toString()); + } + } + } + return this; + } + + public Set all() { + Set all = newSetFromMap(new IdentityHashMap()); + for (Set s : services.values()) { + all.addAll(s); + } + return unmodifiableSet(all); + } + + public boolean contains(Class request) { + return services.containsKey(request); + } + } +} diff --git a/core/src/main/java/org/ehcache/core/spi/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/spi/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/package-info.java diff --git a/core/src/main/java/org/ehcache/core/spi/service/CacheManagerProviderService.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/CacheManagerProviderService.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/service/CacheManagerProviderService.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/service/CacheManagerProviderService.java diff --git a/core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java similarity index 99% rename from core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java index b2afe0c857..cde1fd48d4 100644 --- a/core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java @@ -31,4 +31,4 @@ public interface DiskResourceService extends PersistableResourceService { * @return a {@link FileBasedPersistenceContext} */ FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException; -} \ No newline at end of file +} diff --git a/core/src/main/java/org/ehcache/core/spi/service/ExecutionService.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/ExecutionService.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/service/ExecutionService.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/service/ExecutionService.java diff --git a/core/src/main/java/org/ehcache/core/spi/service/FileBasedPersistenceContext.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/FileBasedPersistenceContext.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/service/FileBasedPersistenceContext.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/service/FileBasedPersistenceContext.java diff --git a/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java new file mode 100644 index 0000000000..54c1688e3e --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.service; + +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +/** + * A factory abstraction that can create {@link Service} instances. + */ +public interface ServiceFactory { + + /** + * Returns {@code true} if this factory's services are mandatory in all environments. + * + * @return {@code true} if this factory's services are mandatory + */ + default boolean isMandatory() { + return false; + } + + /** + * Returns an optional ranking integer is used to choose a service factory when multiple factories are available for + * the same service type. Higher ranking value service factories are preferred. + * + * @return a factory ranking value + */ + default int rank() { + return 1; + } + + /** + * Creates an instance of the service using the passed in {@link ServiceCreationConfiguration}. + *

+ * Note that a {@code null} configuration may be supported or even required by a service implementation. + * + * @param configuration the creation configuration, can be {@code null} for some services + * @return the new service, not {@link Service#start(ServiceProvider) started} + */ + T create(ServiceCreationConfiguration configuration); + + /** + * Queries a {@code ServiceFactory} to know which concrete {@link Service} type it produces. + * + * @return the concrete class of the produced service. + */ + Class getServiceType(); + + + @Retention(RUNTIME) + @Target(ElementType.TYPE) + @interface RequiresConfiguration { + + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/spi/service/ServiceUtils.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/ServiceUtils.java new file mode 100644 index 0000000000..faa6df0a1a --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/service/ServiceUtils.java @@ -0,0 +1,121 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.service; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Helper class to find a service or service configuration matching the wanted type. Note that the class + * is named {@code ServiceUtils} but it would actually work with anything, not only service implementations. + */ +public final class ServiceUtils { + + private ServiceUtils() { + // No instance possible + } + + private static Stream findStreamAmongst(Class clazz, Collection instances) { + return instances.stream() + .filter(clazz::isInstance) + .map(clazz::cast); + } + + /** + * Find instances of {@code clazz} among the {@code instances}. + * + * @param clazz searched class + * @param instances instances looked at + * @param type of the searched instances + * @return the list of compatible instances + */ + public static Collection findAmongst(Class clazz, Collection instances) { + return findStreamAmongst(clazz, instances) + .collect(Collectors.toList()); + } + + /** + * Find instances of {@code clazz} among the {@code instances}. + * + * @param clazz searched class + * @param instances instances looked at + * @param type of the searched instances + * @return the list of compatible instances + */ + public static Collection findAmongst(Class clazz, Object ... instances) { + return findAmongst(clazz, Arrays.asList(instances)); + } + + /** + * Find the only expected instance of {@code clazz} among the {@code instances}. + * + * @param clazz searched class + * @param instances instances looked at + * @param type of the searched instance + * @return the compatible instance or null if none are found + * @throws IllegalArgumentException if more than one matching instance + */ + public static T findSingletonAmongst(Class clazz, Collection instances) { + return findOptionalAmongst(clazz, instances) + .orElse(null); + } + + /** + * Find the only expected instance of {@code clazz} among the {@code instances}. + * + * @param clazz searched class + * @param instances instances looked at + * @param type of the searched instance + * @return the optionally found compatible instance + * @throws IllegalArgumentException if more than one matching instance + */ + public static Optional findOptionalAmongst(Class clazz, Collection instances) { + return findStreamAmongst(clazz, instances) + .reduce((i1, i2) -> { + throw new IllegalArgumentException("More than one " + clazz.getName() + " found"); + }); + } + + /** + * Find the only expected instance of {@code clazz} among the {@code instances}. + * + * @param clazz searched class + * @param instances instances looked at + * @param type of the searched instance + * @return the compatible instance or null if none are found + * @throws IllegalArgumentException if more than one matching instance + */ + public static T findSingletonAmongst(Class clazz, Object ... instances) { + return findSingletonAmongst(clazz, Arrays.asList(instances)); + } + + /** + * Find the only expected instance of {@code clazz} among the {@code instances}. + * + * @param clazz searched class + * @param instances instances looked at + * @param type of the searched instance + * @return the optionally found compatible instance + * @throws IllegalArgumentException if more than one matching instance + */ + public static Optional findOptionalAmongst(Class clazz, Object ... instances) { + return findOptionalAmongst(clazz, Arrays.asList(instances)); + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/spi/service/StatisticsService.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/StatisticsService.java new file mode 100644 index 0000000000..720edee96f --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/service/StatisticsService.java @@ -0,0 +1,97 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.service; + +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.statistics.CacheStatistics; +import org.ehcache.core.statistics.OperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.ehcache.core.statistics.StatisticType; +import org.ehcache.spi.service.Service; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +/** + * Service providing raw statistics for cache and tier usage. + */ +public interface StatisticsService extends Service { + + /** + * Return the object containing the statistics for a given cache name. + * + * @param cacheName name (alias) of the cache + * @return all the cache statistics + */ + CacheStatistics getCacheStatistics(String cacheName); + + /** + * Registers the object to parent + * @param toAssociate object to associate + * @param parent to which object is associated + */ + void registerWithParent(Object toAssociate, Object parent); + + /** + * Registers store of the cache for statistics + * @param store {@link Store} of the cache to be registered + * @param targetName statistics name after translation + * @param tierHeight of the store + * @param tag with which the statistics is associated + * @param translation relationship among maintained statistics + * @param statisticName name of the statistic + * @return statistics for the store + */ + , T extends Enum> OperationStatistic registerStoreStatistics(Store store, String targetName, int tierHeight, String tag, Map> translation, String statisticName); + + /** + * De-registers object from the parent + * @param toDeassociate object to dissociate + * @param parent to which object is associated + */ + void deRegisterFromParent(Object toDeassociate, Object parent); + + /** + * Clears all associations + * @param node for which all associations are cleared + */ + void cleanForNode(Object node); + + /** + * Register statistics with value supplier + * @param context association object + * @param name of the statistics + * @param type StatisticType to be registered + * @param tags with which the statistics is associated + * @param valueSupplier supplies the value to maintain statistics + * @param the generic type + */ + void registerStatistic(Object context, String name, StatisticType type, Set tags, Supplier valueSupplier); + + /** + * Create operation statistic for provided type + * @param name of the operation observer + * @param outcome Class of the type of statistic + * @param tag with which the statistics is associated + * @param context association object + * @return the observer for the provided statistics + */ + > OperationObserver createOperationStatistics(String name, Class outcome, String tag, Object context); + +} diff --git a/core/src/main/java/org/ehcache/core/spi/service/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/spi/service/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/service/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/service/package-info.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/spi/store/AbstractValueHolder.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/AbstractValueHolder.java new file mode 100644 index 0000000000..b7a3b270fe --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/AbstractValueHolder.java @@ -0,0 +1,167 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.store; + +import org.ehcache.core.config.ExpiryUtils; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLongFieldUpdater; + +import static java.lang.String.format; +import static org.ehcache.core.config.ExpiryUtils.isExpiryDurationInfinite; + +/** + * @author Ludovic Orban + */ +public abstract class AbstractValueHolder implements Store.ValueHolder { + + private final long id; + private final long creationTime; + @SuppressWarnings("CanBeFinal") + private volatile long lastAccessTime; + @SuppressWarnings("CanBeFinal") + private volatile long expirationTime; + + @SuppressWarnings("rawtypes") + private static final AtomicLongFieldUpdater ACCESSTIME_UPDATER = AtomicLongFieldUpdater.newUpdater(AbstractValueHolder.class, "lastAccessTime"); + @SuppressWarnings("rawtypes") + private static final AtomicLongFieldUpdater EXPIRATIONTIME_UPDATER = AtomicLongFieldUpdater.newUpdater(AbstractValueHolder.class, "expirationTime"); + + protected AbstractValueHolder(long id, long creationTime) { + this(id, creationTime, NO_EXPIRE); + } + + protected AbstractValueHolder(long id, long creationTime, long expirationTime) { + this.id = id; + this.creationTime = creationTime; + this.expirationTime = expirationTime; + this.lastAccessTime = creationTime; + } + + @Override + public long creationTime() { + return creationTime; + } + + /** + * Set the new expiration time in milliseconds. Can be {@link #NO_EXPIRE} if the entry + * shouldn't expire. + * + * @param expirationTime new expiration time + */ + public void setExpirationTime(long expirationTime) { + if (expirationTime == NO_EXPIRE) { + updateExpirationTime(NO_EXPIRE); + } else if (expirationTime < 0) { + throw new IllegalArgumentException("invalid expiration time: " + expirationTime); + } else { + updateExpirationTime(expirationTime); + } + } + + private void updateExpirationTime(long update) { + while (true) { + long current = this.expirationTime; + if (current >= update) { + break; + } + if (EXPIRATIONTIME_UPDATER.compareAndSet(this, current, update)) { + break; + } + } + } + + public void accessed(long now, Duration expiration) { + if (expiration != null) { + if (isExpiryDurationInfinite(expiration)) { + setExpirationTime(Store.ValueHolder.NO_EXPIRE); + } else { + long newExpirationTime = ExpiryUtils.getExpirationMillis(now, expiration); + setExpirationTime(newExpirationTime); + } + } + setLastAccessTime(now); + } + + @Override + public long expirationTime() { + return this.expirationTime; + } + + @Override + public boolean isExpired(long expirationTime) { + long expire = this.expirationTime; + if (expire == NO_EXPIRE) { + return false; + } + return expire <= expirationTime; + } + + @Override + public long lastAccessTime() { + return lastAccessTime; + } + + /** + * Set the last time this entry was accessed in milliseconds. + * + * @param lastAccessTime last time the entry was accessed + */ + public void setLastAccessTime(long lastAccessTime) { + while (true) { + long current = this.lastAccessTime; + if (current >= lastAccessTime) { + break; + } + if (ACCESSTIME_UPDATER.compareAndSet(this, current, lastAccessTime)) { + break; + } + } + } + + @Override + public int hashCode() { + int result = 1; + result = 31 * result + (int)(creationTime ^ (creationTime >>> 32)); + result = 31 * result + (int)(lastAccessTime ^ (lastAccessTime >>> 32)); + result = 31 * result + (int)(expirationTime ^ (expirationTime >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof AbstractValueHolder) { + AbstractValueHolder other = (AbstractValueHolder) obj; + return + other.creationTime == creationTime && + other.expirationTime == expirationTime && + other.lastAccessTime == lastAccessTime; + } + return false; + } + + @Override + public long getId() { + return id; + } + + @Override + public String toString() { + return format("%s", get()); + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/spi/store/AbstractWrapperStoreProvider.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/AbstractWrapperStoreProvider.java new file mode 100644 index 0000000000..63e21e0b75 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/AbstractWrapperStoreProvider.java @@ -0,0 +1,102 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.store; + +import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.spi.service.OptionalServiceDependencies; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceProvider; + +import java.util.Arrays; +import java.util.Map; + +import static org.ehcache.core.store.StoreSupport.selectStoreProvider; + +@OptionalServiceDependencies("org.ehcache.core.spi.service.StatisticsService") +public abstract class AbstractWrapperStoreProvider implements WrapperStore.Provider { + + private volatile ServiceProvider serviceProvider; + + private final Map, StoreReference> createdStores = new ConcurrentWeakIdentityHashMap<>(); + + + @Override + public Store createStore(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + + Store.Provider underlyingStoreProvider = selectStoreProvider(serviceProvider, storeConfig.getResourcePools().getResourceTypeSet(), + Arrays.asList(serviceConfigs)); + Store store = underlyingStoreProvider.createStore(storeConfig, serviceConfigs); + + Store wrappedStore = wrap(store, storeConfig, serviceConfigs); + StatisticsService statisticsService = serviceProvider.getService(StatisticsService.class); + if (statisticsService != null) { + statisticsService.registerWithParent(store, wrappedStore); + } + createdStores.put(wrappedStore, new StoreReference<>(store, underlyingStoreProvider)); + return wrappedStore; + } + + protected abstract Store wrap(Store store, Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs); + + @Override + public void releaseStore(Store resource) { + StoreReference storeRef = createdStores.remove(resource); + if (storeRef != null) { + storeRef.release(); + } + } + + @Override + public void initStore(Store resource) { + StoreReference storeRef = createdStores.get(resource); + if (storeRef != null) { + storeRef.init(); + } + } + + @Override + public void start(ServiceProvider serviceProvider) { + this.serviceProvider = serviceProvider; + } + + @Override + public void stop() { + this.createdStores.clear(); + this.serviceProvider = null; + } + + + private static class StoreReference { + + private final Store store; + private final Store.Provider provider; + + public StoreReference(Store store, Store.Provider provider) { + this.store = store; + this.provider = provider; + } + + public void release() { + provider.releaseStore(store); + } + + public void init() { + provider.initStore(store); + } + } +} diff --git a/core/src/main/java/org/ehcache/core/spi/store/ConfigurationChangeSupport.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/ConfigurationChangeSupport.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/ConfigurationChangeSupport.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/ConfigurationChangeSupport.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/InternalCacheManager.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/InternalCacheManager.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/InternalCacheManager.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/InternalCacheManager.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/Store.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/Store.java similarity index 87% rename from core/src/main/java/org/ehcache/core/spi/store/Store.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/Store.java index a9dcc41d16..d3c0fd82ff 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/Store.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/Store.java @@ -17,12 +17,13 @@ package org.ehcache.core.spi.store; import org.ehcache.Cache; -import org.ehcache.ValueSupplier; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; -import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; @@ -31,11 +32,13 @@ import java.util.Collection; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; +import javax.annotation.Nonnull; + /** * The {@code Store} interface represents the backing storage of a {@link Cache}. It abstracts the support for multiple * tiers, eventing, eviction and expiry. @@ -110,6 +113,22 @@ public interface Store extends ConfigurationChangeSupport { */ PutStatus put(K key, V value) throws StoreAccessException; + /** + * Maps the specified key to the specified value in this store. + * Neither the key nor the value can be {@code null}. + * + * @param key key with which the specified value is to be associated + * @param value value to be associated with the specified key + * @return the previously associated value + * + * @throws NullPointerException if any of the arguments is {@code null} + * @throws ClassCastException if the specified key or value are not of the correct types ({@code K} or {@code V}) + * @throws StoreAccessException if the mapping can't be installed + */ + default ValueHolder getAndPut(K key, V value) throws StoreAccessException { + return getAndCompute(key, (k, v) -> value); + } + /** * Maps the specified key to the specified value in this store, unless a non-expired mapping * already exists. @@ -131,6 +150,7 @@ public interface Store extends ConfigurationChangeSupport { * * @param key key with which the specified value is to be associated * @param value value to be associated with the specified key + * @param put lambda to be consumed if value has been put * @return the {@link Store.ValueHolder ValueHolder} to * which the specified key was previously mapped, or {@code null} if no such mapping existed or the mapping was expired * @@ -140,7 +160,7 @@ public interface Store extends ConfigurationChangeSupport { * * @see #replace(Object, Object) */ - ValueHolder putIfAbsent(K key, V value) throws StoreAccessException; + ValueHolder putIfAbsent(K key, V value, Consumer put) throws StoreAccessException; /** * Removes the key (and its corresponding value) from this store. @@ -157,6 +177,24 @@ public interface Store extends ConfigurationChangeSupport { */ boolean remove(K key) throws StoreAccessException; + + /** + * Removes the key (and its corresponding value) from this store. + * This method does nothing if the key is not mapped. + *

+ * The key cannot be {@code null}. + * + * @param key the key that needs to be removed + * @return the previously associated value + * + * @throws NullPointerException if the specified key is null + * @throws NullPointerException if the argument is {@code null} + * @throws StoreAccessException if the mapping can't be removed + */ + default ValueHolder getAndRemove(K key) throws StoreAccessException { + return getAndCompute(key, (k, v) -> null); + } + /** * Removes the entry for a key only if currently mapped to the given value * and the entry is not expired. @@ -274,32 +312,28 @@ public interface Store extends ConfigurationChangeSupport { *

* This is equivalent to *

-   *   V newValue = mappingFunction.apply(key, store.get(key));
+   *   V oldValue = store.get(key;
+   *   V newValue = mappingFunction.apply(key, oldValue);
    *   if (newValue != null) {
    *     store.put(key, newValue);
    *   } else {
    *     store.remove(key);
    *   }
-   *   return newValue;
+   *   return oldValue;
    * 
* except that the action is performed atomically. - *

- * This is equivalent to calling {@link Store#compute(Object, BiFunction, Supplier)} - * with a "replaceEquals" function that returns {@link Boolean#TRUE true}. - *

* Neither the key nor the function can be {@code null} * * @param key the key to update the mapping for * @param mappingFunction the function that will produce the new value. - * @return the new value associated with the key or {@code null} if none + * @return the existing value associated with the key or {@code null} if none * * @throws ClassCastException if the specified key is not of the correct type {@code K} * @throws NullPointerException if any of the arguments is {@code null} * @throws StoreAccessException if the mapping can't be changed * - * @see #compute(Object, BiFunction, Supplier) */ - ValueHolder compute(K key, BiFunction mappingFunction) throws StoreAccessException; + ValueHolder getAndCompute(K key, BiFunction mappingFunction) throws StoreAccessException; /** * Compute the value for the given key by invoking the given function to produce the value. @@ -334,15 +368,15 @@ public interface Store extends ConfigurationChangeSupport { * @param key the key to operate on * @param mappingFunction the function that will produce the new value. * @param replaceEqual indicates if an equal value replaces the existing one + * @param invokeWriter indicates if the writer should be invoked * @return the new value associated with the key or {@code null} if none * * @throws ClassCastException if the specified key is not of the correct type {@code K} * @throws NullPointerException if any of the arguments is {@code null} * @throws StoreAccessException if the mapping can't be changed * - * @see #compute(Object, BiFunction) */ - ValueHolder compute(K key, BiFunction mappingFunction, Supplier replaceEqual) throws StoreAccessException; + ValueHolder computeAndGet(K key, BiFunction mappingFunction, Supplier replaceEqual, Supplier invokeWriter) throws StoreAccessException; /** * Compute the value for the given key (only if absent or expired) by invoking the given function to produce the value. @@ -452,7 +486,7 @@ public interface Store extends ConfigurationChangeSupport { * @return a {@link Map} of key/value pairs for each key in keys to the previously missing value. * @throws ClassCastException if the specified key(s) are not of the correct type ({@code K}). Also thrown if the given function produces * entries with either incorrect key or value types - * @throws StoreAccessException + * @throws StoreAccessException when a failure occurs when accessing the store */ Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException; @@ -461,7 +495,7 @@ public interface Store extends ConfigurationChangeSupport { * * @param the value type */ - interface ValueHolder extends ValueSupplier { + interface ValueHolder extends Supplier { /** * Constant value indicating no expiration - an eternal mapping. @@ -471,49 +505,31 @@ interface ValueHolder extends ValueSupplier { /** * Accessor to the creation time of this ValueHolder * - * @param unit the timeUnit to return the creation time in - * @return the creation time in the given unit + * @return the creation time in milliseconds */ - long creationTime(TimeUnit unit); + long creationTime(); /** * Accessor to the expiration time of this ValueHolder * - * @param unit the timeUnit to return the creation time in - * @return the expiration time in the given unit. A value of {@link #NO_EXPIRE} means that the ValueHolder will never expire. + * @return the expiration time in milliseconds. A value of {@link #NO_EXPIRE} means that the ValueHolder will never expire. */ - long expirationTime(TimeUnit unit); + long expirationTime(); /** * Check if the ValueHolder is expired relative to the specified time * - * @param expirationTime the expiration time relative to which the expiry check must be made - * @param unit the unit of the expiration time + * @param expirationTime the expiration time (in ms) relative to which the expiry check must be made * @return true if the ValueHolder expired relative to the given expiration time */ - boolean isExpired(long expirationTime, TimeUnit unit); + boolean isExpired(long expirationTime); /** * Accessor to the last access time of the Value held in this ValueHolder * - * @param unit the timeUnit to return the last access time in - * @return the last access time in the given unit - */ - long lastAccessTime(TimeUnit unit); - - /** - * Accessor to the hit rate of the value held in this {@code ValueHolder}. - * - * @param now the time in {@link TimeUnit#MILLISECONDS} upto which the rate needs to be calculated - * @param unit the {@link TimeUnit} in which the rate is to returned - * @return the hit rate in the given unit + * @return the last access time in milliseconds */ - float hitRate(long now, TimeUnit unit); - - /** - * @return hit counter of the Value held in this ValueHolder - */ - long hits(); + long lastAccessTime(); /** * The combination of this identifier and the key that ValueHolder is mapped to should to be @@ -524,11 +540,19 @@ interface ValueHolder extends ValueSupplier { */ long getId(); + /** + * Returns the value held by this value holder. This value can't be {@code null}. + * + * @return the value held + */ + @Nonnull + @Override + V get(); } /** * The Service used to create Stores. - * Implementation of {@link Provider} have be thread-safe. + * Implementation of {@link Provider} have to be thread-safe. */ @PluralService interface Provider extends Service { @@ -540,7 +564,7 @@ interface Provider extends Service { * @param serviceConfigs the configurations the Provider may need to configure the Store * @return the Store honoring the configurations passed in */ - Store createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs); + Store createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs); /** * Informs this Provider, a Store it created is being disposed (i.e. closed) @@ -566,7 +590,7 @@ interface Provider extends Service { * to handle the resource types specified by {@code resourceTypes}; a rank of 0 indicates the store * can not handle all types specified in {@code resourceTypes} */ - int rank(Set> resourceTypes, Collection> serviceConfigs); + int rank(Set> resourceTypes, Collection> serviceConfigs); } /** @@ -608,7 +632,7 @@ interface Configuration { /** * The expiration policy instance for this store */ - Expiry getExpiry(); + ExpiryPolicy getExpiry(); /** * The resource pools this store can make use of @@ -629,6 +653,28 @@ interface Configuration { * The concurrency level of the dispatcher that processes events */ int getDispatcherConcurrency(); + + /** + * If operation statistics (e.g. get/put count) should be enabled. It is + * a default method to keep the original behavior which was enabled all the time. + */ + default boolean isOperationStatisticsEnabled() { + return true; + } + + /** + * + * Cache Loader-Writer for the store + * + */ + CacheLoaderWriter getCacheLoaderWriter(); + + /** + * Whether Store should use loader-writer in atomic ops or not + */ + default boolean useLoaderInAtomics() { + return false; + } } /** diff --git a/ehcache-core/src/main/java/org/ehcache/core/spi/store/WrapperStore.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/WrapperStore.java new file mode 100644 index 0000000000..4002b660b4 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/WrapperStore.java @@ -0,0 +1,49 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.store; + +import org.ehcache.spi.service.PluralService; +import org.ehcache.spi.service.ServiceConfiguration; + +import java.util.Collection; + +/** + * Marker interface for {@link Store}s which act like wrapper and does not have any storage, rather + * delegate the storage to other stores + * @param the key type + * @param the value type + */ +public interface WrapperStore extends Store { + + /** + * Service to create {@link WrapperStore}s + */ + @PluralService + interface Provider extends Store.Provider { + + /** + * Gets the internal ranking for the {@code WrapperStore} instances provided by this {@code Provider} of the wrapper + * store's + * + * @param serviceConfigs the collection of {@code ServiceConfiguration} instances that may contribute + * to the ranking + * @return a non-negative rank indicating the ability of a {@code WrapperStore} created by this {@code Provider} + */ + int wrapperStoreRank(Collection> serviceConfigs); + + } +} diff --git a/core/src/main/java/org/ehcache/core/spi/store/events/StoreEvent.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEvent.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/events/StoreEvent.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEvent.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/events/StoreEventFilter.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEventFilter.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/events/StoreEventFilter.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEventFilter.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/events/StoreEventListener.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEventListener.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/events/StoreEventListener.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEventListener.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/events/StoreEventSource.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEventSource.java similarity index 81% rename from core/src/main/java/org/ehcache/core/spi/store/events/StoreEventSource.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEventSource.java index 07df024d72..d49187d70b 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/events/StoreEventSource.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/events/StoreEventSource.java @@ -41,7 +41,16 @@ public interface StoreEventSource { * * @param ordering {@code true} if ordering is desired, {@code false} for no ordering */ - void setEventOrdering(boolean ordering); + void setEventOrdering(boolean ordering) throws IllegalArgumentException; + + /** + * Toggles event synchronicity. + *

+ * If {@code true} it means events will be fire synchronously. + * + * @param synchronous {@code true} if synchronicity is desired, {@code false} for asynchronous. + */ + void setSynchronous(boolean synchronous) throws IllegalArgumentException; /** * Indicates if the related {@link org.ehcache.core.spi.store.Store} is delivering events ordered or not. diff --git a/core/src/main/java/org/ehcache/core/spi/store/events/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/events/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/events/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/events/package-info.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/heap/LimitExceededException.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/LimitExceededException.java similarity index 93% rename from core/src/main/java/org/ehcache/core/spi/store/heap/LimitExceededException.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/LimitExceededException.java index 1cdd184e50..04a650c2ca 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/heap/LimitExceededException.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/LimitExceededException.java @@ -22,6 +22,8 @@ */ public class LimitExceededException extends Exception { + private static final long serialVersionUID = -4689090295854830331L; + /** * Creates an exception with the provided message * diff --git a/core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngine.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngine.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngine.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngine.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngineProvider.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngineProvider.java similarity index 96% rename from core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngineProvider.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngineProvider.java index ffa7f6c8eb..28feed15df 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngineProvider.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/SizeOfEngineProvider.java @@ -36,5 +36,5 @@ public interface SizeOfEngineProvider extends Service { * @return {@link SizeOfEngine} instance */ - SizeOfEngine createSizeOfEngine(ResourceUnit resourceUnit, ServiceConfiguration... serviceConfigs); + SizeOfEngine createSizeOfEngine(ResourceUnit resourceUnit, ServiceConfiguration... serviceConfigs); } diff --git a/core/src/main/java/org/ehcache/core/spi/store/heap/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/heap/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/heap/package-info.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/package-info.java diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/AuthoritativeTier.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/AuthoritativeTier.java similarity index 95% rename from core/src/main/java/org/ehcache/core/spi/store/tiering/AuthoritativeTier.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/AuthoritativeTier.java index f709fcd88d..4cb97c40da 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/tiering/AuthoritativeTier.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/AuthoritativeTier.java @@ -17,7 +17,7 @@ package org.ehcache.core.spi.store.tiering; import org.ehcache.config.ResourceType; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; @@ -107,14 +107,14 @@ interface Provider extends Service { /** * Creates a new {@link AuthoritativeTier} instance using the provided configuration. * - * @param storeConfig the {@code Store} configuration - * @param serviceConfigs a collection of service configurations * @param the key type for this tier * @param the value type for this tier * + * @param storeConfig the {@code Store} configuration + * @param serviceConfigs a collection of service configurations * @return the new authoritative tier */ - AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs); + AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs); /** * Releases an {@link AuthoritativeTier}. @@ -146,7 +146,7 @@ interface Provider extends Service { * to handle the resource type specified by {@code authorityResource}; a rank of 0 indicates the authority * can not handle the type specified in {@code authorityResource} */ - int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs); + int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs); } } diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/CachingTier.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/CachingTier.java similarity index 88% rename from core/src/main/java/org/ehcache/core/spi/store/tiering/CachingTier.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/CachingTier.java index f68530d5ff..cdaf58a46d 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/tiering/CachingTier.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/CachingTier.java @@ -17,7 +17,7 @@ package org.ehcache.core.spi.store.tiering; import org.ehcache.config.ResourceType; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.ConfigurationChangeSupport; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.service.PluralService; @@ -53,6 +53,20 @@ public interface CachingTier extends ConfigurationChangeSupport { */ Store.ValueHolder getOrComputeIfAbsent(K key, Function> source) throws StoreAccessException; + /** + * Either return the value holder currently in the caching tier, or return the provided default. + *

+ * Note that in case of expired value holders, {@code null} will be returned and the mapping will be invalidated. + * + * @param key the key + * @param source the function that computes the default value when absent from this tier + * + * @return the value holder, or {@code null} + * + * @throws StoreAccessException if the mapping cannot be retrieved or stored + */ + Store.ValueHolder getOrDefault(K key, Function> source) throws StoreAccessException; + /** * Removes a mapping, triggering the {@link InvalidationListener} if registered. * @@ -131,7 +145,7 @@ interface Provider extends Service { * * @return the new caching tier */ - CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs); + CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs); /** * Releases a {@link CachingTier}. @@ -163,7 +177,7 @@ interface Provider extends Service { * to handle the resource types specified by {@code resourceTypes}; a rank of 0 indicates the caching tier * can not handle the type specified in {@code resourceTypes} */ - int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs); + int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs); } } diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java similarity index 96% rename from core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java index 685aa1fc19..bda5705091 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java @@ -16,7 +16,7 @@ package org.ehcache.core.spi.store.tiering; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; @@ -77,7 +77,7 @@ interface Provider extends Service { * * @return the new higher caching tier */ - HigherCachingTier createHigherCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs); + HigherCachingTier createHigherCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs); /** * Releases a {@link HigherCachingTier}. diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java similarity index 91% rename from core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java index 62d9384f6f..d9e2860e01 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java @@ -18,7 +18,7 @@ import org.ehcache.core.spi.store.ConfigurationChangeSupport; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -48,6 +48,16 @@ public interface LowerCachingTier extends ConfigurationChangeSupport { */ Store.ValueHolder installMapping(K key, Function> source) throws StoreAccessException; + /** + * Return the value holder currently in this tier. + * + * @param key the key + * @return the value holder, or {@code null} + * + * @throws StoreAccessException if the mapping cannot be access + */ + Store.ValueHolder get(K key) throws StoreAccessException; + /** * Return the value holder currently in this tier and removes it atomically. * @@ -114,7 +124,7 @@ interface Provider extends Service { * * @return the new lower caching tier */ - LowerCachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs); + LowerCachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs); /** * Releases a {@link LowerCachingTier}. diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/store/tiering/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/store/tiering/package-info.java diff --git a/core/src/main/java/org/ehcache/core/spi/time/SystemTimeSource.java b/ehcache-core/src/main/java/org/ehcache/core/spi/time/SystemTimeSource.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/time/SystemTimeSource.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/time/SystemTimeSource.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/spi/time/TickingTimeSource.java b/ehcache-core/src/main/java/org/ehcache/core/spi/time/TickingTimeSource.java new file mode 100644 index 0000000000..de68130cb0 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/time/TickingTimeSource.java @@ -0,0 +1,86 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.time; + +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; + +import java.util.Timer; +import java.util.TimerTask; + +/** + * A {@link TimeSource} that increases the time in background using a timer. This time usually gives better performances + * to Ehcache than the default {@link SystemTimeSource}. However, it will create a background thread that will continuously + * wake up to update the time. + *

+ * It works be increasing the time at a given granularity. So if you set the granularity at 10ms, a timer is called every + * 10ms and will increase the current time of 10ms. This will cause the current time to diverge a bit from the system time + * after a while. So you specify a system update period where the current time will be reset to the system time. + */ +public class TickingTimeSource implements TimeSource, Service { + + private final long granularity; + private final long systemUpdatePeriod; + + private volatile long currentTime; + private volatile long lastUpdate; + + private final Timer timer = new Timer("Ehcache-TickingTimeSource-timer", true); + + /** + * Constructor to create a ticking time source. + * + * @param granularity how long in milliseconds between each timer call to increment the current time + * @param systemUpdatePeriod how long between resets of the current time to system time + */ + public TickingTimeSource(long granularity, long systemUpdatePeriod) { + this.granularity = granularity; + this.systemUpdatePeriod = systemUpdatePeriod; + } + + private void updateToSystemTime() { + long time = System.currentTimeMillis(); + currentTime = time; + lastUpdate = time; + } + + @Override + public long getTimeMillis() { + return currentTime; + } + + @Override + public void start(ServiceProvider serviceProvider) { + updateToSystemTime(); + timer.scheduleAtFixedRate(new TimerTask() { + @Override + public void run() { + if (currentTime - lastUpdate >= systemUpdatePeriod) { + updateToSystemTime(); + } else { + currentTime += granularity; + } + } + }, granularity, granularity); + } + + @Override + public void stop() { + timer.cancel(); + timer.purge(); + } +} diff --git a/core/src/main/java/org/ehcache/core/spi/time/TimeSource.java b/ehcache-core/src/main/java/org/ehcache/core/spi/time/TimeSource.java similarity index 97% rename from core/src/main/java/org/ehcache/core/spi/time/TimeSource.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/time/TimeSource.java index 22d61070a5..b2a1612aab 100644 --- a/core/src/main/java/org/ehcache/core/spi/time/TimeSource.java +++ b/ehcache-core/src/main/java/org/ehcache/core/spi/time/TimeSource.java @@ -22,6 +22,7 @@ * The main purpose of this interface is to allow tests to control time arbitrarily (as opposed to using system time * and sleep()'ing to advance time. */ +@FunctionalInterface public interface TimeSource { /** diff --git a/core/src/main/java/org/ehcache/core/spi/time/TimeSourceService.java b/ehcache-core/src/main/java/org/ehcache/core/spi/time/TimeSourceService.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/time/TimeSourceService.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/time/TimeSourceService.java diff --git a/core/src/main/java/org/ehcache/core/spi/time/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/spi/time/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/spi/time/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/spi/time/package-info.java diff --git a/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java similarity index 100% rename from core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java diff --git a/core/src/main/java/org/ehcache/core/statistics/BulkOps.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/BulkOps.java similarity index 98% rename from core/src/main/java/org/ehcache/core/statistics/BulkOps.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/BulkOps.java index f2802f2657..bbbc1ce4ed 100644 --- a/core/src/main/java/org/ehcache/core/statistics/BulkOps.java +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/BulkOps.java @@ -44,5 +44,5 @@ public enum BulkOps { /** * The "put all" bulk operation performing an update */ - UPDATE_ALL; + UPDATE_ALL } diff --git a/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java similarity index 93% rename from core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java index 9e2bc30c30..a686dd68b5 100755 --- a/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java @@ -45,7 +45,7 @@ enum GetOutcome implements CacheOperationOutcomes { MISS, /** failure */ FAILURE - }; + } /** * Outcomes for cache getAll operation @@ -71,7 +71,7 @@ enum PutOutcome implements CacheOperationOutcomes { NOOP, /** failure */ FAILURE - }; + } /** * Outcomes for cache putAll operation @@ -97,7 +97,7 @@ enum RemoveOutcome implements CacheOperationOutcomes { NOOP, /** failure */ FAILURE - }; + } /** * Outcomes for cache removeAll operation @@ -133,17 +133,7 @@ enum ConditionalRemoveOutcome implements CacheOperationOutcomes { * Operation failure */ FAILURE - }; - - /** - * The cache loading outcomes. - */ - enum CacheLoadingOutcome implements CacheOperationOutcomes { - /** success. */ - SUCCESS, - /** failure */ - FAILURE - }; + } /** * The putIfAbsent outcomes. @@ -161,7 +151,7 @@ enum PutIfAbsentOutcome implements CacheOperationOutcomes { * operation failure */ FAILURE - }; + } /** * The replace outcomes. @@ -183,5 +173,5 @@ enum ReplaceOutcome implements CacheOperationOutcomes { * operation failure */ FAILURE - }; + } } diff --git a/core/src/main/java/org/ehcache/core/statistics/CacheStatistics.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/CacheStatistics.java similarity index 78% rename from core/src/main/java/org/ehcache/core/statistics/CacheStatistics.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/CacheStatistics.java index bab19d1081..a115c2a5cf 100644 --- a/core/src/main/java/org/ehcache/core/statistics/CacheStatistics.java +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/CacheStatistics.java @@ -24,18 +24,22 @@ public interface CacheStatistics { /** - * List of statistics tracked on this cache + * Map of tier statistics on this cache. Per tier name * - * @return a map of statistics per name + * @return tier statistics per tier name */ - Map getKnownStatistics(); + Map getTierStatistics(); /** - * Map of tier statistics on this cache. Per tier name + * Register a derived statistic to one of the existing statistic. * - * @return tier statistics per tier name + * @param outcomeClass the enum of the possible outcomes + * @param statName name of the statistic we are looking for + * @param derivedStatistic derived statistic to register + * @param type of the outcome + * @param type of the derived statistic */ - Map getTierStatistics(); + , S extends ChainedOperationObserver> void registerDerivedStatistic(Class outcomeClass, String statName, S derivedStatistic); /** * Reset the values for this cache and its underlying tiers. @@ -107,25 +111,4 @@ public interface CacheStatistics { * @return expiration count */ long getCacheExpirations(); - - /** - * The average response time of a get on the cache since its creation or the latest {@link #clear()} - * - * @return average get response time - */ - float getCacheAverageGetTime(); - - /** - * The average response time of a put on the cache since its creation or the latest {@link #clear()} - * - * @return average put response time - */ - float getCacheAveragePutTime(); - - /** - * The average response time of a remove on the cache since its creation or the latest {@link #clear()} - * - * @return average remove response time - */ - float getCacheAverageRemoveTime(); } diff --git a/core/src/main/java/org/ehcache/core/statistics/CachingTierOperationOutcomes.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/CachingTierOperationOutcomes.java similarity index 100% rename from core/src/main/java/org/ehcache/core/statistics/CachingTierOperationOutcomes.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/CachingTierOperationOutcomes.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/ChainedObserver.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/ChainedObserver.java new file mode 100644 index 0000000000..19b0cbac93 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/ChainedObserver.java @@ -0,0 +1,19 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.statistics; + +public interface ChainedObserver { +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/ChainedOperationObserver.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/ChainedOperationObserver.java new file mode 100644 index 0000000000..1dc95ff0dc --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/ChainedOperationObserver.java @@ -0,0 +1,24 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.statistics; + +public interface ChainedOperationObserver> extends ChainedObserver { + + void begin(long time); + + void end(long time, long latency, T result); + +} diff --git a/core/src/main/java/org/ehcache/core/statistics/HigherCachingTierOperationOutcomes.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/HigherCachingTierOperationOutcomes.java similarity index 100% rename from core/src/main/java/org/ehcache/core/statistics/HigherCachingTierOperationOutcomes.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/HigherCachingTierOperationOutcomes.java diff --git a/core/src/main/java/org/ehcache/core/statistics/LowerCachingTierOperationsOutcome.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/LowerCachingTierOperationsOutcome.java similarity index 100% rename from core/src/main/java/org/ehcache/core/statistics/LowerCachingTierOperationsOutcome.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/LowerCachingTierOperationsOutcome.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/OperationObserver.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/OperationObserver.java new file mode 100644 index 0000000000..55fb6700c5 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/OperationObserver.java @@ -0,0 +1,47 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.statistics; + +/** + * Operation observers track the occurrence of processes which take a finite time + * and can potential terminate in different ways. + *

+ * Operations must have an associated enum type that represents their possible + * outcomes. An example of such an enum type would be: + *

+ * enum PlaneFlight {
+ *   LAND, CRASH;
+ * }
+ * 
+ * + * @param Enum type representing the possible operations 'results' + */ +public interface OperationObserver> { + + /** + * Called immediately prior to the operation beginning. + */ + void begin(); + + /** + * Called immediately after the operation completes with no interesting parameters, and with the same thread the called {{@link #begin()}} before. + * + * @param result the operation result + */ + void end(T result); + +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/OperationStatistic.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/OperationStatistic.java new file mode 100644 index 0000000000..afd1aabdf7 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/OperationStatistic.java @@ -0,0 +1,54 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.statistics; + +import java.util.Set; + +import static org.ehcache.core.statistics.SuppliedValueStatistic.counter; + + +public interface OperationStatistic> extends OperationObserver, SourceStatistic> { + + Class type(); + + /** + * Return a {@link ValueStatistic} returning the count for the given result. + * + * @param result the result of interest + * @return a {@code ValueStatistic} instance + */ + default ValueStatistic statistic(T result) { + return counter(() -> count(result)); + } + + default ValueStatistic statistic(Set results) { + return counter(() -> sum(results)); + } + + /** + * Return the count of operations with the given type. + * + * @param type the result type + * @return the operation count + */ + long count(T type); + + long sum(Set types); + + long sum(); + +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/SourceStatistic.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/SourceStatistic.java new file mode 100644 index 0000000000..b9472ab6f5 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/SourceStatistic.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.statistics; + +import java.util.Collection; + +public interface SourceStatistic { + + /** + * Register the given {@code Observer} to be called by this {@code SourceStatistic} + * + * @param derived statistic to be registered + */ + void addDerivedStatistic(T derived); + + /** + * Remove the given registered {@code Observer} from this {@code SourceStatistic}. + * + * @param derived statistic to be removed + */ + void removeDerivedStatistic(T derived); + + /** + * Retrieve all registered statistics. + * + * @return an unmodifiable collection of all derived statistics + */ + Collection getDerivedStatistics(); +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/StatisticType.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/StatisticType.java new file mode 100644 index 0000000000..f24336185a --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/StatisticType.java @@ -0,0 +1,22 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.statistics; + +public enum StatisticType { + COUNTER, + GAUGE +} diff --git a/core/src/main/java/org/ehcache/core/statistics/StoreOperationOutcomes.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/StoreOperationOutcomes.java similarity index 99% rename from core/src/main/java/org/ehcache/core/statistics/StoreOperationOutcomes.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/StoreOperationOutcomes.java index 3dfbf6df7d..82f3b0d6b2 100755 --- a/core/src/main/java/org/ehcache/core/statistics/StoreOperationOutcomes.java +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/StoreOperationOutcomes.java @@ -174,7 +174,7 @@ enum EvictionOutcome implements StoreOperationOutcomes { SUCCESS, /** failure */ FAILURE - }; + } /** * Outcomes for expiration diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/SuppliedValueStatistic.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/SuppliedValueStatistic.java new file mode 100644 index 0000000000..2ac1974951 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/SuppliedValueStatistic.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.statistics; + +import java.io.Serializable; +import java.util.Objects; +import java.util.function.Supplier; + + +/** + * This class can be used to create a {@link ValueStatistic} with a specific {@link StatisticType} + * which value is given by a provided {@link Supplier} + * + * @author Mathieu Carbou + */ +public class SuppliedValueStatistic implements ValueStatistic { + + private final Supplier supplier; + private final StatisticType type; + + public SuppliedValueStatistic(StatisticType type, Supplier supplier) { + this.type = Objects.requireNonNull(type); + this.supplier = Objects.requireNonNull(supplier); + } + + @Override + public T value() { + return supplier.get(); + } + + @Override + public StatisticType type() { + return type; + } + + public static ValueStatistic counter(Supplier supplier) { + return supply(StatisticType.COUNTER, supplier); + } + + public static ValueStatistic gauge(Supplier supplier) { + return supply(StatisticType.GAUGE, supplier); + } + + public static ValueStatistic supply(StatisticType type, Supplier supplier) { + return new SuppliedValueStatistic<>(type, supplier); + } +} diff --git a/core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java similarity index 100% rename from core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java diff --git a/core/src/main/java/org/ehcache/core/statistics/TierStatistics.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/TierStatistics.java similarity index 87% rename from core/src/main/java/org/ehcache/core/statistics/TierStatistics.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/TierStatistics.java index d259a1f220..1335148bd2 100644 --- a/core/src/main/java/org/ehcache/core/statistics/TierStatistics.java +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/TierStatistics.java @@ -24,14 +24,7 @@ public interface TierStatistics { /** - * List of statistics tracked on this cache - * - * @return a map of statistics per name - */ - Map getKnownStatistics(); - - /** - * Reset the values for this tier. However, note that {@code mapping, maxMappings, allocatedMemory, occupiedMemory} + * Reset the values for this tier. However, note that {@code mapping, allocatedMemory, occupiedMemory} * won't be reset since it doesn't make sense. *

* Implementation note: Calling clear doesn't really clear the data. It freezes the actual values and compensate @@ -88,13 +81,6 @@ public interface TierStatistics { */ long getMappings(); - /** - * Maximum number of entries that was contained in this tier - * - * @return number of entries - */ - long getMaxMappings(); - /** * How many bytes are currently allocated (occupied or not) for this tier * diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/ValueStatistic.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/ValueStatistic.java new file mode 100644 index 0000000000..885e03de62 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/ValueStatistic.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.statistics; + +import java.io.Serializable; + +public interface ValueStatistic { + + /** + * @return The statistic type + */ + StatisticType type(); + + /** + * @return The current statistic value + */ + T value(); + +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/statistics/ZeroOperationStatistic.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/ZeroOperationStatistic.java new file mode 100644 index 0000000000..7a09e6cebc --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/statistics/ZeroOperationStatistic.java @@ -0,0 +1,75 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.statistics; + +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +public class ZeroOperationStatistic> implements OperationStatistic { + + private static final OperationStatistic INSTANCE = new ZeroOperationStatistic<>(); + + @SuppressWarnings("unchecked") + public static > OperationStatistic get() { + return (OperationStatistic) INSTANCE; + } + + @Override + public Class type() { + return null; + } + + @Override + public long count(T type) { + return 0; + } + + @Override + public long sum(Set types) { + return 0; + } + + @Override + public long sum() { + return 0; + } + + @Override + public void addDerivedStatistic(ChainedOperationObserver derived) { + + } + + @Override + public void removeDerivedStatistic(ChainedOperationObserver derived) { + + } + + @Override + public Collection> getDerivedStatistics() { + return Collections.emptyList(); + } + + @Override + public void begin() { + + } + + @Override + public void end(T result) { + + } +} diff --git a/core/src/main/java/org/ehcache/core/statistics/package-info.java b/ehcache-core/src/main/java/org/ehcache/core/statistics/package-info.java similarity index 100% rename from core/src/main/java/org/ehcache/core/statistics/package-info.java rename to ehcache-core/src/main/java/org/ehcache/core/statistics/package-info.java diff --git a/ehcache-core/src/main/java/org/ehcache/core/store/StoreConfigurationImpl.java b/ehcache-core/src/main/java/org/ehcache/core/store/StoreConfigurationImpl.java new file mode 100644 index 0000000000..30fe9c2d49 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/store/StoreConfigurationImpl.java @@ -0,0 +1,251 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.store; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.core.spi.store.Store; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.serialization.Serializer; + +/** + * Implementation of the {@link org.ehcache.core.spi.store.Store.Configuration store configuration interface} as used by + * {@link org.ehcache.core.EhcacheManager EhcacheManager} in order to prepare {@link Store} creation. + */ +public class StoreConfigurationImpl implements Store.Configuration { + + private final Class keyType; + private final Class valueType; + private final EvictionAdvisor evictionAdvisor; + private final ClassLoader classLoader; + private final ExpiryPolicy expiry; + private final ResourcePools resourcePools; + private final Serializer keySerializer; + private final Serializer valueSerializer; + private final int dispatcherConcurrency; + private final boolean operationStatisticsEnabled; + private final CacheLoaderWriter cacheLoaderWriter; + private final boolean useLoaderInAtomics; + + /** + * Creates a new {@code StoreConfigurationImpl} based on the provided parameters. + * + * @param cacheConfig the cache configuration + * @param dispatcherConcurrency the level of concurrency for ordered events + * @param keySerializer the key serializer + * @param valueSerializer the value serializer + */ + public StoreConfigurationImpl(CacheConfiguration cacheConfig, int dispatcherConcurrency, + Serializer keySerializer, Serializer valueSerializer) { + this(cacheConfig.getKeyType(), cacheConfig.getValueType(), cacheConfig.getEvictionAdvisor(), + cacheConfig.getClassLoader(), cacheConfig.getExpiryPolicy(), cacheConfig.getResourcePools(), + dispatcherConcurrency, true, keySerializer, valueSerializer, null, false); + } + + /** + * Creates a new {@code StoreConfigurationImpl} based on the provided parameters. + * + * @param cacheConfig the cache configuration + * @param dispatcherConcurrency the level of concurrency for ordered events + * @param operationStatisticsEnabled if operation statistics should be enabled + * @param keySerializer the key serializer + * @param valueSerializer the value serializer + */ + public StoreConfigurationImpl(CacheConfiguration cacheConfig, int dispatcherConcurrency, boolean operationStatisticsEnabled, + Serializer keySerializer, Serializer valueSerializer, + CacheLoaderWriter cacheLoaderWriter, boolean useLoaderInAtomics) { + this(cacheConfig.getKeyType(), cacheConfig.getValueType(), cacheConfig.getEvictionAdvisor(), + cacheConfig.getClassLoader(), cacheConfig.getExpiryPolicy(), cacheConfig.getResourcePools(), + dispatcherConcurrency, operationStatisticsEnabled, keySerializer, valueSerializer, cacheLoaderWriter, useLoaderInAtomics); + } + + /** + * Creates a new {@code StoreConfigurationImpl} based on the provided parameters. + * + * @param keyType the key type + * @param valueType the value type + * @param evictionAdvisor the eviction advisor + * @param classLoader the class loader + * @param expiry the expiry policy + * @param resourcePools the resource pools + * @param dispatcherConcurrency the level of concurrency for ordered events + * @param keySerializer the key serializer + * @param valueSerializer the value serializer + */ + public StoreConfigurationImpl(Class keyType, Class valueType, + EvictionAdvisor evictionAdvisor, + ClassLoader classLoader, ExpiryPolicy expiry, + ResourcePools resourcePools, int dispatcherConcurrency, + Serializer keySerializer, Serializer valueSerializer) { + this(keyType, valueType, evictionAdvisor, classLoader, expiry, resourcePools, dispatcherConcurrency, + true, keySerializer, valueSerializer, null, false); + } + + /** + * Creates a new {@code StoreConfigurationImpl} based on the provided parameters. + * + * @param keyType the key type + * @param valueType the value type + * @param evictionAdvisor the eviction advisor + * @param classLoader the class loader + * @param expiry the expiry policy + * @param resourcePools the resource pools + * @param dispatcherConcurrency the level of concurrency for ordered events + * @param keySerializer the key serializer + * @param valueSerializer the value serializer + * @param cacheLoaderWriter the loader-writer + */ + public StoreConfigurationImpl(Class keyType, Class valueType, + EvictionAdvisor evictionAdvisor, + ClassLoader classLoader, ExpiryPolicy expiry, + ResourcePools resourcePools, int dispatcherConcurrency, + Serializer keySerializer, Serializer valueSerializer, CacheLoaderWriter cacheLoaderWriter) { + this(keyType, valueType, evictionAdvisor, classLoader, expiry, resourcePools, dispatcherConcurrency, + true, keySerializer, valueSerializer, cacheLoaderWriter, false); + } + + /** + * Creates a new {@code StoreConfigurationImpl} based on the provided parameters. + * + * @param keyType the key type + * @param valueType the value type + * @param evictionAdvisor the eviction advisor + * @param classLoader the class loader + * @param expiry the expiry policy + * @param resourcePools the resource pools + * @param dispatcherConcurrency the level of concurrency for ordered events + * @param operationStatisticsEnabled if operation statistics should be enabled + * @param keySerializer the key serializer + * @param valueSerializer the value serializer + * @param cacheLoaderWriter the loader-writer + */ + public StoreConfigurationImpl(Class keyType, Class valueType, + EvictionAdvisor evictionAdvisor, + ClassLoader classLoader, ExpiryPolicy expiry, + ResourcePools resourcePools, int dispatcherConcurrency, boolean operationStatisticsEnabled, + Serializer keySerializer, Serializer valueSerializer, + CacheLoaderWriter cacheLoaderWriter, boolean useLoaderInAtomics) { + this.keyType = keyType; + this.valueType = valueType; + this.evictionAdvisor = evictionAdvisor; + this.classLoader = classLoader; + this.expiry = expiry; + this.resourcePools = resourcePools; + this.keySerializer = keySerializer; + this.valueSerializer = valueSerializer; + this.dispatcherConcurrency = dispatcherConcurrency; + this.operationStatisticsEnabled = operationStatisticsEnabled; + this.cacheLoaderWriter = cacheLoaderWriter; + this.useLoaderInAtomics = useLoaderInAtomics; + + } + + /** + * {@inheritDoc} + */ + @Override + public Class getKeyType() { + return keyType; + } + + /** + * {@inheritDoc} + */ + @Override + public Class getValueType() { + return valueType; + } + + /** + * {@inheritDoc} + */ + @Override + public EvictionAdvisor getEvictionAdvisor() { + return evictionAdvisor; + } + + /** + * {@inheritDoc} + */ + @Override + public ClassLoader getClassLoader() { + return this.classLoader; + } + + /** + * {@inheritDoc} + */ + @Override + public ExpiryPolicy getExpiry() { + return expiry; + } + + /** + * {@inheritDoc} + */ + @Override + public ResourcePools getResourcePools() { + return resourcePools; + } + + /** + * {@inheritDoc} + */ + @Override + public Serializer getKeySerializer() { + return keySerializer; + } + + /** + * {@inheritDoc} + */ + @Override + public Serializer getValueSerializer() { + return valueSerializer; + } + + /** + * {@inheritDoc} + */ + @Override + public int getDispatcherConcurrency() { + return dispatcherConcurrency; + } + + /** + * {@inheritDoc} + */ + @Override + public boolean isOperationStatisticsEnabled() { + return operationStatisticsEnabled; + } + + /** + * {@inheritDoc} + */ + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return this.cacheLoaderWriter; + } + + @Override + public boolean useLoaderInAtomics() { + return this.useLoaderInAtomics; + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/store/StoreSupport.java b/ehcache-core/src/main/java/org/ehcache/core/store/StoreSupport.java new file mode 100644 index 0000000000..5194f6741c --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/store/StoreSupport.java @@ -0,0 +1,128 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.store; + +import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.WrapperStore; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Defines methods supporting working with {@link Store} implementations. + */ +public final class StoreSupport { + /** + * Private, niladic constructor to prevent instantiation. + */ + private StoreSupport() { + } + + public static Store.Provider selectWrapperStoreProvider(ServiceProvider serviceProvider, Collection> serviceConfigs) { + Collection storeProviders = serviceProvider.getServicesOfType(WrapperStore.Provider.class); + Optional> wrapperProvider = storeProviders.stream() + .map(provider -> new Tuple<>(provider.wrapperStoreRank(serviceConfigs), provider)) + .filter(providerTuple -> providerTuple.x != 0) + .max(Comparator.comparingInt(value -> value.x)); + return wrapperProvider.map(providerTuple -> providerTuple.y).orElse(null); + } + + private static class Tuple { + final X x; + final Y y; + Tuple(X x, Y y) { + this.x = x; + this.y = y; + } + } + + /** + * Chooses a {@link org.ehcache.core.spi.store.Store.Provider Store.Provider} from those + * available through the {@link ServiceLocator} that best supports the resource types and + * service configurations provided. This method relies on the + * {@link Store.Provider#rank(Set, Collection) Store.Provider.rank} method in making the + * selection. + * + * @param serviceProvider the {@code ServiceProvider} instance to use + * @param resourceTypes the set of {@code ResourceType}s that must be supported by the provider + * @param serviceConfigs the collection of {@code ServiceConfiguration}s used to influence the + * selection + * + * @return the non-{@code null} {@code Store.Provider} implementation chosen + * + * @throws IllegalStateException if no suitable {@code Store.Provider} is available or if + * multiple {@code Store.Provider} implementations return the same top ranking + */ + public static Store.Provider selectStoreProvider( + ServiceProvider serviceProvider, Set> resourceTypes, Collection> serviceConfigs) { + + Collection storeProviders = serviceProvider.getServicesOfType(Store.Provider.class); + List filteredStoreProviders = storeProviders.stream().filter(provider -> !(provider instanceof WrapperStore.Provider)).collect(Collectors.toList()); + int highRank = 0; + List rankingProviders = new ArrayList<>(); + for (Store.Provider provider : filteredStoreProviders) { + int rank = provider.rank(resourceTypes, serviceConfigs); + if (rank > highRank) { + highRank = rank; + rankingProviders.clear(); + rankingProviders.add(provider); + } else if (rank != 0 && rank == highRank) { + rankingProviders.add(provider); + } + } + + if (rankingProviders.isEmpty()) { + StringBuilder sb = new StringBuilder("No Store.Provider found to handle configured resource types "); + sb.append(resourceTypes); + sb.append(" from "); + formatStoreProviders(filteredStoreProviders, sb); + throw new IllegalStateException(sb.toString()); + } else if (rankingProviders.size() > 1) { + StringBuilder sb = new StringBuilder("Multiple Store.Providers found to handle configured resource types "); + sb.append(resourceTypes); + sb.append(": "); + formatStoreProviders(rankingProviders, sb); + throw new IllegalStateException(sb.toString()); + } + + return rankingProviders.get(0); + } + + private static void formatStoreProviders(final Collection storeProviders, final StringBuilder sb) { + sb.append('{'); + boolean prependSeparator = false; + for (final Store.Provider provider : storeProviders) { + if (prependSeparator) { + sb.append(", "); + } else { + prependSeparator = true; + } + sb.append(provider.getClass().getName()); + } + sb.append('}'); + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/util/ByteBufferInputStream.java b/ehcache-core/src/main/java/org/ehcache/core/util/ByteBufferInputStream.java new file mode 100644 index 0000000000..e43ca5370f --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/util/ByteBufferInputStream.java @@ -0,0 +1,60 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.util; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; + +import static java.lang.Math.max; +import static java.lang.Math.min; + +public class ByteBufferInputStream extends InputStream { + + private final ByteBuffer buffer; + + public ByteBufferInputStream(ByteBuffer buffer) { + this.buffer = buffer.slice(); + } + + @Override + public int read() { + if (buffer.hasRemaining()) { + return 0xff & buffer.get(); + } else { + return -1; + } + } + + @Override + public int read(byte b[], int off, int len) { + len = min(len, buffer.remaining()); + buffer.get(b, off, len); + return len; + } + + @Override + public long skip(long n) { + n = min(buffer.remaining(), max(n, 0)); + buffer.position((int) (buffer.position() + n)); + return n; + } + + @Override + public synchronized int available() { + return buffer.remaining(); + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/util/ClassLoading.java b/ehcache-core/src/main/java/org/ehcache/core/util/ClassLoading.java new file mode 100644 index 0000000000..a17a34d901 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/util/ClassLoading.java @@ -0,0 +1,119 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.util; + +import org.ehcache.core.osgi.SafeOsgi; +import org.ehcache.core.osgi.OsgiServiceLoader; + +import java.io.IOException; +import java.net.URL; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Enumeration; +import java.util.List; +import java.util.ServiceLoader; +import java.util.function.Supplier; + +import static java.security.AccessController.doPrivileged; +import static java.util.Collections.enumeration; +import static java.util.Collections.list; +import static java.util.stream.Collectors.toList; +import static java.util.stream.Stream.concat; +import static java.util.stream.Stream.of; + +public class ClassLoading { + + private static final ClassLoader DEFAULT_CLASSLOADER; + + static { + DEFAULT_CLASSLOADER = delegationChain(() -> Thread.currentThread().getContextClassLoader(), ChainedClassLoader.class.getClassLoader()); + } + + public static ClassLoader getDefaultClassLoader() { + return DEFAULT_CLASSLOADER; + } + + public static Iterable servicesOfType(Class serviceType) { + if (SafeOsgi.useOSGiServiceLoading()) { + return OsgiServiceLoader.load(serviceType); + } else { + return ServiceLoader.load(serviceType, ClassLoading.class.getClassLoader()); + } + } + + @SuppressWarnings("unchecked") + public static ClassLoader delegationChain(Supplier loader, ClassLoader ... loaders) { + return doPrivileged((PrivilegedAction) () -> new ChainedClassLoader(concat(of(loader), of(loaders).map(l -> () -> l)).collect(toList()))); + } + + @SuppressWarnings("unchecked") + public static ClassLoader delegationChain(ClassLoader ... loaders) { + return doPrivileged((PrivilegedAction) () -> new ChainedClassLoader(of(loaders).>map(l -> () -> l).collect(toList()))); + } + + private static class ChainedClassLoader extends ClassLoader { + + private final List> loaders; + + public ChainedClassLoader(List> loaders) { + this.loaders = loaders; + } + + @Override + public Class loadClass(String name) throws ClassNotFoundException { + ClassNotFoundException lastFailure = new ClassNotFoundException(name); + for (Supplier loader : loaders) { + ClassLoader classLoader = loader.get(); + if (classLoader != null) { + try { + return classLoader.loadClass(name); + } catch (ClassNotFoundException cnfe) { + lastFailure = cnfe; + } + } + } + throw lastFailure; + } + + @Override + public URL getResource(String name) { + for (Supplier loader : loaders) { + ClassLoader classLoader = loader.get(); + if (classLoader != null) { + URL resource = classLoader.getResource(name); + if (resource != null) { + return resource; + } + } + } + return null; + } + + @Override + public Enumeration getResources(String name) throws IOException { + Collection aggregate = new ArrayList<>(); + for (Supplier loader : loaders) { + ClassLoader classLoader = loader.get(); + if (classLoader != null) { + aggregate.addAll(list(classLoader.getResources(name))); + } + } + return enumeration(aggregate); + } + } +} diff --git a/ehcache-core/src/main/java/org/ehcache/core/util/CollectionUtil.java b/ehcache-core/src/main/java/org/ehcache/core/util/CollectionUtil.java new file mode 100644 index 0000000000..9acc986ac5 --- /dev/null +++ b/ehcache-core/src/main/java/org/ehcache/core/util/CollectionUtil.java @@ -0,0 +1,62 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.util; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +/** + * Multiple useful methods to play with collections. + */ +public final class CollectionUtil { + + private CollectionUtil() {} + + /** + * Used to create a new collection with the correct size. Given an iterable, will try to see it the iterable actually + * have a size and will return it. If the iterable has no known size, we return the best bet. + * + * @param iterable the iterable we will try to find the size of + * @param bestBet our best bet for the size if the iterable is not sizeable + * @return the size of the iterable if found or null + */ + public static int findBestCollectionSize(Iterable iterable, int bestBet) { + return (iterable instanceof Collection ? ((Collection) iterable).size() : bestBet); + } + + /** + * Copy each map entry to a new map but check that each key and value isn't null. Throw + * a {@code NullPointerException} if it's the case. + * + * @param entries entries to copy + * @param type of key + * @param type of value + * @return cloned map + * @throws NullPointerException if a key or value is null + */ + public static Map copyMapButFailOnNull(Map entries) { + Map entriesToRemap = new HashMap<>(entries.size()); + entries.forEach((k, v) -> { + // If a key/value is null, throw NPE, nothing gets mutated + if (k == null || v == null) { + throw new NullPointerException(); + } + entriesToRemap.put(k, v); + }); + return entriesToRemap; + } +} diff --git a/ehcache-core/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/ehcache-core/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory new file mode 100644 index 0000000000..7bcce30799 --- /dev/null +++ b/ehcache-core/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory @@ -0,0 +1 @@ +org.ehcache.core.internal.statistics.DefaultStatisticsServiceFactory diff --git a/core/src/test/java/org/ehcache/core/CacheConfigurationChangeListenerTest.java b/ehcache-core/src/test/java/org/ehcache/core/CacheConfigurationChangeListenerTest.java similarity index 78% rename from core/src/test/java/org/ehcache/core/CacheConfigurationChangeListenerTest.java rename to ehcache-core/src/test/java/org/ehcache/core/CacheConfigurationChangeListenerTest.java index 4337e243d4..8b7272811d 100644 --- a/core/src/test/java/org/ehcache/core/CacheConfigurationChangeListenerTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/CacheConfigurationChangeListenerTest.java @@ -17,11 +17,11 @@ package org.ehcache.core; import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; -import org.ehcache.core.config.BaseCacheConfiguration; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.spi.store.Store; +import org.ehcache.core.util.TestCacheConfig; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.ResilienceStrategy; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -33,6 +33,7 @@ import java.util.List; import java.util.Set; +import static org.ehcache.core.config.ResourcePoolsHelper.createResourcePools; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -45,17 +46,18 @@ public class CacheConfigurationChangeListenerTest { private CacheEventDispatcher eventNotifier; private EhcacheRuntimeConfiguration runtimeConfiguration; private CacheConfiguration config; - private EhcacheWithLoaderWriter cache; + private Ehcache cache; @SuppressWarnings({ "unchecked"}) @Before public void setUp() throws Exception { this.store = mock(Store.class); this.eventNotifier = mock(CacheEventDispatcher.class); + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); - this.config = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapDiskPools(2, 10)); - this.cache = new EhcacheWithLoaderWriter<>(config, store, loaderWriter, eventNotifier, - LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "CacheConfigurationListenerTest")); + this.config = new TestCacheConfig<>(Object.class, Object.class, createResourcePools(2L)); + this.cache = new Ehcache<>(config, store, resilienceStrategy, eventNotifier, + LoggerFactory.getLogger(Ehcache.class + "-" + "CacheConfigurationListenerTest"), loaderWriter); cache.init(); this.runtimeConfiguration = (EhcacheRuntimeConfiguration)cache.getRuntimeConfiguration(); } @@ -72,7 +74,7 @@ public void testCacheConfigurationChangeFiresEvent () { = new ArrayList<>(); cacheConfigurationChangeListeners.add(configurationListener); this.runtimeConfiguration.addCacheConfigurationListener(cacheConfigurationChangeListeners); - this.cache.getRuntimeConfiguration().updateResourcePools(ResourcePoolsHelper.createHeapOnlyPools(10)); + this.cache.getRuntimeConfiguration().updateResourcePools(createResourcePools(10L)); assertThat(configurationListener.eventSet.size(), is(1) ); } @@ -83,10 +85,10 @@ public void testRemovingCacheConfigurationListener() { = new ArrayList<>(); cacheConfigurationChangeListeners.add(configurationListener); this.runtimeConfiguration.addCacheConfigurationListener(cacheConfigurationChangeListeners); - this.cache.getRuntimeConfiguration().updateResourcePools(ResourcePoolsHelper.createHeapOnlyPools(20)); + this.cache.getRuntimeConfiguration().updateResourcePools(createResourcePools(20L)); assertThat(configurationListener.eventSet.size(), is(1)); this.runtimeConfiguration.removeCacheConfigurationListener(configurationListener); - this.cache.getRuntimeConfiguration().updateResourcePools(ResourcePoolsHelper.createHeapOnlyPools(5)); + this.cache.getRuntimeConfiguration().updateResourcePools(createResourcePools(5L)); assertThat(configurationListener.eventSet.size(), is(1) ); } @@ -96,7 +98,7 @@ private class Listener implements CacheConfigurationChangeListener { @Override public void cacheConfigurationChange(CacheConfigurationChangeEvent event) { this.eventSet.add(event); - Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "GettingStarted"); + Logger logger = LoggerFactory.getLogger(Ehcache.class + "-" + "GettingStarted"); logger.info("Setting size: "+event.getNewValue().toString()); } } diff --git a/core/src/test/java/org/ehcache/core/CacheTest.java b/ehcache-core/src/test/java/org/ehcache/core/CacheTest.java similarity index 84% rename from core/src/test/java/org/ehcache/core/CacheTest.java rename to ehcache-core/src/test/java/org/ehcache/core/CacheTest.java index 627b2ff688..fdca889473 100644 --- a/core/src/test/java/org/ehcache/core/CacheTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/CacheTest.java @@ -18,23 +18,22 @@ import org.ehcache.Status; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.StateTransitionException; import org.ehcache.core.spi.LifeCycled; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.CoreMatchers; import org.junit.Test; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import java.util.Collections; import java.util.Iterator; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; import java.util.function.Function; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -226,37 +225,27 @@ public void testPutIfAbsent() throws StoreAccessException { } return new Store.ValueHolder() { @Override - public Object value() { + public Object get() { return existingValue.get(); } @Override - public long creationTime(final TimeUnit unit) { + public long creationTime() { throw new UnsupportedOperationException("Implement me!"); } @Override - public long expirationTime(TimeUnit unit) { + public long expirationTime() { throw new UnsupportedOperationException("Implement me!"); } @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { + public boolean isExpired(long expirationTime) { throw new UnsupportedOperationException("Implement me!"); } @Override - public long lastAccessTime(final TimeUnit unit) { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public float hitRate(final long now, final TimeUnit unit) { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public long hits() { + public long lastAccessTime() { throw new UnsupportedOperationException("Implement me!"); } @@ -266,44 +255,34 @@ public long getId() { } }; }); - when(store.putIfAbsent(eq("foo"), any(String.class))).then(invocation -> { + when(store.putIfAbsent(eq("foo"), any(String.class), any(Consumer.class))).then(invocation -> { final Object toReturn; if ((toReturn = existingValue.get()) == null) { existingValue.compareAndSet(null, invocation.getArguments()[1]); } return new Store.ValueHolder() { @Override - public Object value() { + public Object get() { return toReturn; } @Override - public long creationTime(final TimeUnit unit) { + public long creationTime() { throw new UnsupportedOperationException("Implement me!"); } @Override - public long expirationTime(TimeUnit unit) { + public long expirationTime() { throw new UnsupportedOperationException("Implement me!"); } @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { + public boolean isExpired(long expirationTime) { throw new UnsupportedOperationException("Implement me!"); } @Override - public long lastAccessTime(final TimeUnit unit) { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public float hitRate(final long now, final TimeUnit unit) { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public long hits() { + public long lastAccessTime() { throw new UnsupportedOperationException("Implement me!"); } @@ -316,9 +295,9 @@ public long getId() { InternalCache ehcache = getCache(store); ehcache.init(); assertThat(ehcache.putIfAbsent("foo", value), nullValue()); - assertThat(ehcache.putIfAbsent("foo", "foo"), CoreMatchers.is(value)); - assertThat(ehcache.putIfAbsent("foo", "foobar"), CoreMatchers.is(value)); - assertThat(ehcache.putIfAbsent("foo", value), CoreMatchers.is(value)); + assertThat(ehcache.putIfAbsent("foo", "foo"), CoreMatchers.is(value)); + assertThat(ehcache.putIfAbsent("foo", "foobar"), CoreMatchers.is(value)); + assertThat(ehcache.putIfAbsent("foo", value), CoreMatchers.is(value)); } @Test @@ -339,7 +318,7 @@ public void testInvokesHooks() { if (ehcache instanceof Ehcache) { ((Ehcache)ehcache).removeHook(hook); } else { - ((EhcacheWithLoaderWriter)ehcache).removeHook(hook); + ((Ehcache)ehcache).removeHook(hook); } fail(); } catch (IllegalStateException e) { diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicBulkUtil.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicBulkUtil.java similarity index 99% rename from core/src/test/java/org/ehcache/core/EhcacheBasicBulkUtil.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicBulkUtil.java index 6b251d269d..61cda7f06f 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicBulkUtil.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicBulkUtil.java @@ -31,7 +31,7 @@ import org.ehcache.spi.loaderwriter.CacheLoaderWriter; /** - * Utility methods and common data for {@link EhcacheWithLoaderWriter Ehcache} + * Utility methods and common data for {@link Ehcache} * bulk method unit tests. * * @author Clifford W. Johnson diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicClearTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicClearTest.java similarity index 86% rename from core/src/test/java/org/ehcache/core/EhcacheBasicClearTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicClearTest.java index b0b7ecb761..8b8e3579b6 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicClearTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicClearTest.java @@ -21,13 +21,13 @@ import org.ehcache.Status; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.Matchers; import org.junit.Test; import org.slf4j.LoggerFactory; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; @@ -50,7 +50,7 @@ public void testClearEmpty() throws Exception { final Ehcache ehcache = this.getEhcache(); ehcache.clear(); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(realStore.getEntryMap().isEmpty(), is(true)); } @@ -67,7 +67,7 @@ public void testClearEmptyStoreAccessException() throws Exception { final Ehcache ehcache = this.getEhcache(); ehcache.clear(); - verify(this.spiedResilienceStrategy).clearFailure(any(StoreAccessException.class)); + verify(this.resilienceStrategy).clearFailure(any(StoreAccessException.class)); } /** @@ -81,7 +81,7 @@ public void testClearNonEmpty() throws Exception { assertThat(realStore.getEntryMap().isEmpty(), is(false)); ehcache.clear(); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(realStore.getEntryMap().isEmpty(), is(true)); } @@ -99,7 +99,7 @@ public void testClearNonEmptyStoreAccessException() throws Exception { assertThat(realStore.getEntryMap().isEmpty(), is(false)); ehcache.clear(); - verify(this.spiedResilienceStrategy).clearFailure(any(StoreAccessException.class)); + verify(this.resilienceStrategy).clearFailure(any(StoreAccessException.class)); // Not testing ResilienceStrategy implementation here } @@ -120,10 +120,9 @@ private Map getTestStoreEntries() { private Ehcache getEhcache() throws Exception { final Ehcache ehcache = - new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicClearTest")); + new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicClearTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicContainsKeyTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicContainsKeyTest.java similarity index 86% rename from core/src/test/java/org/ehcache/core/EhcacheBasicContainsKeyTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicContainsKeyTest.java index 21098479f6..75ec6225f4 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicContainsKeyTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicContainsKeyTest.java @@ -21,13 +21,13 @@ import org.ehcache.Status; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.Matchers; import org.junit.Test; import org.slf4j.LoggerFactory; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; @@ -58,7 +58,7 @@ public void testContainsKeyNull() throws Exception { } catch (NullPointerException e) { // Expected } - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); } /** @@ -71,7 +71,7 @@ public void testContainsKeyEmpty() throws Exception { final Ehcache ehcache = this.getEhcache(); assertFalse(ehcache.containsKey("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); } /** @@ -87,7 +87,7 @@ public void testContainsKeyEmptyStoreAccessException() throws Exception { final Ehcache ehcache = this.getEhcache(); ehcache.containsKey("key"); - verify(this.spiedResilienceStrategy).containsKeyFailure(eq("key"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).containsKeyFailure(eq("key"), any(StoreAccessException.class)); } /** @@ -101,7 +101,7 @@ public void testContainsKeyContains() throws Exception { final Ehcache ehcache = this.getEhcache(); assertTrue(ehcache.containsKey("keyA")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); } /** @@ -117,7 +117,7 @@ public void testContainsKeyContainsStoreAccessException() throws Exception { final Ehcache ehcache = this.getEhcache(); ehcache.containsKey("keyA"); - verify(this.spiedResilienceStrategy).containsKeyFailure(eq("keyA"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).containsKeyFailure(eq("keyA"), any(StoreAccessException.class)); } /** @@ -131,7 +131,7 @@ public void testContainsKeyMissing() throws Exception { final Ehcache ehcache = this.getEhcache(); assertFalse(ehcache.containsKey("missingKey")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); } /** @@ -147,7 +147,7 @@ public void testContainsKeyMissingStoreAccessException() throws Exception { final Ehcache ehcache = this.getEhcache(); ehcache.containsKey("missingKey"); - verify(this.spiedResilienceStrategy).containsKeyFailure(eq("missingKey"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).containsKeyFailure(eq("missingKey"), any(StoreAccessException.class)); } private Map getTestStoreEntries() { @@ -167,10 +167,9 @@ private Map getTestStoreEntries() { private Ehcache getEhcache() throws Exception { final Ehcache ehcache = - new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicContainsKeyTest")); + new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicContainsKeyTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicCrudBase.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicCrudBase.java similarity index 87% rename from core/src/test/java/org/ehcache/core/EhcacheBasicCrudBase.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicCrudBase.java index 6269d9da9e..ab019d6d1b 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicCrudBase.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicCrudBase.java @@ -18,17 +18,16 @@ import org.ehcache.Cache; import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.exceptions.StorePassThroughException; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.core.util.TestCacheConfig; import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.internal.resilience.ResilienceStrategy; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.Description; import org.hamcrest.Factory; import org.hamcrest.Matcher; @@ -41,7 +40,6 @@ import org.terracotta.statistics.OperationStatistic; import org.terracotta.statistics.ValueStatistic; -import java.lang.reflect.Field; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; @@ -56,14 +54,13 @@ import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.spy; /** * Provides testing of basic CRUD operations on an {@code Ehcache}. @@ -72,23 +69,14 @@ */ public abstract class EhcacheBasicCrudBase { - protected static final CacheConfiguration CACHE_CONFIGURATION = - new BaseCacheConfiguration<>(String.class, String.class, null, - null, null, ResourcePoolsHelper.createHeapOnlyPools()); + protected static final CacheConfiguration CACHE_CONFIGURATION = new TestCacheConfig<>(String.class, String.class); @Mock protected Store store; @Mock protected CacheEventDispatcher cacheEventDispatcher; - - /** - * Holds a {@link org.mockito.Mockito#spy(Object)}-wrapped reference to the - * {@link ResilienceStrategy ResilienceStrategy} used in the - * {@link EhcacheWithLoaderWriter Ehcache} instance being tested. - * - * @see #setResilienceStrategySpy(InternalCache) - */ - protected ResilienceStrategy spiedResilienceStrategy; + @Mock + protected ResilienceStrategy resilienceStrategy; @Before public void initMocks() { @@ -207,33 +195,6 @@ protected static Supplier getBooleanSupplier() { return any(Supplier.class); // unchecked } - /** - * Replaces the {@link ResilienceStrategy ResilienceStrategy} instance in the - * {@link InternalCache Ehcache} instance provided with a - * {@link org.mockito.Mockito#spy(Object) Mockito spy} wrapping the original - * {@code ResilienceStrategy} instance. - * - * @param ehcache the {@code InternalCache} instance to alter - * - * @return the spy-wrapped {@code ResilienceStrategy} instance - */ - protected final ResilienceStrategy setResilienceStrategySpy(final InternalCache ehcache) { - assert ehcache != null; - try { - final Field resilienceStrategyField = ehcache.getClass().getDeclaredField("resilienceStrategy"); - resilienceStrategyField.setAccessible(true); - @SuppressWarnings("unchecked") - ResilienceStrategy resilienceStrategy = (ResilienceStrategy)resilienceStrategyField.get(ehcache); - if (resilienceStrategy != null) { - resilienceStrategy = spy(resilienceStrategy); - resilienceStrategyField.set(ehcache, resilienceStrategy); - } - return resilienceStrategy; - } catch (Exception e) { - throw new AssertionError(String.format("Unable to wrap ResilienceStrategy in Ehcache instance: %s", e)); - } - } - /** * Provides a basic {@link Store} implementation for testing. * The contract implemented by this {@code Store} is not strictly conformant but @@ -246,6 +207,7 @@ protected static class FakeStore implements Store { }; private static final Supplier REPLACE_EQUAL_TRUE = () -> true; + private static final Supplier INVOKE_WRITER_FALSE = () -> false; /** * The key:value pairs served by this {@code Store}. This map may be empty. @@ -258,7 +220,7 @@ protected static class FakeStore implements Store { private final Set failingKeys; public FakeStore(final Map entries) { - this(entries, Collections.emptySet()); + this(entries, Collections.emptySet()); } public FakeStore(final Map entries, final Set failingKeys) { @@ -280,10 +242,8 @@ public FakeStore(final Map entries, final Set failingKey * @return a new, unmodifiable map of the entries in this {@code Store}. */ protected Map getEntryMap() { - final Map result = new HashMap<>(); - for (final Map.Entry entry : this.entries.entrySet()) { - result.put(entry.getKey(), entry.getValue().value()); - } + Map result = new HashMap<>(entries.size()); + entries.forEach((k, v) -> result.put(k, v.get())); return Collections.unmodifiableMap(result); } @@ -312,7 +272,7 @@ public PutStatus put(final String key, final String value) throws StoreAccessExc } @Override - public ValueHolder putIfAbsent(final String key, final String value) throws StoreAccessException { + public ValueHolder putIfAbsent(final String key, final String value, Consumer put) throws StoreAccessException { this.checkFailingKey(key); final FakeValueHolder currentValue = this.entries.get(key); if (currentValue == null) { @@ -338,7 +298,7 @@ public RemoveStatus remove(final String key, final String value) throws StoreAcc final ValueHolder currentValue = this.entries.get(key); if (currentValue == null) { return RemoveStatus.KEY_MISSING; - } else if (!currentValue.value().equals(value)) { + } else if (!currentValue.get().equals(value)) { return RemoveStatus.KEY_PRESENT; } this.entries.remove(key); @@ -362,7 +322,7 @@ public ReplaceStatus replace(final String key, final String oldValue, final Stri if (currentValue == null) { return ReplaceStatus.MISS_NOT_PRESENT; } - if (!currentValue.value().equals(oldValue)) { + if (!currentValue.get().equals(oldValue)) { return ReplaceStatus.MISS_PRESENT; } this.entries.put(key, new FakeValueHolder(newValue)); @@ -427,16 +387,16 @@ public ValueHolder getValue() { * {@inheritDoc} *

* This method is implemented as - * this.{@link #compute(String, BiFunction, Supplier) compute}(keys, mappingFunction, () -> { returns true; }) + * this.{@link Store#getAndCompute(Object, BiFunction)} (keys, mappingFunction, () -> { returns true; }) */ @Override - public ValueHolder compute(final String key, final BiFunction mappingFunction) + public ValueHolder getAndCompute(final String key, final BiFunction mappingFunction) throws StoreAccessException { - return this.compute(key, mappingFunction, REPLACE_EQUAL_TRUE); + return this.computeAndGet(key, mappingFunction, REPLACE_EQUAL_TRUE, INVOKE_WRITER_FALSE); } /** - * Common core for the {@link #compute(String, BiFunction, Supplier)} method. + * Common core for the {@link Store#computeAndGet(Object, BiFunction, Supplier, Supplier)} method. * * @param key the key of the entry to process * @param currentValue the existing value, if any, for {@code key} @@ -457,9 +417,9 @@ private FakeValueHolder computeInternal( final BiFunction mappingFunction, final Supplier replaceEqual) throws StoreAccessException { - String remappedValue = null; + String remappedValue; try { - remappedValue = mappingFunction.apply(key, (currentValue == null ? null : currentValue.value())); + remappedValue = mappingFunction.apply(key, (currentValue == null ? null : currentValue.get())); } catch (StorePassThroughException cpte) { Throwable cause = cpte.getCause(); if(cause instanceof RuntimeException) { @@ -492,10 +452,10 @@ private FakeValueHolder computeInternal( } @Override - public ValueHolder compute( - final String key, - final BiFunction mappingFunction, - final Supplier replaceEqual) + public ValueHolder computeAndGet( + final String key, + final BiFunction mappingFunction, + final Supplier replaceEqual, Supplier invokeWriter) throws StoreAccessException { this.checkFailingKey(key); @@ -508,7 +468,7 @@ public ValueHolder computeIfAbsent(final String key, final Function> bulkCompute( /** * {@inheritDoc} *

- * This implementation calls {@link #compute(String, BiFunction, Supplier) - * compute(key, BiFunction, replaceEqual)} for each key presented in {@code keys}. + * This implementation calls {@link Store#computeAndGet(Object, BiFunction, Supplier, Supplier) + * } for each key presented in {@code keys}. */ @Override public Map> bulkCompute( @@ -562,14 +522,14 @@ public Map> bulkCompute( final Map> resultMap = new LinkedHashMap<>(); for (final String key : keys) { - final ValueHolder newValue = this.compute(key, + final ValueHolder newValue = this.computeAndGet(key, (key1, oldValue) -> { final Entry entry = new AbstractMap.SimpleEntry<>(key1, oldValue); final Entry remappedEntry = remappingFunction.apply(Collections.singletonList(entry)).iterator().next(); return remappedEntry.getValue(); }, - replaceEqual); + replaceEqual, INVOKE_WRITER_FALSE); resultMap.put(key, newValue); } @@ -630,38 +590,28 @@ public FakeValueHolder(final String value) { } @Override - public String value() { + public String get() { return this.value; } @Override - public long creationTime(final TimeUnit unit) { - return unit.convert(this.creationTime, TimeUnit.MICROSECONDS); + public long creationTime() { + return creationTime; } @Override - public long expirationTime(TimeUnit unit) { + public long expirationTime() { return 0; } @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { + public boolean isExpired(long expirationTime) { return false; } @Override - public long lastAccessTime(final TimeUnit unit) { - return unit.convert(this.lastAccessTime, TimeUnit.MICROSECONDS); - } - - @Override - public float hitRate(long now, final TimeUnit unit) { - return 0; - } - - @Override - public long hits() { - return 0; + public long lastAccessTime() { + return lastAccessTime; } @Override @@ -752,7 +702,7 @@ public FakeCacheLoaderWriter(final Map entries, final SetemptySet() + ? Collections.emptySet() : Collections.unmodifiableSet(new HashSet<>(failingKeys))); } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicGetAllTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicGetAllTest.java similarity index 86% rename from core/src/test/java/org/ehcache/core/EhcacheBasicGetAllTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicGetAllTest.java index 6852ba6169..55df1a5c08 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicGetAllTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicGetAllTest.java @@ -19,8 +19,8 @@ import org.ehcache.Status; import org.ehcache.core.spi.store.Store; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.statistics.BulkOps; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.Matchers; import org.junit.Test; import org.slf4j.LoggerFactory; @@ -40,10 +40,10 @@ import static org.ehcache.core.EhcacheBasicBulkUtil.getEntryMap; import static org.ehcache.core.EhcacheBasicBulkUtil.getNullEntryMap; import static org.ehcache.core.EhcacheBasicBulkUtil.union; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -91,7 +91,7 @@ public void testGetAllNullKey() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for + * Tests {@link Ehcache#getAll(Set)} for *

    *
  • empty request key set
  • *
  • no {@code CacheLoaderWriter}
  • @@ -109,14 +109,14 @@ public void testGetAllEmptyRequestNoLoader() throws Exception { assertThat(actual.isEmpty(), is(true)); verify(this.store, never()).bulkComputeIfAbsent(eq(Collections.emptySet()), getAnyIterableFunction()); - verify(this.spiedResilienceStrategy, never()).getAllFailure(eq(Collections.emptySet()), any(StoreAccessException.class)); + verify(this.resilienceStrategy, never()).getAllFailure(eq(Collections.emptySet()), any(StoreAccessException.class)); validateStatsNoneof(ehcache); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); } /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for + * Tests {@link Ehcache#getAll(Set)} for *
      *
    • non-empty request key set
    • *
    • no {@link Store} entries match
    • @@ -135,7 +135,7 @@ public void testGetAllStoreNoMatchNoLoader() throws Exception { verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_B))); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); validateStatsNoneof(ehcache); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); @@ -143,7 +143,7 @@ public void testGetAllStoreNoMatchNoLoader() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for + * Tests {@link Ehcache#getAll(Set)} for *
        *
      • non-empty request key set
      • *
      • all {@link Store} entries match
      • @@ -161,13 +161,11 @@ public void testGetAllStoreAllMatchStoreAccessExceptionBeforeNoLoader() throws E final Ehcache ehcache = this.getEhcache(); final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_B); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getNullEntryMap(fetchKeys))); + ehcache.getAll(fetchKeys); verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); // ResilienceStrategy invoked: no assertion for Store content - verify(this.spiedResilienceStrategy).getAllFailure(eq(fetchKeys), any(StoreAccessException.class)); + verify(this.resilienceStrategy).getAllFailure(eq(fetchKeys), any(StoreAccessException.class)); validateStatsNoneof(ehcache); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); @@ -175,7 +173,7 @@ public void testGetAllStoreAllMatchStoreAccessExceptionBeforeNoLoader() throws E } /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for + * Tests {@link Ehcache#getAll(Set)} for *
          *
        • non-empty request key set
        • *
        • all {@link Store} entries match
        • @@ -196,7 +194,7 @@ public void testGetAllStoreAllMatchNoLoader() throws Exception { verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); validateStatsNoneof(ehcache); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); @@ -204,7 +202,7 @@ public void testGetAllStoreAllMatchNoLoader() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for + * Tests {@link Ehcache#getAll(Set)} for *
            *
          • non-empty request key set
          • *
          • no {@link Store} entries match
          • @@ -221,13 +219,11 @@ public void testGetAllStoreNoMatchStoreAccessExceptionBeforeNoLoader() throws Ex final Ehcache ehcache = this.getEhcache(); - final Map actual = ehcache.getAll(KEY_SET_A); - assertThat(actual, equalTo(getNullEntryMap(KEY_SET_A))); + ehcache.getAll(KEY_SET_A); verify(this.store).bulkComputeIfAbsent(eq(KEY_SET_A), getAnyIterableFunction()); // ResilienceStrategy invoked: no assertion for Store content - verify(this.spiedResilienceStrategy) - .getAllFailure(eq(KEY_SET_A), any(StoreAccessException.class)); + verify(this.resilienceStrategy).getAllFailure(eq(KEY_SET_A), any(StoreAccessException.class)); validateStatsNoneof(ehcache); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); @@ -236,7 +232,7 @@ public void testGetAllStoreNoMatchStoreAccessExceptionBeforeNoLoader() throws Ex /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for + * Tests {@link Ehcache#getAll(Set)} for *
              *
            • non-empty request key set
            • *
            • some {@link Store} entries match
            • @@ -257,7 +253,7 @@ public void testGetAllStoreSomeMatchNoLoader() throws Exception { verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); assertThat(fakeStore.getEntryMap(), equalTo(getEntryMap(KEY_SET_A, KEY_SET_B))); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); validateStatsNoneof(ehcache); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.SUCCESS)); @@ -265,7 +261,7 @@ public void testGetAllStoreSomeMatchNoLoader() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#getAll(Set)} for + * Tests {@link Ehcache#getAll(Set)} for *
                *
              • non-empty request key set
              • *
              • some {@link Store} entries match
              • @@ -283,13 +279,11 @@ public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeNoLoader() throws final Ehcache ehcache = this.getEhcache(); final Set fetchKeys = fanIn(KEY_SET_A, KEY_SET_C); - final Map actual = ehcache.getAll(fetchKeys); - - assertThat(actual, equalTo(getNullEntryMap(fetchKeys))); + ehcache.getAll(fetchKeys); verify(this.store).bulkComputeIfAbsent(eq(fetchKeys), getAnyIterableFunction()); // ResilienceStrategy invoked: no assertion for Store content - verify(this.spiedResilienceStrategy).getAllFailure(eq(fetchKeys), any(StoreAccessException.class)); + verify(this.resilienceStrategy).getAllFailure(eq(fetchKeys), any(StoreAccessException.class)); validateStatsNoneof(ehcache); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetAllOutcome.FAILURE)); @@ -298,9 +292,6 @@ public void testGetAllStoreSomeMatchStoreAccessExceptionBeforeNoLoader() throws private void validateStatsNoneof(Ehcache cache) { validateStats(cache, EnumSet.noneOf(CacheOperationOutcomes.GetOutcome.class)); - if (!(cache instanceof Ehcache)) { - validateStats(cache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); - } } /** @@ -308,12 +299,12 @@ private void validateStatsNoneof(Ehcache cache) { * * @return a new {@code Ehcache} instance */ + @SuppressWarnings("unchecked") private Ehcache getEhcache() { - final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory + final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory .getLogger(Ehcache.class + "-" + "EhcacheBasicGetAllTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } @@ -345,5 +336,4 @@ static Set getAnyStringSet() { static Function, Iterable>> getAnyIterableFunction() { return any(Function.class); // unchecked } - } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java similarity index 88% rename from core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java index 1d09624e64..e7bbfde935 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java @@ -20,7 +20,7 @@ import org.ehcache.Status; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.CoreMatchers; import org.junit.Test; import org.slf4j.LoggerFactory; @@ -28,7 +28,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -67,12 +67,12 @@ public void testGetNoStoreEntry() throws Exception { assertThat(ehcache.get("key"), is(nullValue())); verify(this.store).get(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); } /** - * Tests the effect of a {@link EhcacheWithLoaderWriter#get(Object)} for + * Tests the effect of a {@link Ehcache#get(Object)} for *
                  *
                • key not present in {@code Store}
                • *
                • {@code Store.get} throws
                • @@ -88,7 +88,7 @@ public void testGetNoStoreEntryStoreAccessException() throws Exception { ehcache.get("key"); verify(this.store).get(eq("key")); - verify(this.spiedResilienceStrategy).getFailure(eq("key"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).getFailure(eq("key"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE)); } @@ -108,7 +108,7 @@ public void testGetHasStoreEntry() throws Exception { assertThat(ehcache.get("key"), equalTo("value")); verify(this.store).get(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); } @@ -131,7 +131,7 @@ public void testGetHasStoreEntryStoreAccessExceptionNoCacheLoaderWriter() throws ehcache.get("key"); verify(this.store).get(eq("key")); - verify(this.spiedResilienceStrategy).getFailure(eq("key"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).getFailure(eq("key"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE)); } @@ -141,11 +141,10 @@ public void testGetHasStoreEntryStoreAccessExceptionNoCacheLoaderWriter() throws * @return a new {@code Ehcache} instance */ private Ehcache getEhcache() { - final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory + final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory .getLogger(Ehcache.class + "-" + "EhcacheBasicGetTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java similarity index 91% rename from core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java index 819a59d274..097ae95c6e 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java @@ -18,11 +18,10 @@ import org.ehcache.Cache; import org.ehcache.Status; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.CacheIterationException; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.RemoveStatus; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.Matchers; import org.junit.Test; import org.slf4j.LoggerFactory; @@ -33,18 +32,19 @@ import java.util.Map; import java.util.NoSuchElementException; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.hasEntry; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; -import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.verify; /** * Provides testing of basic ITERATOR operations on an {@code Ehcache}. @@ -63,7 +63,7 @@ public class EhcacheBasicIteratorTest extends EhcacheBasicCrudBase { */ @Test public void testIteratorEmptyStoreGet() throws Exception { - this.store = new FakeStore(Collections.emptyMap()); + this.store = new FakeStore(Collections.emptyMap()); final InternalCache ehcache = this.getEhcache(); assertThat(ehcache.iterator(), is(notNullValue())); } @@ -73,7 +73,7 @@ public void testIteratorEmptyStoreGet() throws Exception { */ @Test public void testIteratorEmptyStoreHasNext() throws Exception { - this.store = new FakeStore(Collections.emptyMap()); + this.store = new FakeStore(Collections.emptyMap()); final InternalCache ehcache = this.getEhcache(); final Iterator> iterator = ehcache.iterator(); assertThat(iterator.hasNext(), is(false)); @@ -84,7 +84,7 @@ public void testIteratorEmptyStoreHasNext() throws Exception { */ @Test public void testIteratorEmptyStoreNext() throws Exception { - this.store = new FakeStore(Collections.emptyMap()); + this.store = new FakeStore(Collections.emptyMap()); final InternalCache ehcache = this.getEhcache(); final Iterator> iterator = ehcache.iterator(); try { @@ -100,7 +100,7 @@ public void testIteratorEmptyStoreNext() throws Exception { */ @Test public void testIteratorEmptyStoreRemoveBeforeNext() throws Exception { - this.store = new FakeStore(Collections.emptyMap()); + this.store = new FakeStore(Collections.emptyMap()); final InternalCache ehcache = this.getEhcache(); final Iterator> iterator = ehcache.iterator(); try { @@ -209,7 +209,7 @@ public void testIteratorNonEmptyNextAfterLast() throws Exception { public void testIteratorStoreAccessException() throws Exception { @SuppressWarnings("unchecked") Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - doReturn("bar").when(valueHolder).value(); + doReturn("bar").when(valueHolder).get(); @SuppressWarnings("unchecked") Cache.Entry> storeEntry = mock(Cache.Entry.class); @@ -228,20 +228,16 @@ public void testIteratorStoreAccessException() throws Exception { final Iterator> iterator = ehcache.iterator(); assertThat(iterator, is(notNullValue())); assertThat(iterator.hasNext(), is(true)); - doThrow(new StoreAccessException("")).when(storeIterator).next(); + StoreAccessException exception = new StoreAccessException(""); + doThrow(exception).when(storeIterator).next(); Cache.Entry entry = iterator.next(); assertThat(entry.getKey(), is("foo")); assertThat(entry.getValue(), is("bar")); - doThrow(new StoreAccessException("")).when(storeIterator).next(); doReturn(RemoveStatus.REMOVED).when(this.store).remove(anyString(), anyString()); - try { - iterator.next(); - fail(); - } catch (CacheIterationException e) { - // Expected - } + iterator.next(); + verify(resilienceStrategy).iteratorFailure(exception); assertThat(iterator.hasNext(), is(false)); @@ -281,12 +277,12 @@ protected Map getTestStoreEntries() { * * @return a new {@code Ehcache} instance */ + @SuppressWarnings("unchecked") protected InternalCache getEhcache() throws Exception { - final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory + final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory .getLogger(Ehcache.class + "-" + "EhcacheBasicIteratorTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicPutAllTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutAllTest.java similarity index 82% rename from core/src/test/java/org/ehcache/core/EhcacheBasicPutAllTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutAllTest.java index 797887a746..c0f49692c3 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicPutAllTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutAllTest.java @@ -19,11 +19,12 @@ import org.ehcache.Status; import org.ehcache.core.spi.store.Store; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.statistics.BulkOps; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.Matchers; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; @@ -31,8 +32,6 @@ import org.mockito.ArgumentMatchers; import org.mockito.Captor; import org.mockito.InOrder; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import org.slf4j.LoggerFactory; import java.util.AbstractMap; @@ -59,11 +58,11 @@ import static org.ehcache.core.EhcacheBasicBulkUtil.getAltEntryMap; import static org.ehcache.core.EhcacheBasicBulkUtil.getEntryMap; import static org.ehcache.core.EhcacheBasicBulkUtil.union; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isIn; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -162,7 +161,7 @@ public void testPutAllNullValue() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for + * Tests {@link Ehcache#putAll(Map)} for *
                    *
                  • empty request map
                  • *
                  • populated {@code Store} (keys not relevant)
                  • @@ -180,7 +179,7 @@ public void testPutAllEmptyRequestNoWriter() throws Exception { verify(this.store, never()).bulkCompute(eq(Collections.emptySet()), getAnyEntryIterableFunction()); assertThat(fakeStore.getEntryMap(), equalTo(originalStoreContent)); - verify(this.spiedResilienceStrategy, never()).putAllFailure(eq(Collections.emptyMap()), any(StoreAccessException.class)); + verify(this.resilienceStrategy, never()).putAllFailure(eq(Collections.emptyMap()), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.SUCCESS)); @@ -188,7 +187,7 @@ public void testPutAllEmptyRequestNoWriter() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for + * Tests {@link Ehcache#putAll(Map)} for *
                      *
                    • non-empty request map
                    • *
                    • populated {@code Store} - some keys overlap request
                    • @@ -209,7 +208,7 @@ public void testPutAllStoreSomeOverlapNoWriter() throws Exception { verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates.keySet())); assertThat(fakeStore.getEntryMap(), equalTo(union(originalStoreContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutAllOutcome.SUCCESS)); @@ -217,7 +216,7 @@ public void testPutAllStoreSomeOverlapNoWriter() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for + * Tests {@link Ehcache#putAll(Map)} for *
                        *
                      • non-empty request map
                      • *
                      • populated {@code Store} - some keys overlap request
                      • @@ -238,11 +237,11 @@ public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeNoWriter() throw final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); ehcache.putAll(contentUpdates); - final InOrder ordered = inOrder(this.store, this.spiedResilienceStrategy); + final InOrder ordered = inOrder(this.store, this.resilienceStrategy); ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.spiedResilienceStrategy) + ordered.verify(this.resilienceStrategy) .putAllFailure(eq(contentUpdates), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); @@ -251,7 +250,7 @@ public void testPutAllStoreSomeOverlapStoreAccessExceptionBeforeNoWriter() throw } /** - * Tests {@link EhcacheWithLoaderWriter#putAll(Map)} for + * Tests {@link Ehcache#putAll(Map)} for *
                          *
                        • non-empty request map
                        • *
                        • populated {@code Store} - some keys overlap request
                        • @@ -270,11 +269,11 @@ public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterNoWriter() throws final Map contentUpdates = getAltEntryMap("new_", fanIn(KEY_SET_A, KEY_SET_C)); ehcache.putAll(contentUpdates); - final InOrder ordered = inOrder(this.store, this.spiedResilienceStrategy); + final InOrder ordered = inOrder(this.store, this.resilienceStrategy); ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates.keySet()))); // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.spiedResilienceStrategy) + ordered.verify(this.resilienceStrategy) .putAllFailure(eq(contentUpdates), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.PutOutcome.class)); @@ -282,44 +281,6 @@ public void testPutAllStoreSomeOverlapStoreAccessExceptionAfterNoWriter() throws assertThat(ehcache.getBulkMethodEntries().get(BulkOps.PUT_ALL).intValue(), is(0)); } - @Test - @SuppressWarnings("unchecked") - public void putAllStoreCallsMethodTwice() throws Exception { - this.store = mock(Store.class); - CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - final List written = new ArrayList<>(); - doAnswer(invocation -> { - Iterable> i = (Iterable>) invocation.getArguments()[0]; - for (Map.Entry entry : i) { - if (entry.getKey() == null) fail("null key is forbidden in CacheLoaderWriter.writeAll()"); - if (entry.getValue() == null) fail("null value is forbidden in CacheLoaderWriter.writeAll()"); - written.add(entry); - } - return null; - }).when(cacheLoaderWriter).writeAll(any(Iterable.class)); - final EhcacheWithLoaderWriter ehcache = this.getEhcacheWithLoaderWriter(cacheLoaderWriter); - - final ArgumentCaptor functionArgumentCaptor = ArgumentCaptor.forClass(Function.class); - - Map map = new HashMap() {{ - put("1", "one"); - put("2", "two"); - }}; - - when(store.bulkCompute(ArgumentMatchers.anySet(), functionArgumentCaptor.capture())).then(invocation -> { - Function function = functionArgumentCaptor.getValue(); - Iterable arg = map.entrySet(); - function.apply(arg); - function.apply(arg); - return null; - }); - - ehcache.putAll(map); - - assertThat(written.size(), is(2)); - assertThat(written.contains(new AbstractMap.SimpleEntry<>("1", "one")), is(true)); - assertThat(written.contains(new AbstractMap.SimpleEntry<>("2", "two")), is(true)); - } /** * Gets an initialized {@link Ehcache Ehcache} instance @@ -327,20 +288,10 @@ public void putAllStoreCallsMethodTwice() throws Exception { * @return a new {@code Ehcache} instance */ private Ehcache getEhcache() { - final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory - .getLogger(Ehcache.class + "-" + "EhcacheBasicPutAllTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } - - private EhcacheWithLoaderWriter getEhcacheWithLoaderWriter(CacheLoaderWriter cacheLoaderWriter) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory + final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory .getLogger(Ehcache.class + "-" + "EhcacheBasicPutAllTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } @@ -392,7 +343,7 @@ static Set getAnyStringSet() { * @param originalStoreContent the original content provided to {@code fakeStore} * @param fakeLoaderWriter the {@link org.ehcache.core.EhcacheBasicCrudBase.FakeCacheLoaderWriter FakeCacheLoaderWriter} instances used in the test * @param originalWriterContent the original content provided to {@code fakeLoaderWriter} - * @param contentUpdates the {@code Map} provided to the {@link EhcacheWithLoaderWriter#putAll(java.util.Map)} call in the test + * @param contentUpdates the {@code Map} provided to the {@link Ehcache#putAll(java.util.Map)} call in the test * @param expectedFailures the {@code Set} of failing keys expected for the test * @param expectedSuccesses the {@code Set} of successful keys expected for the test * @param bcweSuccesses the {@code Set} from {@link BulkCacheWritingException#getSuccesses()} diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicPutIfAbsentTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutIfAbsentTest.java similarity index 77% rename from core/src/test/java/org/ehcache/core/EhcacheBasicPutIfAbsentTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutIfAbsentTest.java index 6923013bc8..a22f3df49d 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicPutIfAbsentTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutIfAbsentTest.java @@ -19,24 +19,19 @@ import java.util.EnumSet; import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.ResourcePoolsHelper; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Expirations; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.Matchers; import org.junit.Test; import org.slf4j.LoggerFactory; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; @@ -98,8 +93,8 @@ public void testPutIfAbsentNoStoreEntry() throws Exception { final Ehcache ehcache = this.getEhcache(); assertThat(ehcache.putIfAbsent("key", "value"), is(nullValue())); - verify(this.store).putIfAbsent(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verify(this.store).putIfAbsent(eq("key"), eq("value"), any()); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)); } @@ -118,8 +113,8 @@ public void testPutIfAbsentHasStoreEntry() throws Exception { final Ehcache ehcache = this.getEhcache(); assertThat(ehcache.putIfAbsent("key", "value"), is(equalTo("oldValue"))); - verify(this.store).putIfAbsent(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verify(this.store).putIfAbsent(eq("key"), eq("value"), any()); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("oldValue")); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)); } @@ -135,14 +130,13 @@ public void testPutIfAbsentHasStoreEntry() throws Exception { public void testPutIfAbsentNoStoreEntryStoreAccessException() throws Exception { final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).putIfAbsent(eq("key"), eq("value")); + doThrow(new StoreAccessException("")).when(this.store).putIfAbsent(eq("key"), eq("value"), any()); final Ehcache ehcache = this.getEhcache(); ehcache.putIfAbsent("key", "value"); - verify(this.store).putIfAbsent(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy) - .putIfAbsentFailure(eq("key"), eq("value"), (String) isNull(), any(StoreAccessException.class), eq(false)); + verify(this.store).putIfAbsent(eq("key"), eq("value"), any()); + verify(this.resilienceStrategy).putIfAbsentFailure(eq("key"), eq("value"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.FAILURE)); } @@ -157,14 +151,13 @@ public void testPutIfAbsentNoStoreEntryStoreAccessException() throws Exception { public void testPutIfAbsentHasStoreEntryStoreAccessException() throws Exception { final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); this.store = spy(fakeStore); - doThrow(new StoreAccessException("")).when(this.store).putIfAbsent(eq("key"), eq("value")); + doThrow(new StoreAccessException("")).when(this.store).putIfAbsent(eq("key"), eq("value"), any()); final Ehcache ehcache = this.getEhcache(); ehcache.putIfAbsent("key", "value"); - verify(this.store).putIfAbsent(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy) - .putIfAbsentFailure(eq("key"), eq("value"), (String) isNull(), any(StoreAccessException.class), eq(false)); + verify(this.store).putIfAbsent(eq("key"), eq("value"), any()); + verify(this.resilienceStrategy).putIfAbsentFailure(eq("key"), eq("value"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.FAILURE)); } @@ -174,12 +167,9 @@ public void testPutIfAbsentHasStoreEntryStoreAccessException() throws Exception * @return a new {@code Ehcache} instance */ private Ehcache getEhcache() { - CacheConfiguration config = new BaseCacheConfiguration<>(String.class, String.class, null, null, - Expirations.noExpiration(), ResourcePoolsHelper.createHeapOnlyPools()); - final Ehcache ehcache = new Ehcache<>(config, this.store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicPutIfAbsentTest")); + final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicPutIfAbsentTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicPutTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutTest.java similarity index 75% rename from core/src/test/java/org/ehcache/core/EhcacheBasicPutTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutTest.java index 8df8e35052..308f1f04e9 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicPutTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicPutTest.java @@ -21,20 +21,20 @@ import org.ehcache.Status; import org.ehcache.config.CacheConfiguration; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.CoreMatchers; import org.junit.Test; import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; /** @@ -94,7 +94,7 @@ public void testPutNoStoreEntry() throws Exception { ehcache.put("key", "value"); verify(this.store).put(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)); } @@ -116,7 +116,7 @@ public void testPutNoStoreEntryStoreAccessException() throws Exception { ehcache.put("key", "value"); verify(this.store).put(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); } @@ -135,7 +135,7 @@ public void testPutHasStoreEntry() throws Exception { ehcache.put("key", "value"); verify(this.store).put(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)); } @@ -157,8 +157,38 @@ public void testPutHasStoreEntryStoreAccessException() throws Exception { ehcache.put("key", "value"); verify(this.store).put(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); - assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); + verify(this.resilienceStrategy).putFailure(eq("key"), eq("value"), any(StoreAccessException.class)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); + } + + /** + * Tests the effect of a {@link Ehcache#put(Object, Object)} for + *
                            + *
                          • key present in {@code Store}
                          • + *
                          • {@code Store.put} throws a {@code RuntimeException}
                          • + *
                          + */ + @Test + public void testPutThrowsExceptionShouldKeepTheValueInPlace() throws Exception { + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); + this.store = spy(fakeStore); + doThrow(new RuntimeException("failed")).when(this.store).put(eq("key"), eq("value")); + + Ehcache ehcache = this.getEhcache(); + + try { + ehcache.put("key", "value"); + fail(); + } catch(RuntimeException e) { + // expected + assertThat(e.getMessage(), equalTo("failed")); + } + + // Key and old value should still be in place + assertThat(ehcache.get("key"), equalTo("oldValue")); + + verify(this.store).put(eq("key"), eq("value")); + verifyNoMoreInteractions(this.resilienceStrategy); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.PutOutcome.FAILURE)); } @@ -171,11 +201,11 @@ private Ehcache getEhcache() { return getEhcache(CACHE_CONFIGURATION); } + @SuppressWarnings("unchecked") private Ehcache getEhcache(CacheConfiguration config) { - final Ehcache ehcache = new Ehcache<>(config, this.store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicPutTest")); + final Ehcache ehcache = new Ehcache<>(config, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicPutTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java similarity index 80% rename from core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java index 5af4df49ca..687d6862d3 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java @@ -19,19 +19,18 @@ import org.ehcache.Status; import org.ehcache.core.spi.store.Store; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.statistics.BulkOps; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.Matchers; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.InOrder; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import org.slf4j.LoggerFactory; import java.util.ArrayList; @@ -54,11 +53,11 @@ import static org.ehcache.core.EhcacheBasicBulkUtil.copyWithout; import static org.ehcache.core.EhcacheBasicBulkUtil.fanIn; import static org.ehcache.core.EhcacheBasicBulkUtil.getEntryMap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isIn; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -131,7 +130,7 @@ public void testRemoveAllNullKey() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for + * Tests {@link Ehcache#removeAll(Set)} for *
                            *
                          • empty request set
                          • *
                          • populated {@code Store} (keys not relevant)
                          • @@ -149,7 +148,7 @@ public void testRemoveAllEmptyRequestNoWriter() throws Exception { verify(this.store, never()).bulkCompute(eq(Collections.emptySet()), getAnyEntryIterableFunction()); assertThat(fakeStore.getEntryMap(), equalTo(originalStoreContent)); - verify(this.spiedResilienceStrategy, never()).removeAllFailure(eq(Collections.emptySet()), any(StoreAccessException.class)); + verify(this.resilienceStrategy, never()).removeAllFailure(eq(Collections.emptySet()), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.SUCCESS)); @@ -159,7 +158,7 @@ public void testRemoveAllEmptyRequestNoWriter() throws Exception { /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for + * Tests {@link Ehcache#removeAll(Set)} for *
                              *
                            • non-empty request set
                            • *
                            • populated {@code Store} - some keys overlap request
                            • @@ -180,7 +179,7 @@ public void testRemoveAllStoreSomeOverlapNoWriter() throws Exception { verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); assertThat(this.getBulkComputeArgs(), equalTo(contentUpdates)); assertThat(fakeStore.getEntryMap(), equalTo(copyWithout(originalStoreContent, contentUpdates))); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveAllOutcome.SUCCESS)); @@ -188,7 +187,7 @@ public void testRemoveAllStoreSomeOverlapNoWriter() throws Exception { } /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for + * Tests {@link Ehcache#removeAll(Set)} for *
                                *
                              • non-empty request set
                              • *
                              • populated {@code Store} - some keys overlap request
                              • @@ -209,11 +208,11 @@ public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeNoWriter() th final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); ehcache.removeAll(contentUpdates); - final InOrder ordered = inOrder(this.store, this.spiedResilienceStrategy); + final InOrder ordered = inOrder(this.store, this.resilienceStrategy); ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.spiedResilienceStrategy) + ordered.verify(this.resilienceStrategy) .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); @@ -222,7 +221,7 @@ public void testRemoveAllStoreSomeOverlapStoreAccessExceptionBeforeNoWriter() th } /** - * Tests {@link EhcacheWithLoaderWriter#removeAll(Set)} for + * Tests {@link Ehcache#removeAll(Set)} for *
                                  *
                                • non-empty request set
                                • *
                                • populated {@code Store} - some keys overlap request
                                • @@ -241,11 +240,11 @@ public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterNoWriter() thr final Set contentUpdates = fanIn(KEY_SET_A, KEY_SET_C); ehcache.removeAll(contentUpdates); - final InOrder ordered = inOrder(this.store, this.spiedResilienceStrategy); + final InOrder ordered = inOrder(this.store, this.resilienceStrategy); ordered.verify(this.store, atLeast(1)).bulkCompute(this.bulkComputeSetCaptor.capture(), getAnyEntryIterableFunction()); assertThat(this.getBulkComputeArgs(), everyItem(isIn(contentUpdates))); // ResilienceStrategy invoked; no assertions about Store content - ordered.verify(this.spiedResilienceStrategy) + ordered.verify(this.resilienceStrategy) .removeAllFailure(eq(contentUpdates), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.RemoveOutcome.class)); @@ -253,67 +252,16 @@ public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterNoWriter() thr assertThat(ehcache.getBulkMethodEntries().get(BulkOps.REMOVE_ALL).intValue(), is(0)); } - @Test - @SuppressWarnings("unchecked") - public void removeAllStoreCallsMethodTwice() throws Exception { - CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - final List removed = new ArrayList<>(); - doAnswer(invocation -> { - @SuppressWarnings("unchecked") - Iterable i = (Iterable) invocation.getArguments()[0]; - for (String key : i) { - removed.add(key); - } - return null; - }).when(cacheLoaderWriter).deleteAll(any(Iterable.class)); - final EhcacheWithLoaderWriter ehcache = this.getEhcacheWithLoaderWriter(cacheLoaderWriter); - - final ArgumentCaptor>, Iterable>>> functionArgumentCaptor = (ArgumentCaptor) ArgumentCaptor.forClass(Function.class); - - Set keys = new HashSet() {{ - add("1"); - add("2"); - }}; - - HashMap entriesMap = new HashMap<>(); - entriesMap.put("1", "one"); - entriesMap.put("2", "two"); - - when(store.bulkCompute(any(Set.class), functionArgumentCaptor.capture())).then(invocation -> { - Function>, Iterable>> function = functionArgumentCaptor.getValue(); - Iterable> arg = entriesMap.entrySet(); - function.apply(arg); - function.apply(arg); - return null; - }); - - ehcache.removeAll(keys); - - assertThat(removed.size(), is(2)); - assertThat(removed.contains("1"), is(true)); - assertThat(removed.contains("2"), is(true)); - } - /** * Gets an initialized {@link Ehcache Ehcache} instance * * @return a new {@code Ehcache} instance */ private Ehcache getEhcache() { - final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory + final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory .getLogger(Ehcache.class + "-" + "EhcacheBasicRemoveAllTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); - return ehcache; - } - - private EhcacheWithLoaderWriter getEhcacheWithLoaderWriter(CacheLoaderWriter cacheLoaderWriter) { - final EhcacheWithLoaderWriter ehcache = new EhcacheWithLoaderWriter<>(CACHE_CONFIGURATION, this.store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory - .getLogger(Ehcache.class + "-" + "EhcacheBasicPutAllTest")); - ehcache.init(); - assertThat("cache not initialized", ehcache.getStatus(), Matchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } @@ -379,7 +327,7 @@ private Set getBulkComputeArgs() { * @param originalStoreContent the original content provided to {@code fakeStore} * @param fakeLoaderWriter the {@link org.ehcache.core.EhcacheBasicCrudBase.FakeCacheLoaderWriter FakeCacheLoaderWriter} instances used in the test * @param originalWriterContent the original content provided to {@code fakeLoaderWriter} - * @param contentUpdates the {@code Set} provided to the {@link EhcacheWithLoaderWriter#removeAll(java.util.Set)} call in the test + * @param contentUpdates the {@code Set} provided to the {@link Ehcache#removeAll(java.util.Set)} call in the test * @param expectedFailures the {@code Set} of failing keys expected for the test * @param expectedSuccesses the {@code Set} of successful keys expected for the test * @param bcweSuccesses the {@code Set} from {@link BulkCacheWritingException#getSuccesses()} diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveTest.java similarity index 86% rename from core/src/test/java/org/ehcache/core/EhcacheBasicRemoveTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveTest.java index 09dbbef8aa..00fa591ada 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveTest.java @@ -20,20 +20,19 @@ import org.ehcache.Status; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.CoreMatchers; import org.junit.Test; import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verifyZeroInteractions; /** @@ -69,7 +68,7 @@ public void testRemoveNoStoreEntry() throws Exception { ehcache.remove("key"); verify(this.store).remove(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.NOOP)); } @@ -90,8 +89,7 @@ public void testRemoveNoStoreEntryStoreAccessException() throws Exception { final Ehcache ehcache = this.getEhcache(); ehcache.remove("key"); - verify(this.store, times(2)).remove(eq("key")); - verify(this.spiedResilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); } @@ -111,7 +109,7 @@ public void testRemoveHasStoreEntry() throws Exception { ehcache.remove("key"); verify(this.store).remove(eq("key")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.SUCCESS)); } @@ -132,8 +130,7 @@ public void testRemoveHasStoreEntryStoreAccessException() throws Exception { final Ehcache ehcache = this.getEhcache(); ehcache.remove("key"); - verify(this.store, times(2)).remove(eq("key")); - verify(this.spiedResilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).removeFailure(eq("key"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.RemoveOutcome.FAILURE)); } @@ -142,12 +139,12 @@ public void testRemoveHasStoreEntryStoreAccessException() throws Exception { * * @return a new {@code Ehcache} instance */ + @SuppressWarnings("unchecked") private Ehcache getEhcache() { - final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory + final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory .getLogger(Ehcache.class + "-" + "EhcacheBasicRemoveTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } } diff --git a/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveValueTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveValueTest.java new file mode 100644 index 0000000000..ca5d172e8f --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicRemoveValueTest.java @@ -0,0 +1,218 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core; + +import java.util.Collections; +import java.util.EnumSet; + +import org.ehcache.Status; +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.spi.resilience.StoreAccessException; +import org.hamcrest.CoreMatchers; +import org.junit.Test; +import org.slf4j.LoggerFactory; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; + +/** + * @author Abhilash + * + */ +public class EhcacheBasicRemoveValueTest extends EhcacheBasicCrudBase { + + @Test + public void testRemoveNullNull() { + Ehcache ehcache = this.getEhcache(); + + try { + ehcache.remove(null, null); + fail(); + } catch (NullPointerException e) { + // expected + } + } + + @Test + public void testRemoveKeyNull() throws Exception { + Ehcache ehcache = this.getEhcache(); + + try { + ehcache.remove("key", null); + fail(); + } catch (NullPointerException e) { + // expected + } + } + + @Test + public void testRemoveNullValue() throws Exception { + Ehcache ehcache = this.getEhcache(); + + try { + ehcache.remove(null, "value"); + fail(); + } catch (NullPointerException e) { + // expected + } + } + + /** + * Tests the effect of a {@link Ehcache#remove(Object, Object)} for + *
                                    + *
                                  • key not present in {@code Store}
                                  • + *
                                  + */ + @Test + public void testRemoveValueNoStoreEntry() throws Exception { + FakeStore fakeStore = new FakeStore(Collections.emptyMap()); + this.store = spy(fakeStore); + + Ehcache ehcache = this.getEhcache(); + + assertFalse(ehcache.remove("key", "value")); + verify(this.store).remove(eq("key"), eq("value")); + verifyZeroInteractions(this.resilienceStrategy); + assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); + } + + /** + * Tests the effect of a {@link Ehcache#remove(Object, Object)} for + *
                                    + *
                                  • key with unequal value in {@code Store}
                                  • + *
                                  + */ + @Test + public void testRemoveValueUnequalStoreEntry() throws Exception { + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); + this.store = spy(fakeStore); + + Ehcache ehcache = this.getEhcache(); + + assertFalse(ehcache.remove("key", "value")); + verify(this.store).remove(eq("key"), eq("value")); + verifyZeroInteractions(this.resilienceStrategy); + assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); + } + + /** + * Tests the effect of a {@link Ehcache#remove(Object, Object)} for + *
                                    + *
                                  • key with equal value in {@code Store}
                                  • + *
                                  + */ + @Test + public void testRemoveValueEqualStoreEntry() throws Exception { + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); + this.store = spy(fakeStore); + + Ehcache ehcache = this.getEhcache(); + + assertTrue(ehcache.remove("key", "value")); + verify(this.store).remove(eq("key"), eq("value")); + verifyZeroInteractions(this.resilienceStrategy); + assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)); + } + + /** + * Tests the effect of a {@link Ehcache#remove(Object, Object)} for + *
                                    + *
                                  • key not present in {@code Store}
                                  • + *
                                  • >{@code Store.remove} throws
                                  • + *
                                  + */ + @Test + public void testRemoveValueNoStoreEntryStoreAccessException() throws Exception { + FakeStore fakeStore = new FakeStore(Collections.emptyMap()); + this.store = spy(fakeStore); + doThrow(new StoreAccessException("")).when(this.store).remove(eq("key"), eq("value")); + + Ehcache ehcache = this.getEhcache(); + + ehcache.remove("key", "value"); + verify(this.store).remove(eq("key"), eq("value")); + verify(this.resilienceStrategy).removeFailure(eq("key"), eq("value"), any(StoreAccessException.class)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); + } + + /** + * Tests the effect of a {@link Ehcache#remove(Object, Object)} for + *
                                    + *
                                  • key with unequal value present in {@code Store}
                                  • + *
                                  • >{@code Store.remove} throws
                                  • + *
                                  + */ + @Test + public void testRemoveValueUnequalStoreEntryStoreAccessException() throws Exception { + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); + this.store = spy(fakeStore); + doThrow(new StoreAccessException("")).when(this.store).remove(eq("key"), eq("value")); + + Ehcache ehcache = this.getEhcache(); + + ehcache.remove("key", "value"); + verify(this.store).remove(eq("key"), eq("value")); + verify(this.resilienceStrategy).removeFailure(eq("key"), eq("value"), any(StoreAccessException.class)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); + } + + /** + * Tests the effect of a {@link Ehcache#remove(Object, Object)} for + *
                                    + *
                                  • key with equal value present in {@code Store}
                                  • + *
                                  • >{@code Store.remove} throws
                                  • + *
                                  + */ + @Test + public void testRemoveValueEqualStoreEntryStoreAccessException() throws Exception { + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "value")); + this.store = spy(fakeStore); + doThrow(new StoreAccessException("")).when(this.store).remove(eq("key"), eq("value")); + + Ehcache ehcache = this.getEhcache(); + + ehcache.remove("key", "value"); + verify(this.store).remove(eq("key"), eq("value")); + verify(this.resilienceStrategy).removeFailure(eq("key"), eq("value"), any(StoreAccessException.class)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE)); + } + + /** + * Gets an initialized {@link Ehcache Ehcache} instance + * + * @return a new {@code Ehcache} instance + */ + private Ehcache getEhcache() { + Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory + .getLogger(Ehcache.class + "-" + "EhcacheBasicRemoveValueTest")); + ehcache.init(); + assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); + return ehcache; + } +} diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicReplaceTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicReplaceTest.java similarity index 88% rename from core/src/test/java/org/ehcache/core/EhcacheBasicReplaceTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicReplaceTest.java index fcffdb7a97..f55a9482b7 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicReplaceTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicReplaceTest.java @@ -20,15 +20,15 @@ import org.ehcache.Status; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.CoreMatchers; import org.junit.Test; import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -94,7 +94,7 @@ public void testReplaceNoStoreEntry() throws Exception { assertNull(ehcache.replace("key", "value")); verify(this.store).replace(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)); } @@ -116,7 +116,7 @@ public void testReplaceNoStoreEntryStoreAccessException() throws Exception { ehcache.replace("key", "value"); verify(this.store).replace(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); } @@ -135,7 +135,7 @@ public void testReplaceHasStoreEntry() throws Exception { assertThat(ehcache.replace("key", "value"), is(equalTo("oldValue"))); verify(this.store).replace(eq("key"), eq("value")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("value"))); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); } @@ -157,7 +157,7 @@ public void testReplaceHasStoreEntryStoreAccessException() throws Exception { ehcache.replace("key", "value"); verify(this.store).replace(eq("key"), eq("value")); - verify(this.spiedResilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); + verify(this.resilienceStrategy).replaceFailure(eq("key"), eq("value"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); } @@ -168,10 +168,9 @@ public void testReplaceHasStoreEntryStoreAccessException() throws Exception { */ private Ehcache getEhcache() { final Ehcache ehcache - = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicReplaceTest")); + = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBasicReplaceTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicReplaceValueTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicReplaceValueTest.java similarity index 75% rename from core/src/test/java/org/ehcache/core/EhcacheBasicReplaceValueTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicReplaceValueTest.java index a65b0ab6d8..520d877310 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicReplaceValueTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBasicReplaceValueTest.java @@ -20,15 +20,15 @@ import org.ehcache.Status; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.hamcrest.CoreMatchers; import org.junit.Test; import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; @@ -46,7 +46,7 @@ public class EhcacheBasicReplaceValueTest extends EhcacheBasicCrudBase { @Test public void testReplaceValueNullNullNull() { - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); try { ehcache.replace(null, null, null); @@ -58,7 +58,7 @@ public void testReplaceValueNullNullNull() { @Test public void testReplaceKeyNullNull() { - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); try { ehcache.replace("key", null, null); @@ -70,7 +70,7 @@ public void testReplaceKeyNullNull() { @Test public void testReplaceKeyValueNull() { - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); try { ehcache.replace("key", "oldValue", null); @@ -82,7 +82,7 @@ public void testReplaceKeyValueNull() { @Test public void testReplaceKeyNullValue() { - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); try { ehcache.replace("key", null, "newValue"); @@ -94,7 +94,7 @@ public void testReplaceKeyNullValue() { @Test public void testReplaceNullValueNull() { - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); try { ehcache.replace(null, "oldValue", null); @@ -106,7 +106,7 @@ public void testReplaceNullValueNull() { @Test public void testReplaceNullValueValue() { - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); try { ehcache.replace(null, "oldValue", "newValue"); @@ -118,7 +118,7 @@ public void testReplaceNullValueValue() { @Test public void testReplaceNullNullValue() { - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); try { ehcache.replace(null, null, "newValue"); @@ -137,14 +137,14 @@ public void testReplaceNullNullValue() { */ @Test public void testReplaceValueNoStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); + FakeStore fakeStore = new FakeStore(Collections.emptyMap()); this.store = spy(fakeStore); - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); assertFalse(ehcache.replace("key", "oldValue", "newValue")); verify(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)); } @@ -157,14 +157,14 @@ public void testReplaceValueNoStoreEntry() throws Exception { */ @Test public void testReplaceValueUnequalStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); this.store = spy(fakeStore); - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); assertFalse(ehcache.replace("key", "oldValue", "newValue")); verify(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("unequalValue"))); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)); } @@ -177,14 +177,14 @@ public void testReplaceValueUnequalStoreEntry() throws Exception { */ @Test public void testReplaceValueEqualStoreEntry() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); this.store = spy(fakeStore); - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); assertTrue(ehcache.replace("key", "oldValue", "newValue")); verify(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - verifyZeroInteractions(this.spiedResilienceStrategy); + verifyZeroInteractions(this.resilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), is(equalTo("newValue"))); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)); } @@ -198,16 +198,15 @@ public void testReplaceValueEqualStoreEntry() throws Exception { */ @Test public void testReplaceValueNoStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.emptyMap()); + FakeStore fakeStore = new FakeStore(Collections.emptyMap()); this.store = spy(fakeStore); doThrow(new StoreAccessException("")).when(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); ehcache.replace("key", "oldValue", "newValue"); verify(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); + verify(this.resilienceStrategy).replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); } @@ -220,16 +219,15 @@ public void testReplaceValueNoStoreEntryStoreAccessException() throws Exception */ @Test public void testReplaceValueUnequalStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "unequalValue")); this.store = spy(fakeStore); doThrow(new StoreAccessException("")).when(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); ehcache.replace("key", "oldValue", "newValue"); verify(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); + verify(this.resilienceStrategy).replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); } @@ -242,16 +240,15 @@ public void testReplaceValueUnequalStoreEntryStoreAccessException() throws Excep */ @Test public void testReplaceValueEqualStoreEntryStoreAccessException() throws Exception { - final FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); + FakeStore fakeStore = new FakeStore(Collections.singletonMap("key", "oldValue")); this.store = spy(fakeStore); doThrow(new StoreAccessException("")).when(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - final Ehcache ehcache = this.getEhcache(); + Ehcache ehcache = this.getEhcache(); ehcache.replace("key", "oldValue", "newValue"); verify(this.store).replace(eq("key"), eq("oldValue"), eq("newValue")); - verify(this.spiedResilienceStrategy) - .replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class), eq(false)); + verify(this.resilienceStrategy).replaceFailure(eq("key"), eq("oldValue"), eq("newValue"), any(StoreAccessException.class)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.FAILURE)); } @@ -261,11 +258,10 @@ public void testReplaceValueEqualStoreEntryStoreAccessException() throws Excepti * @return a new {@code Ehcache} instance */ private Ehcache getEhcache() { - final Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, cacheEventDispatcher, LoggerFactory + Ehcache ehcache = new Ehcache<>(CACHE_CONFIGURATION, this.store, resilienceStrategy, cacheEventDispatcher, LoggerFactory .getLogger(Ehcache.class + "-" + "EhcacheBasicReplaceValueTest")); ehcache.init(); assertThat("cache not initialized", ehcache.getStatus(), CoreMatchers.is(Status.AVAILABLE)); - this.spiedResilienceStrategy = this.setResilienceStrategySpy(ehcache); return ehcache; } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheBulkMethodsTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBulkMethodsTest.java similarity index 82% rename from core/src/test/java/org/ehcache/core/EhcacheBulkMethodsTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheBulkMethodsTest.java index e26db61190..2e55654ab4 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBulkMethodsTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheBulkMethodsTest.java @@ -18,9 +18,10 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.ValueHolder; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.resilience.ResilienceStrategy; import org.junit.Test; import org.slf4j.LoggerFactory; @@ -32,9 +33,9 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.core.IsCollectionContaining.hasItems; -import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -68,8 +69,11 @@ public void testPutAll() throws Exception { public void testGetAll() throws Exception { Store store = mock(Store.class); when(store.bulkComputeIfAbsent((Set)argThat(hasItems(1, 2, 3)), any(Function.class))).thenAnswer(invocation -> { - Function function = (Function)invocation.getArguments()[1]; - function.apply(invocation.getArguments()[0]); + Function, Iterable>> function = + (Function, Iterable>>) invocation.getArguments()[1]; + Set keys = (Set) invocation.getArguments()[0]; + + function.apply(keys); Map> map = new HashMap<>(); map.put(1, null); @@ -101,9 +105,10 @@ public void testRemoveAll() throws Exception { protected InternalCache getCache(Store store) { CacheConfiguration cacheConfig = mock(CacheConfiguration.class); - when(cacheConfig.getExpiry()).thenReturn(mock(Expiry.class)); + when(cacheConfig.getExpiryPolicy()).thenReturn(mock(ExpiryPolicy.class)); CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); - return new Ehcache<>(cacheConfig, store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBulkMethodsTest")); + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + return new Ehcache<>(cacheConfig, store, resilienceStrategy, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheBulkMethodsTest")); } static Map.Entry entry(final K key, final V value) { @@ -149,40 +154,30 @@ public String toString() { static ValueHolder valueHolder(final V value) { return new ValueHolder() { @Override - public V value() { + public V get() { return value; } @Override - public long creationTime(TimeUnit unit) { + public long creationTime() { throw new AssertionError(); } @Override - public long expirationTime(TimeUnit unit) { + public long expirationTime() { return 0; } @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { + public boolean isExpired(long expirationTime) { return false; } @Override - public long lastAccessTime(TimeUnit unit) { - throw new AssertionError(); - } - - @Override - public float hitRate(long now, TimeUnit unit) { + public long lastAccessTime() { throw new AssertionError(); } - @Override - public long hits() { - throw new UnsupportedOperationException("Implement me!"); - } - @Override public long getId() { throw new UnsupportedOperationException("Implement me!"); diff --git a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheManagerTest.java similarity index 88% rename from core/src/test/java/org/ehcache/core/EhcacheManagerTest.java rename to ehcache-core/src/test/java/org/ehcache/core/EhcacheManagerTest.java index 3503775c0d..56c239ae83 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheManagerTest.java @@ -27,30 +27,28 @@ import org.ehcache.config.Configuration; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; -import org.ehcache.core.config.BaseCacheConfiguration; import org.ehcache.core.config.DefaultConfiguration; import org.ehcache.core.config.ResourcePoolsHelper; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEventDispatcherFactory; import org.ehcache.core.events.CacheEventListenerProvider; import org.ehcache.core.events.CacheManagerListener; -import org.ehcache.core.internal.util.ClassLoading; import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.core.spi.store.Store; +import org.ehcache.core.util.ClassLoading; +import org.ehcache.core.util.TestCacheConfig; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; import org.ehcache.spi.loaderwriter.WriteBehindProvider; +import org.ehcache.spi.resilience.ResilienceStrategyProvider; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.CoreMatchers; -import org.junit.Rule; +import org.junit.Ignore; import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.mockito.ArgumentMatcher; -import org.mockito.ArgumentMatchers; import org.mockito.ArgumentMatchers; import org.mockito.Mockito; @@ -68,18 +66,18 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.isA; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyCollection; -import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -92,9 +90,6 @@ @SuppressWarnings({ "unchecked", "rawtypes" }) public class EhcacheManagerTest { - @Rule - public ExpectedException expectedException = ExpectedException.none(); - private static Map> newCacheMap() { return new HashMap<>(); } @@ -106,13 +101,13 @@ private List minimumCacheManagerServices() { mock(WriteBehindProvider.class), mock(CacheEventDispatcherFactory.class), mock(CacheEventListenerProvider.class), - mock(LocalPersistenceService.class))); + mock(LocalPersistenceService.class), + mock(ResilienceStrategyProvider.class))); } @Test public void testCanDestroyAndClose() throws Exception { - CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Long.class, String.class, null, - null, null, ResourcePoolsHelper.createHeapOnlyPools(10)); + CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Long.class, String.class); Store.Provider storeProvider = mock(Store.Provider.class); when(storeProvider.rank(any(Set.class), any(Collection.class))).thenReturn(1); @@ -132,7 +127,8 @@ public void testCanDestroyAndClose() throws Exception { mock(WriteBehindProvider.class), cacheEventNotificationListenerServiceProvider, mock(CacheEventListenerProvider.class), - mock(LocalPersistenceService.class))); + mock(LocalPersistenceService.class), + mock(ResilienceStrategyProvider.class))); cacheManager.init(); cacheManager.close(); @@ -146,7 +142,7 @@ public void testCanDestroyAndClose() throws Exception { @Test public void testConstructionThrowsWhenNotBeingToResolveService() { Map> caches = newCacheMap(); - final DefaultConfiguration config = new DefaultConfiguration(caches, null, (ServiceCreationConfiguration) () -> NoSuchService.class); + final DefaultConfiguration config = new DefaultConfiguration(caches, null, (ServiceCreationConfiguration) () -> NoSuchService.class); try { new EhcacheManager(config); fail("Should have thrown..."); @@ -158,7 +154,7 @@ public void testConstructionThrowsWhenNotBeingToResolveService() { @Test public void testCreationFailsOnDuplicateServiceCreationConfiguration() { Map> caches = newCacheMap(); - DefaultConfiguration config = new DefaultConfiguration(caches, null, (ServiceCreationConfiguration) () -> NoSuchService.class, (ServiceCreationConfiguration) () -> NoSuchService.class); + DefaultConfiguration config = new DefaultConfiguration(caches, null, (ServiceCreationConfiguration) () -> NoSuchService.class, (ServiceCreationConfiguration) () -> NoSuchService.class); try { new EhcacheManager(config); fail("Should have thrown ..."); @@ -188,7 +184,7 @@ public void testStopAllServicesWhenCacheInitializationFails() { @Test public void testNoClassLoaderSpecified() { Map> caches = newCacheMap(); - caches.put("foo", new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools())); + caches.put("foo", new TestCacheConfig<>(Object.class, Object.class)); DefaultConfiguration config = new DefaultConfiguration(caches, null); final Store.Provider storeProvider = mock(Store.Provider.class); @@ -222,9 +218,14 @@ public void testClassLoaderSpecified() { assertNotSame(cl1.getClass(), cl2.getClass()); Map> caches = newCacheMap(); - caches.put("foo1", new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools())); - caches.put("foo2", new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools())); - caches.put("foo3", new BaseCacheConfiguration<>(Object.class, Object.class, null, cl2, null, ResourcePoolsHelper.createHeapOnlyPools())); + caches.put("foo1", new TestCacheConfig<>(Object.class, Object.class)); + caches.put("foo2", new TestCacheConfig<>(Object.class, Object.class)); + caches.put("foo3", new TestCacheConfig(Object.class, Object.class) { + @Override + public ClassLoader getClassLoader() { + return cl2; + } + }); DefaultConfiguration config = new DefaultConfiguration(caches, cl1); final Store.Provider storeProvider = mock(Store.Provider.class); @@ -259,8 +260,7 @@ public void testReturnsNullForNonExistCache() { @Test public void testThrowsWhenAddingExistingCache() { - CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Object.class, Object.class); final Store.Provider storeProvider = mock(Store.Provider.class); when(storeProvider.rank(any(Set.class), any(Collection.class))).thenReturn(1); final Store mock = mock(Store.class); @@ -302,8 +302,7 @@ public void testThrowsWhenNotInitialized() { when(storeProvider .createStore(ArgumentMatchers.any(), ArgumentMatchers.any())).thenReturn(mock); - final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Integer.class, String.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + final CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Integer.class, String.class); Map> caches = newCacheMap(); caches.put("bar", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -341,8 +340,7 @@ public void testThrowsWhenRetrievingCacheWithWrongTypes() { when(storeProvider .createStore(ArgumentMatchers.any(), ArgumentMatchers.any())).thenReturn(mock); - final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Integer.class, String.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + final CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Integer.class, String.class); Map> caches = newCacheMap(); caches.put("bar", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -367,10 +365,11 @@ public void testThrowsWhenRetrievingCacheWithWrongTypes() { } } + @Ignore @Test public void testLifeCyclesCacheLoaders() throws Exception { - ResourcePools resourcePools = ResourcePoolsHelper.createHeapOnlyPools(10); + ResourcePools resourcePools = ResourcePoolsHelper.createResourcePools(100L); final CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); @@ -413,15 +412,14 @@ public void testLifeCyclesCacheLoaders() throws Exception { verify(cacheLoaderWriterProvider).createCacheLoaderWriter("foo", fooConfig); manager.removeCache("bar"); - verify(cacheLoaderWriterProvider, never()).releaseCacheLoaderWriter((CacheLoaderWriter)Mockito.any()); + verify(cacheLoaderWriterProvider, never()).releaseCacheLoaderWriter(anyString(), (CacheLoaderWriter)Mockito.any()); manager.removeCache("foo"); - verify(cacheLoaderWriterProvider).releaseCacheLoaderWriter(fooLoaderWriter); + verify(cacheLoaderWriterProvider).releaseCacheLoaderWriter(anyString(), fooLoaderWriter); } @Test public void testDoesNotifyAboutCache() { - final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + final CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Object.class, Object.class); final Store.Provider mock = mock(Store.Provider.class); when(mock.rank(any(Set.class), any(Collection.class))).thenReturn(1); @@ -447,8 +445,7 @@ public void testDoesNotifyAboutCache() { @Test public void testDoesNotNotifyAboutCacheOnInitOrClose() { - final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + final CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Object.class, Object.class); final Store.Provider mock = mock(Store.Provider.class); when(mock.rank(any(Set.class), any(Collection.class))).thenReturn(1); @@ -475,8 +472,7 @@ public void testDoesNotNotifyAboutCacheOnInitOrClose() { @Test public void testClosesStartedCachesDownWhenInitThrows() { final Set> caches = new HashSet<>(); - final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + final CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Object.class, Object.class); final Store.Provider storeProvider = mock(Store.Provider.class); when(storeProvider.rank(any(Set.class), any(Collection.class))).thenReturn(1); final Collection services = getServices(storeProvider, null); @@ -495,8 +491,8 @@ InternalCache createNewEhcache(final String alias, final CacheConfi final InternalCache ehcache = super.createNewEhcache(alias, config, keyType, valueType); caches.add(ehcache); if(caches.size() == 1) { - when(storeProvider.createStore(ArgumentMatchers.>any(), - ArgumentMatchers.>any())) + when(storeProvider.createStore( + ArgumentMatchers.>any(), ArgumentMatchers.>any())) .thenThrow(thrown); } return ehcache; @@ -525,8 +521,7 @@ protected void closeEhcache(final String alias, final InternalCache ehcach @Test public void testClosesAllCachesDownWhenCloseThrows() { final Set caches = new HashSet<>(); - final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + final CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Object.class, Object.class); final Store.Provider storeProvider = mock(Store.Provider.class); when(storeProvider.rank(any(Set.class), any(Collection.class))).thenReturn(1); @@ -588,11 +583,10 @@ public void testDoesNotifyAboutLifecycle() { @Test public void testCloseNoLoaderWriterAndCacheEventListener() throws Exception { - final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Object.class, Object.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + final CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Object.class, Object.class); final Store.Provider storeProvider = spy(new Store.Provider() { @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { + public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { return 1; } @@ -615,7 +609,7 @@ public void initStore(Store resource) { } @Override - public Store createStore(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public Store createStore(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return null; } }); @@ -630,7 +624,7 @@ public void stop() { } @Override - public CacheEventDispatcher createCacheEventDispatcher(Store store, ServiceConfiguration... serviceConfigs) { + public CacheEventDispatcher createCacheEventDispatcher(Store store, ServiceConfiguration... serviceConfigs) { return null; } @@ -676,8 +670,7 @@ public void testChangesToManagerAreReflectedInConfig() { when(store.getConfigurationChangeListeners()).thenReturn(new ArrayList<>()); when(cacheEventNotificationListenerServiceProvider.createCacheEventDispatcher(store)).thenReturn(mock(CacheEventDispatcher.class)); - CacheConfiguration cache1Configuration = new BaseCacheConfiguration<>(Long.class, String.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + CacheConfiguration cache1Configuration = new TestCacheConfig<>(Long.class, String.class); Map> caches = newCacheMap(); caches.put("cache1", cache1Configuration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -687,13 +680,14 @@ public void testChangesToManagerAreReflectedInConfig() { mock(WriteBehindProvider.class), cacheEventNotificationListenerServiceProvider, mock(CacheEventListenerProvider.class), - mock(LocalPersistenceService.class) + mock(LocalPersistenceService.class), + mock(ResilienceStrategyProvider.class) )); cacheManager.init(); try { - final CacheConfiguration cache2Configuration = new BaseCacheConfiguration<>(Long.class, String.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + final CacheConfiguration cache2Configuration = new TestCacheConfig<>(Long.class, String.class, ResourcePoolsHelper + .createResourcePools(100L)); final Cache cache = cacheManager.createCache("cache2", cache2Configuration); final CacheConfiguration cacheConfiguration = cacheManager.getRuntimeConfiguration() .getCacheConfigurations() @@ -723,8 +717,7 @@ public void testCachesAddedAtRuntimeGetReInited() { when(store.getConfigurationChangeListeners()).thenReturn(new ArrayList<>()); when(cacheEventNotificationListenerServiceProvider.createCacheEventDispatcher(store)).thenReturn(mock(CacheEventDispatcher.class)); - CacheConfiguration cache1Configuration = new BaseCacheConfiguration<>(Long.class, String.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + CacheConfiguration cache1Configuration = new TestCacheConfig<>(Long.class, String.class); Map> caches = newCacheMap(); caches.put("cache1", cache1Configuration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -734,13 +727,13 @@ public void testCachesAddedAtRuntimeGetReInited() { mock(WriteBehindProvider.class), cacheEventNotificationListenerServiceProvider, mock(CacheEventListenerProvider.class), - mock(LocalPersistenceService.class) + mock(LocalPersistenceService.class), + mock(ResilienceStrategyProvider.class) )); cacheManager.init(); - CacheConfiguration cache2Configuration = new BaseCacheConfiguration<>(Long.class, String.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + CacheConfiguration cache2Configuration = new TestCacheConfig<>(Long.class, String.class, ResourcePoolsHelper.createResourcePools(100L)); cacheManager.createCache("cache2", cache2Configuration); cacheManager.removeCache("cache1"); @@ -768,13 +761,13 @@ public void testCloseWhenRuntimeCacheCreationFails() throws Exception { mock(WriteBehindProvider.class), mock(CacheEventDispatcherFactory.class), mock(CacheEventListenerProvider.class), - mock(LocalPersistenceService.class) + mock(LocalPersistenceService.class), + mock(ResilienceStrategyProvider.class) )); cacheManager.init(); - CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Long.class, String.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Long.class, String.class); try { cacheManager.createCache("cache", cacheConfiguration); @@ -794,8 +787,7 @@ public void testCloseWhenCacheCreationFailsDuringInitialization() throws Excepti when(storeProvider.rank(any(Set.class), any(Collection.class))).thenReturn(1); doThrow(new Error("Test EhcacheManager close.")).when(storeProvider).createStore(any(Store.Configuration.class), ArgumentMatchers.any()); - CacheConfiguration cacheConfiguration = new BaseCacheConfiguration<>(Long.class, String.class, null, null, null, ResourcePoolsHelper - .createHeapOnlyPools()); + CacheConfiguration cacheConfiguration = new TestCacheConfig<>(Long.class, String.class); Map> caches = newCacheMap(); caches.put("cache1", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -805,7 +797,8 @@ public void testCloseWhenCacheCreationFailsDuringInitialization() throws Excepti mock(WriteBehindProvider.class), mock(CacheEventDispatcherFactory.class), mock(CacheEventListenerProvider.class), - mock(LocalPersistenceService.class) + mock(LocalPersistenceService.class), + mock(ResilienceStrategyProvider.class) )); final CountDownLatch countDownLatch = new CountDownLatch(1); @@ -838,10 +831,8 @@ public void testDestroyCacheFailsIfAlreadyInMaintenanceMode() throws CachePersis thread.start(); thread.join(1000); - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage("State is MAINTENANCE, yet you don't own it!"); - - manager.destroyCache("test"); + IllegalStateException thrown = assertThrows(IllegalStateException.class, () -> manager.destroyCache("test")); + assertThat(thrown, hasProperty("message", is("State is MAINTENANCE, yet you don't own it!"))); } @Test @@ -856,10 +847,8 @@ public void testDestroyCacheFailsAndStopIfStartingServicesFails() throws CachePe EhcacheManager manager = new EhcacheManager(config, services); - expectedException.expect(StateTransitionException.class); - expectedException.expectMessage("failed"); - - manager.destroyCache("test"); + StateTransitionException thrown = assertThrows(StateTransitionException.class, () -> manager.destroyCache("test")); + assertThat(thrown, hasProperty("message", is("failed"))); assertThat(manager.getStatus(), equalTo(Status.UNINITIALIZED)); } @@ -874,7 +863,7 @@ private Collection getServices(CacheLoaderWriterProvider cacheLoaderWri WriteBehindProvider decoratorLoaderWriterProvider, Store.Provider storeProvider, CacheEventDispatcherFactory cenlProvider) { - return new ArrayList<>(Arrays.asList(cacheLoaderWriterProvider, storeProvider, decoratorLoaderWriterProvider, cenlProvider, mock(CacheEventListenerProvider.class))); + return new ArrayList<>(Arrays.asList(cacheLoaderWriterProvider, storeProvider, decoratorLoaderWriterProvider, cenlProvider, mock(CacheEventListenerProvider.class), mock(ResilienceStrategyProvider.class))); } static class NoSuchService implements Service { diff --git a/ehcache-core/src/test/java/org/ehcache/core/EhcacheTest.java b/ehcache-core/src/test/java/org/ehcache/core/EhcacheTest.java new file mode 100644 index 0000000000..91d2afda22 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/EhcacheTest.java @@ -0,0 +1,43 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core; + +import static org.mockito.Mockito.mock; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.core.events.CacheEventDispatcher; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.util.TestCacheConfig; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.slf4j.LoggerFactory; + +/** + * @author Abhilash + * + */ +public class EhcacheTest extends CacheTest { + + @Override + protected InternalCache getCache(Store store) { + final CacheConfiguration config = new TestCacheConfig<>(Object.class, Object.class); + @SuppressWarnings("unchecked") + CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); + @SuppressWarnings("unchecked") + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + return new Ehcache<>(config, store, resilienceStrategy, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheTest")); + } + +} diff --git a/core/src/test/java/org/ehcache/core/StatusTransitionerTest.java b/ehcache-core/src/test/java/org/ehcache/core/StatusTransitionerTest.java similarity index 99% rename from core/src/test/java/org/ehcache/core/StatusTransitionerTest.java rename to ehcache-core/src/test/java/org/ehcache/core/StatusTransitionerTest.java index 415c8c2d43..a58e6e9e71 100644 --- a/core/src/test/java/org/ehcache/core/StatusTransitionerTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/StatusTransitionerTest.java @@ -31,8 +31,8 @@ import static java.util.Arrays.asList; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; diff --git a/ehcache-core/src/test/java/org/ehcache/core/UserManagedCacheTest.java b/ehcache-core/src/test/java/org/ehcache/core/UserManagedCacheTest.java new file mode 100644 index 0000000000..8bc98b97da --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/UserManagedCacheTest.java @@ -0,0 +1,99 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core; + +import org.ehcache.Status; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.core.events.CacheEventDispatcher; +import org.ehcache.core.spi.store.Store; +import org.ehcache.StateTransitionException; +import org.ehcache.core.spi.LifeCycled; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.hamcrest.CoreMatchers; +import org.junit.Test; +import org.slf4j.LoggerFactory; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.verify; + +@SuppressWarnings({ "unchecked", "rawtypes" }) +public class UserManagedCacheTest { + + @Test + public void testUserManagedCacheDelegatesLifecycleCallsToStore() throws Exception { + final Store store = mock(Store.class); + CacheConfiguration config = mock(CacheConfiguration.class); + Ehcache ehcache = new Ehcache(config, store, mock(ResilienceStrategy.class), mock(CacheEventDispatcher.class), LoggerFactory.getLogger(Ehcache.class + "testUserManagedCacheDelegatesLifecycleCallsToStore")); + assertCacheDelegatesLifecycleCallsToStore(ehcache); + + Ehcache ehcacheWithLoaderWriter = new Ehcache(config, store, mock(ResilienceStrategy.class), + mock(CacheEventDispatcher.class), LoggerFactory.getLogger(Ehcache.class + "testUserManagedCacheDelegatesLifecycleCallsToStore"), + mock(CacheLoaderWriter.class)); + assertCacheDelegatesLifecycleCallsToStore(ehcacheWithLoaderWriter); + } + + private void assertCacheDelegatesLifecycleCallsToStore(InternalCache cache) throws Exception { + final LifeCycled mock = mock(LifeCycled.class); + cache.addHook(mock); + cache.init(); + verify(mock).init(); + cache.close(); + verify(mock).close(); + } + + @Test + public void testUserManagedEhcacheFailingTransitionGoesToLowestStatus() throws Exception { + final Store store = mock(Store.class); + CacheConfiguration config = mock(CacheConfiguration.class); + Ehcache ehcache = new Ehcache(config, store, mock(ResilienceStrategy.class), mock(CacheEventDispatcher.class), LoggerFactory.getLogger(Ehcache.class + "testUserManagedEhcacheFailingTransitionGoesToLowestStatus")); + assertFailingTransitionGoesToLowestStatus(ehcache); + Ehcache ehcacheWithLoaderWriter = new Ehcache(config, store, mock(ResilienceStrategy.class), + mock(CacheEventDispatcher.class), LoggerFactory.getLogger(Ehcache.class + "testUserManagedCacheDelegatesLifecycleCallsToStore"), mock(CacheLoaderWriter.class)); + assertFailingTransitionGoesToLowestStatus(ehcacheWithLoaderWriter); + } + + private void assertFailingTransitionGoesToLowestStatus(InternalCache cache) throws Exception { + final LifeCycled mock = mock(LifeCycled.class); + cache.addHook(mock); + doThrow(new Exception()).when(mock).init(); + try { + cache.init(); + fail(); + } catch (StateTransitionException e) { + assertThat(cache.getStatus(), CoreMatchers.is(Status.UNINITIALIZED)); + } + + reset(mock); + cache.init(); + assertThat(cache.getStatus(), is(Status.AVAILABLE)); + doThrow(new Exception()).when(mock).close(); + try { + cache.close(); + fail(); + } catch (StateTransitionException e) { + assertThat(cache.getStatus(), is(Status.UNINITIALIZED)); + } + + } + +} diff --git a/core/src/test/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMapTest.java b/ehcache-core/src/test/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMapTest.java similarity index 99% rename from core/src/test/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMapTest.java rename to ehcache-core/src/test/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMapTest.java index 0b65f80240..0f8bd3c5bb 100644 --- a/core/src/test/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMapTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/collections/ConcurrentWeakIdentityHashMapTest.java @@ -25,12 +25,12 @@ import java.util.Set; import java.util.concurrent.ConcurrentMap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; /** * @author Alex Snaps diff --git a/ehcache-core/src/test/java/org/ehcache/core/config/CoreConfigurationBuilderTest.java b/ehcache-core/src/test/java/org/ehcache/core/config/CoreConfigurationBuilderTest.java new file mode 100644 index 0000000000..92d3a9969f --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/config/CoreConfigurationBuilderTest.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.config; + +import org.ehcache.config.Configuration; +import org.ehcache.core.util.ClassLoading; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.mockito.Mockito.mock; + +public class CoreConfigurationBuilderTest { + + @Test + public void testWithClassLoader() { + ClassLoader classLoader = mock(ClassLoader.class); + + Configuration configuration = new CoreConfigurationBuilder<>() + .withClassLoader(classLoader) + .build(); + + assertThat(configuration.getClassLoader(), sameInstance(classLoader)); + } + + @Test + public void testWithDefaultClassLoader() { + ClassLoader classLoader = mock(ClassLoader.class); + + Configuration configuration = new CoreConfigurationBuilder<>() + .withClassLoader(classLoader) + .withDefaultClassLoader() + .build(); + + assertThat(configuration.getClassLoader(), sameInstance(ClassLoading.getDefaultClassLoader())); + } + +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/config/ExpiryUtilsTest.java b/ehcache-core/src/test/java/org/ehcache/core/config/ExpiryUtilsTest.java new file mode 100644 index 0000000000..1de47e2845 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/config/ExpiryUtilsTest.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.config; + +import org.ehcache.expiry.ExpiryPolicy; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +import java.time.Duration; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ExpiryUtilsTest { + + @Rule + public MockitoRule rule = MockitoJUnit.rule(); + + @Mock + private ExpiryPolicy expiry; + + @Test + public void getExpiryForCreation_valid() { + Duration expected = Duration.ofMinutes(1); + when(expiry.getExpiryForCreation(1, 2)).thenReturn(expected); + Duration actual = ExpiryUtils.getExpiryForCreation(1, 2, expiry); + assertThat(actual).isEqualTo(expected); + } + + @Test + public void getExpiryForCreation_negative() { + Duration expected = Duration.ofMinutes(-1); + when(expiry.getExpiryForCreation(1, 2)).thenReturn(expected); + Duration actual = ExpiryUtils.getExpiryForCreation(1, 2, expiry); + assertThat(actual).isEqualTo(Duration.ZERO); + } + + @Test + public void getExpiryForCreation_zero() { + Duration expected = Duration.ZERO; + when(expiry.getExpiryForCreation(1, 2)).thenReturn(expected); + Duration actual = ExpiryUtils.getExpiryForCreation(1, 2, expiry); + assertThat(actual).isEqualTo(expected); + } + + @Test + public void getExpiryForCreation_null() { + when(expiry.getExpiryForCreation(1, 2)).thenReturn(null); + Duration actual = ExpiryUtils.getExpiryForCreation(1, 2, expiry); + assertThat(actual).isEqualTo(Duration.ZERO); + } + + @Test + public void getExpiryForCreation_exception() { + when(expiry.getExpiryForCreation(1, 2)).thenThrow(new RuntimeException()); + Duration actual = ExpiryUtils.getExpiryForCreation(1, 2, expiry); + assertThat(actual).isEqualTo(Duration.ZERO); + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/config/ResourcePoolsHelper.java b/ehcache-core/src/test/java/org/ehcache/core/config/ResourcePoolsHelper.java new file mode 100644 index 0000000000..fc7f023774 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/config/ResourcePoolsHelper.java @@ -0,0 +1,80 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.config; + +import org.ehcache.config.ResourcePool; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.ResourceType; +import org.ehcache.config.ResourceUnit; +import org.ehcache.config.SizedResourcePool; +import org.ehcache.config.units.EntryUnit; + +import java.util.Collections; +import java.util.Set; + +/** + * @author Ludovic Orban + */ +public class ResourcePoolsHelper { + + public static ResourcePools createResourcePools(long size) { + return new ResourcePools() { + @Override @SuppressWarnings("unchecked") + public

                                  P getPoolForResource(ResourceType

                                  resourceType) { + if (ResourceType.Core.HEAP.equals(resourceType)) { + return (P) new SizedResourcePool() { + @Override + public long getSize() { + return size; + } + + @Override + public ResourceUnit getUnit() { + return EntryUnit.ENTRIES; + } + + @Override + public ResourceType getType() { + return ResourceType.Core.HEAP; + } + + @Override + public boolean isPersistent() { + return false; + } + + @Override + public void validateUpdate(ResourcePool newPool) { + //all updates are okay + } + }; + } else { + return null; + } + } + + @Override + public Set> getResourceTypeSet() { + return Collections.singleton(ResourceType.Core.HEAP); + } + + @Override + public ResourcePools validateAndMerge(ResourcePools toBeUpdated) throws IllegalArgumentException, UnsupportedOperationException { + return toBeUpdated; + } + }; + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/config/store/StoreStatisticsConfigurationTest.java b/ehcache-core/src/test/java/org/ehcache/core/config/store/StoreStatisticsConfigurationTest.java new file mode 100644 index 0000000000..cc3215e927 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/config/store/StoreStatisticsConfigurationTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.config.store; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class StoreStatisticsConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + StoreStatisticsConfiguration configuration = new StoreStatisticsConfiguration(true); + StoreStatisticsConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.isOperationStatisticsEnabled(), is(configuration.isOperationStatisticsEnabled())); + } +} diff --git a/core/src/test/java/org/ehcache/core/events/CacheEventsTest.java b/ehcache-core/src/test/java/org/ehcache/core/events/CacheEventsTest.java similarity index 100% rename from core/src/test/java/org/ehcache/core/events/CacheEventsTest.java rename to ehcache-core/src/test/java/org/ehcache/core/events/CacheEventsTest.java diff --git a/ehcache-core/src/test/java/org/ehcache/core/exceptions/ExceptionFactoryTest.java b/ehcache-core/src/test/java/org/ehcache/core/exceptions/ExceptionFactoryTest.java new file mode 100644 index 0000000000..fb67f2d389 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/exceptions/ExceptionFactoryTest.java @@ -0,0 +1,55 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.exceptions; + +import org.ehcache.spi.loaderwriter.CacheLoadingException; +import org.ehcache.spi.loaderwriter.CacheWritingException; +import org.junit.Test; + +import static org.assertj.core.api.Assertions.*; + +/** + * @author Henri Tremblay + */ +public class ExceptionFactoryTest { + + private final Exception cause = new Exception(); + private final Exception suppressed = new Exception(); + + @Test + public void newCacheWritingException() { + CacheWritingException e = ExceptionFactory.newCacheWritingException(cause); + assertThat(e).hasCause(cause); + } + + @Test + public void newCacheLoadingException() { + CacheLoadingException e = ExceptionFactory.newCacheLoadingException(cause); + assertThat(e).hasCause(cause); + } + + @Test + public void newCacheWritingException1() { + CacheWritingException e = ExceptionFactory.newCacheWritingException(cause, suppressed); + assertThat(e).hasCause(cause); + } + + @Test + public void newCacheLoadingException1() { + CacheLoadingException e = ExceptionFactory.newCacheLoadingException(cause, suppressed); + assertThat(e).hasCause(cause); + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/exceptions/StorePassThroughExceptionTest.java b/ehcache-core/src/test/java/org/ehcache/core/exceptions/StorePassThroughExceptionTest.java new file mode 100644 index 0000000000..bdd5e3fc6e --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/exceptions/StorePassThroughExceptionTest.java @@ -0,0 +1,52 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.exceptions; + +import org.ehcache.spi.resilience.StoreAccessException; +import org.junit.Test; + +import static org.assertj.core.api.Assertions.*; + +public class StorePassThroughExceptionTest { + + @Test + public void fillInStackTrace() { + StorePassThroughException e = new StorePassThroughException(new Exception()); + assertThat(e.getStackTrace()).isEmpty(); + } + + @Test + public void handleException_runtimeWrappedInStoreAccessException() { + RuntimeException re = new RuntimeException(); + StoreAccessException sae = StorePassThroughException.handleException(re); + assertThat(sae.getCause()).isSameAs(re); + } + + @Test + public void handleException_storePassThroughExceptionUnwrappedIfRuntime() { + RuntimeException re = new RuntimeException(); + assertThatExceptionOfType(RuntimeException.class) + .isThrownBy(() -> StorePassThroughException.handleException(new StorePassThroughException(re))) + .isSameAs(re); + } + + @Test + public void handleException_storePassThroughExceptionCauseWrapped() { + Exception e = new Exception(); + StoreAccessException sae = StorePassThroughException.handleException(new StorePassThroughException(e)); + assertThat(sae.getCause()).isSameAs(e); + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/internal/util/CollectionUtilTest.java b/ehcache-core/src/test/java/org/ehcache/core/internal/util/CollectionUtilTest.java new file mode 100644 index 0000000000..af5f56349e --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/internal/util/CollectionUtilTest.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.internal.util; + +import org.ehcache.core.util.CollectionUtil; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Collections; + +import static org.assertj.core.api.Assertions.*; + +public class CollectionUtilTest { + + @Test + public void findBestCollectionSize_sizeable() { + int size = CollectionUtil.findBestCollectionSize(Arrays.asList(1, 2 ,3), 100); + assertThat(size).isEqualTo(3); + } + + @Test + public void findBestCollectionSize_empty() { + int size = CollectionUtil.findBestCollectionSize(Collections.emptySet(), 100); + assertThat(size).isZero(); + } + + @Test + public void findBestCollectionSize_singleton() { + int size = CollectionUtil.findBestCollectionSize(Collections.singleton(1), 100); + assertThat(size).isEqualTo(1); + } + + @Test + public void findBestCollectionSize_notSizeable() { + int size = CollectionUtil.findBestCollectionSize(() -> null, 100); + assertThat(size).isEqualTo(100); + } +} diff --git a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java b/ehcache-core/src/test/java/org/ehcache/core/spi/ServiceLocatorPluralTest.java similarity index 97% rename from core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/ServiceLocatorPluralTest.java index 6a739d1776..11c14f3f95 100644 --- a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/ServiceLocatorPluralTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.core.internal.service; +package org.ehcache.core.spi; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.PluralService; @@ -23,17 +23,15 @@ import org.hamcrest.Matchers; import org.junit.Test; -import java.util.Collection; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.isOneOf; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/ServiceLocatorTest.java b/ehcache-core/src/test/java/org/ehcache/core/spi/ServiceLocatorTest.java new file mode 100644 index 0000000000..6ff0384d7a --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/ServiceLocatorTest.java @@ -0,0 +1,595 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi; + +import java.io.IOException; +import java.net.URL; +import java.util.Collection; +import java.util.Enumeration; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +import org.ehcache.core.spi.services.TestMandatoryServiceFactory; +import org.ehcache.core.spi.services.ranking.RankServiceB; +import org.ehcache.core.spi.services.ranking.RankServiceA; +import org.ehcache.core.Ehcache; +import org.ehcache.core.spi.store.CacheProvider; +import org.ehcache.spi.service.OptionalServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.core.spi.services.DefaultTestProvidedService; +import org.ehcache.core.spi.services.DefaultTestService; +import org.ehcache.core.spi.services.FancyCacheProvider; +import org.ehcache.core.spi.services.TestProvidedService; +import org.ehcache.core.spi.services.TestService; +import org.hamcrest.CoreMatchers; +import org.junit.Test; + +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.sameInstance; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.withSettings; + +/** + * Tests for {@link ServiceLocator}. + */ +public class ServiceLocatorTest { + + @Test + public void testClassHierarchies() { + ServiceLocator.DependencySet dependencySet = dependencySet(); + final Service service = new ChildTestService(); + dependencySet.with(service); + assertThat(dependencySet.providerOf(FooProvider.class), sameInstance(service)); + final Service fancyCacheProvider = new FancyCacheProvider(); + dependencySet.with(fancyCacheProvider); + + final Collection servicesOfType = dependencySet.providersOf(CacheProvider.class); + assertThat(servicesOfType, is(not(empty()))); + assertThat(servicesOfType.iterator().next(), sameInstance(fancyCacheProvider)); + } + + @Test + public void testDoesNotUseTCCL() { + Thread.currentThread().setContextClassLoader(new ClassLoader() { + @Override + public Enumeration getResources(String name) throws IOException { + throw new AssertionError(); + } + }); + + dependencySet().with(TestService.class).build().getService(TestService.class); + } + + @Test + public void testAttemptsToStopStartedServicesOnInitFailure() { + Service s1 = new ParentTestService(); + FancyCacheProvider s2 = new FancyCacheProvider(); + + ServiceLocator locator = dependencySet().with(s1).with(s2).build(); + try { + locator.startAllServices(); + fail(); + } catch (Exception e) { + // see org.ehcache.spi.ParentTestService.start() + assertThat(e, instanceOf(RuntimeException.class)); + assertThat(e.getMessage(), is("Implement me!")); + } + assertThat(s2.startStopCounter, is(0)); + } + + @Test + public void testAttemptsToStopAllServicesOnCloseFailure() { + Service s1 = mock(CacheProvider.class); + Service s2 = mock(FooProvider.class); + Service s3 = mock(CacheLoaderWriterProvider.class); + + ServiceLocator locator = dependencySet().with(s1).with(s2).with(s3).build(); + try { + locator.startAllServices(); + } catch (Exception e) { + fail(); + } + final RuntimeException thrown = new RuntimeException(); + doThrow(thrown).when(s1).stop(); + + try { + locator.stopAllServices(); + fail(); + } catch (Exception e) { + assertThat(e, CoreMatchers.sameInstance(thrown)); + } + verify(s1).stop(); + verify(s2).stop(); + verify(s3).stop(); + } + + @Test + public void testStopAllServicesOnlyStopsEachServiceOnce() throws Exception { + Service s1 = mock(CacheProvider.class, withSettings().extraInterfaces(CacheLoaderWriterProvider.class)); + + ServiceLocator locator = dependencySet().with(s1).build(); + try { + locator.startAllServices(); + } catch (Exception e) { + fail(); + } + + locator.stopAllServices(); + verify(s1, times(1)).stop(); + } + + @Test + public void testCanOverrideDefaultServiceFromServiceLoader() { + ServiceLocator locator = dependencySet().with(new ExtendedTestService()).build(); + TestService testService = locator.getService(TestService.class); + assertThat(testService, instanceOf(ExtendedTestService.class)); + } + + @Test + public void testCanOverrideServiceDependencyWithoutOrderingProblem() throws Exception { + final AtomicBoolean started = new AtomicBoolean(false); + ServiceLocator serviceLocator = dependencySet().with(new TestServiceConsumerService()) + .with(new TestService() { + @Override + public void start(ServiceProvider serviceProvider) { + started.set(true); + } + + @Override + public void stop() { + // no-op + } + }).build(); + serviceLocator.startAllServices(); + assertThat(started.get(), is(true)); + } + + @Test + public void testServicesInstanciatedOnceAndStartedOnce() throws Exception { + + @ServiceDependencies(TestProvidedService.class) + class Consumer1 implements Service { + @Override + public void start(ServiceProvider serviceProvider) { + } + + @Override + public void stop() { + + } + } + + @ServiceDependencies(TestProvidedService.class) + class Consumer2 implements Service { + TestProvidedService testProvidedService; + @Override + public void start(ServiceProvider serviceProvider) { + testProvidedService = serviceProvider.getService(TestProvidedService.class); + } + + @Override + public void stop() { + + } + } + + Consumer1 consumer1 = spy(new Consumer1()); + Consumer2 consumer2 = new Consumer2(); + ServiceLocator.DependencySet dependencySet = dependencySet(); + + // add some services + dependencySet.with(consumer1); + dependencySet.with(consumer2); + dependencySet.with(new TestService() { + @Override + public void start(ServiceProvider serviceProvider) { + } + + @Override + public void stop() { + // no-op + } + }); + + // simulate what is done in ehcachemanager + dependencySet.with(TestService.class); + ServiceLocator serviceLocator = dependencySet.build(); + serviceLocator.startAllServices(); + + serviceLocator.stopAllServices(); + + verify(consumer1, times(1)).start(serviceLocator); + verify(consumer1, times(1)).stop(); + + assertThat(consumer2.testProvidedService.ctors(), greaterThanOrEqualTo(1)); + assertThat(consumer2.testProvidedService.stops(), equalTo(1)); + assertThat(consumer2.testProvidedService.starts(), equalTo(1)); + } + + @Test + public void testRedefineDefaultServiceWhileDependingOnIt() throws Exception { + ServiceLocator serviceLocator = dependencySet().with(new YetAnotherCacheProvider()).build(); + + serviceLocator.startAllServices(); + } + + @Test(expected = IllegalStateException.class) + public void testCircularDeps() throws Exception { + + final class StartStopCounter { + final AtomicInteger startCounter = new AtomicInteger(0); + final AtomicReference> startServiceProvider = new AtomicReference<>(); + final AtomicInteger stopCounter = new AtomicInteger(0); + public void countStart(ServiceProvider serviceProvider) { + startCounter.incrementAndGet(); + startServiceProvider.set(serviceProvider); + } + public void countStop() { + stopCounter.incrementAndGet(); + } + } + + @ServiceDependencies(TestProvidedService.class) + class Consumer1 implements Service { + final StartStopCounter startStopCounter = new StartStopCounter(); + @Override + public void start(ServiceProvider serviceProvider) { + assertThat(serviceProvider.getService(TestProvidedService.class), is(notNullValue())); + startStopCounter.countStart(serviceProvider); + } + @Override + public void stop() { + startStopCounter.countStop(); + } + } + + @ServiceDependencies(Consumer1.class) + class Consumer2 implements Service { + final StartStopCounter startStopCounter = new StartStopCounter(); + @Override + public void start(ServiceProvider serviceProvider) { + assertThat(serviceProvider.getService(Consumer1.class), is(notNullValue())); + startStopCounter.countStart(serviceProvider); + } + @Override + public void stop() { + startStopCounter.countStop(); + } + } + + @ServiceDependencies(Consumer2.class) + class MyTestProvidedService extends DefaultTestProvidedService { + final StartStopCounter startStopCounter = new StartStopCounter(); + @Override + public void start(ServiceProvider serviceProvider) { + assertThat(serviceProvider.getService(Consumer2.class), is(notNullValue())); + startStopCounter.countStart(serviceProvider); + super.start(serviceProvider); + } + @Override + public void stop() { + startStopCounter.countStop(); + super.stop(); + } + } + + @ServiceDependencies(DependsOnMe.class) + class DependsOnMe implements Service { + final StartStopCounter startStopCounter = new StartStopCounter(); + @Override + public void start(ServiceProvider serviceProvider) { + assertThat(serviceProvider.getService(DependsOnMe.class), sameInstance(this)); + startStopCounter.countStart(serviceProvider); + } + @Override + public void stop() { + startStopCounter.countStop(); + } + } + + ServiceLocator.DependencySet dependencySet = dependencySet(); + + Consumer1 consumer1 = new Consumer1(); + Consumer2 consumer2 = new Consumer2(); + MyTestProvidedService myTestProvidedService = new MyTestProvidedService(); + DependsOnMe dependsOnMe = new DependsOnMe(); + + // add some services + dependencySet.with(consumer1); + dependencySet.with(consumer2); + dependencySet.with(myTestProvidedService); + dependencySet.with(dependsOnMe); + + ServiceLocator serviceLocator = dependencySet.build(); + // simulate what is done in ehcachemanager + serviceLocator.startAllServices(); + + serviceLocator.stopAllServices(); + + assertThat(consumer1.startStopCounter.startCounter.get(), is(1)); + assertThat(consumer1.startStopCounter.startServiceProvider.get(), CoreMatchers.>is(serviceLocator)); + assertThat(consumer2.startStopCounter.startCounter.get(), is(1)); + assertThat(consumer2.startStopCounter.startServiceProvider.get(), CoreMatchers.>is(serviceLocator)); + assertThat(myTestProvidedService.startStopCounter.startCounter.get(), is(1)); + assertThat(myTestProvidedService.startStopCounter.startServiceProvider.get(), CoreMatchers.>is(serviceLocator)); + assertThat(dependsOnMe.startStopCounter.startCounter.get(), is(1)); + assertThat(dependsOnMe.startStopCounter.startServiceProvider.get(), CoreMatchers.>is(serviceLocator)); + + assertThat(consumer1.startStopCounter.stopCounter.get(), is(1)); + assertThat(consumer2.startStopCounter.stopCounter.get(), is(1)); + assertThat(myTestProvidedService.startStopCounter.stopCounter.get(), is(1)); + assertThat(dependsOnMe.startStopCounter.stopCounter.get(), is(1)); + } + + @Test + public void testAbsentOptionalDepGetIgnored() { + ServiceLocator serviceLocator = dependencySet().with(new ServiceWithOptionalDeps()).build(); + + assertThat(serviceLocator.getService(ServiceWithOptionalDeps.class), is(notNullValue())); + assertThat(serviceLocator.getService(TestService.class), is(notNullValue())); + assertThat(serviceLocator.getService(OptService1.class), is(nullValue())); + assertThat(serviceLocator.getService(OptService2.class), is(nullValue())); + } + + @Test + public void testPresentOptionalDepGetLoaded() { + ServiceLocator serviceLocator = dependencySet().with(new ServiceWithOptionalDeps()).with(new OptService1()).with(new OptService2()).build(); + + assertThat(serviceLocator.getService(ServiceWithOptionalDeps.class), is(notNullValue())); + assertThat(serviceLocator.getService(TestService.class), is(notNullValue())); + assertThat(serviceLocator.getService(OptService1.class), is(notNullValue())); + assertThat(serviceLocator.getService(OptService2.class), is(notNullValue())); + } + + @Test + public void testMixedPresentAndAbsentOptionalDepGetLoadedAndIgnored() { + ServiceLocator serviceLocator = dependencySet().with(new ServiceWithOptionalDeps()).with(new OptService2()).build(); + + assertThat(serviceLocator.getService(ServiceWithOptionalDeps.class), is(notNullValue())); + assertThat(serviceLocator.getService(TestService.class), is(notNullValue())); + assertThat(serviceLocator.getService(OptService1.class), is(nullValue())); + assertThat(serviceLocator.getService(OptService2.class), is(notNullValue())); + } + + @Test + public void testOptionalDepWithAbsentClass() { + ServiceLocator serviceLocator = dependencySet().with(new ServiceWithOptionalNonExistentDeps()).with(new OptService2()).build(); + + assertThat(serviceLocator.getService(ServiceWithOptionalNonExistentDeps.class), is(notNullValue())); + assertThat(serviceLocator.getService(TestService.class), is(notNullValue())); + assertThat(serviceLocator.getService(OptService2.class), is(notNullValue())); + } + + @Test + public void testManadatoryDependencyIsAddedToEmptySet() { + ServiceLocator serviceLocator = dependencySet().build(); + + TestMandatoryServiceFactory.TestMandatoryService service = serviceLocator.getService(TestMandatoryServiceFactory.TestMandatoryService.class); + assertThat(service, notNullValue()); + assertThat(service.getConfig(), nullValue()); + } + + @Test + public void testManadatoryDependenciesCanBeDisabled() { + ServiceLocator serviceLocator = dependencySet().withoutMandatoryServices().build(); + + assertThat(serviceLocator.getService(TestMandatoryServiceFactory.TestMandatoryService.class), nullValue()); + } + + @Test + public void testMandatoryDependencyIsAddedToNonEmptySet() { + ServiceLocator serviceLocator = dependencySet().with(new DefaultTestService()).build(); + + TestMandatoryServiceFactory.TestMandatoryService service = serviceLocator.getService(TestMandatoryServiceFactory.TestMandatoryService.class); + assertThat(service, notNullValue()); + assertThat(service.getConfig(), nullValue()); + } + + @Test + public void testMandatoryDependencyCanStillBeRequested() { + ServiceLocator serviceLocator = dependencySet().with(TestMandatoryServiceFactory.TestMandatoryService.class).build(); + + TestMandatoryServiceFactory.TestMandatoryService service = serviceLocator.getService(TestMandatoryServiceFactory.TestMandatoryService.class); + assertThat(service, notNullValue()); + assertThat(service.getConfig(), nullValue()); + } + + @Test + public void testMandatoryDependencyWithProvidedConfigIsHonored() { + ServiceLocator serviceLocator = dependencySet().with(new TestMandatoryServiceFactory.TestMandatoryServiceConfiguration("apple")).build(); + + TestMandatoryServiceFactory.TestMandatoryService service = serviceLocator.getService(TestMandatoryServiceFactory.TestMandatoryService.class); + assertThat(service, notNullValue()); + assertThat(service.getConfig(), is("apple")); + } + + @Test + public void testMandatoryDependencyCanBeDependedOn() { + ServiceLocator serviceLocator = dependencySet().with(new NeedsMandatoryService()).build(); + + TestMandatoryServiceFactory.TestMandatoryService service = serviceLocator.getService(TestMandatoryServiceFactory.TestMandatoryService.class); + assertThat(service, notNullValue()); + assertThat(service.getConfig(), nullValue()); + assertThat(serviceLocator.getService(NeedsMandatoryService.class), notNullValue()); + } + + @Test + public void testRankedServiceOverrides() { + ServiceLocator serviceLocator = dependencySet().with(RankServiceA.class).build(); + assertThat(serviceLocator.getService(RankServiceA.class).getSource(), is("high-rank")); + } + + @Test + public void testRankedServiceOverridesMandatory() { + ServiceLocator serviceLocator = dependencySet().build(); + assertThat(serviceLocator.getService(RankServiceA.class), nullValue()); + } + + @Test + public void testRankedServiceBecomesMandatory() { + ServiceLocator serviceLocator = dependencySet().build(); + assertThat(serviceLocator.getService(RankServiceB.class), notNullValue()); + } +} + +@ServiceDependencies(TestService.class) +@OptionalServiceDependencies({ + "org.ehcache.core.spi.OptService1", + "org.ehcache.core.spi.OptService2"}) +class ServiceWithOptionalDeps implements Service { + + @Override + public void start(ServiceProvider serviceProvider) { + + } + + @Override + public void stop() { + + } +} + +@ServiceDependencies(TestService.class) +@OptionalServiceDependencies({ + "org.ehcache.core.internal.service.ServiceThatDoesNotExist", + "org.ehcache.core.spi.OptService2"}) +class ServiceWithOptionalNonExistentDeps implements Service { + + @Override + public void start(ServiceProvider serviceProvider) { + + } + + @Override + public void stop() { + + } +} + +class OptService1 implements Service { + @Override + public void start(ServiceProvider serviceProvider) { + } + @Override + public void stop() { + } +} + +class OptService2 implements Service { + @Override + public void start(ServiceProvider serviceProvider) { + } + @Override + public void stop() { + } +} + + +@ServiceDependencies(FancyCacheProvider.class) +class YetAnotherCacheProvider implements CacheProvider { + + @Override + public Ehcache createCache(Class keyClazz, Class valueClazz, ServiceConfiguration... config) { + return null; + } + + @Override + public void releaseCache(Ehcache resource) { + // no-op + } + + @Override + public void start(ServiceProvider serviceProvider) { + // no-op + } + + @Override + public void stop() { + // no-op + } +} + +class ExtendedTestService extends DefaultTestService { + +} + +interface FooProvider extends Service { + +} + +@ServiceDependencies(TestService.class) +class TestServiceConsumerService implements Service { + + @Override + public void start(ServiceProvider serviceProvider) { + assertThat(serviceProvider.getService(TestService.class), notNullValue()); + } + + @Override + public void stop() { + // no-op + } +} + +class ParentTestService implements FooProvider { + + @Override + public void start(final ServiceProvider serviceProvider) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public void stop() { } + +} + +class ChildTestService extends ParentTestService { + + @Override + public void start(final ServiceProvider serviceProvider) { + throw new UnsupportedOperationException("Implement me!"); + } +} + +@ServiceDependencies(TestMandatoryServiceFactory.TestMandatoryService.class) +class NeedsMandatoryService implements TestService { + + @Override + public void start(ServiceProvider serviceProvider) { + + } + + @Override + public void stop() { + + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/service/ServiceUtilsTest.java b/ehcache-core/src/test/java/org/ehcache/core/spi/service/ServiceUtilsTest.java new file mode 100644 index 0000000000..b91cc2fbf9 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/service/ServiceUtilsTest.java @@ -0,0 +1,100 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.service; + +import org.junit.Test; + +import java.util.Arrays; +import java.util.Collections; + +import static org.assertj.core.api.Assertions.*; + +public class ServiceUtilsTest { + + @Test + public void findAmongstArray_notFound() { + assertThat(ServiceUtils.findAmongst(String.class)).isEmpty(); + } + + @Test + public void findAmongstColl_notFound() { + assertThat(ServiceUtils.findAmongst(String.class, Collections.emptySet())).isEmpty(); + } + + @Test + public void findAmongstArray_found() { + assertThat(ServiceUtils.findAmongst(String.class, "test")).containsOnly("test"); + } + + @Test + public void findAmongstColl_found() { + assertThat(ServiceUtils.findAmongst(String.class, Collections.singleton("test"))).containsOnly("test"); + } + + @Test + public void findSingletonAmongstArray_notFound() { + assertThat(ServiceUtils.findSingletonAmongst(String.class)).isNull(); + } + + @Test + public void findSingletonAmongstArray_found() { + assertThat(ServiceUtils.findSingletonAmongst(String.class, 2, "t1")).isEqualTo("t1"); + } + + @Test + public void findSingletonAmongstArray_twoManyFound() { + assertThatThrownBy(() ->ServiceUtils.findSingletonAmongst(String.class, "t1", "t2")) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void findSingletonAmongstColl_notFound() { + assertThat(ServiceUtils.findSingletonAmongst(String.class, Collections.emptySet())).isNull(); + } + + @Test + public void findSingletonAmongstColl_found() { + assertThat(ServiceUtils.findSingletonAmongst(String.class, Arrays.asList( 2, "t1"))).isEqualTo("t1"); + } + + @Test + public void findSingletonAmongstColl_twoManyFound() { + assertThatThrownBy(() ->ServiceUtils.findSingletonAmongst(String.class, Arrays.asList("t1", "t2"))) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void findOptionalAmongstColl_notFound() { + assertThat(ServiceUtils.findOptionalAmongst(String.class, Collections.emptySet())).isEmpty(); + } + + @Test + public void findOptionalAmongstColl_found() { + assertThat(ServiceUtils.findOptionalAmongst(String.class, Arrays.asList( 2, "t1"))).contains("t1"); + } + + @Test + public void findOptionalAmongstArray_notFound() { + assertThat(ServiceUtils.findOptionalAmongst(String.class)).isEmpty(); + } + + @Test + public void findOptionalAmongstArray_found() { + assertThat(ServiceUtils.findOptionalAmongst(String.class, 2, "t1")).contains("t1"); + } + +} diff --git a/core/src/test/java/org/ehcache/core/spi/services/DefaultTestProvidedService.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/DefaultTestProvidedService.java similarity index 100% rename from core/src/test/java/org/ehcache/core/spi/services/DefaultTestProvidedService.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/services/DefaultTestProvidedService.java diff --git a/core/src/test/java/org/ehcache/core/spi/services/DefaultTestService.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/DefaultTestService.java similarity index 100% rename from core/src/test/java/org/ehcache/core/spi/services/DefaultTestService.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/services/DefaultTestService.java diff --git a/core/src/test/java/org/ehcache/core/spi/services/FancyCacheProvider.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/FancyCacheProvider.java similarity index 82% rename from core/src/test/java/org/ehcache/core/spi/services/FancyCacheProvider.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/services/FancyCacheProvider.java index a8dece1b46..954c5d66bc 100644 --- a/core/src/test/java/org/ehcache/core/spi/services/FancyCacheProvider.java +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/FancyCacheProvider.java @@ -16,7 +16,7 @@ package org.ehcache.core.spi.services; -import org.ehcache.core.EhcacheWithLoaderWriter; +import org.ehcache.core.Ehcache; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.CacheProvider; import org.ehcache.spi.service.Service; @@ -30,12 +30,12 @@ public class FancyCacheProvider implements CacheProvider { public int startStopCounter = 0; @Override - public EhcacheWithLoaderWriter createCache(Class keyClazz, Class valueClazz, ServiceConfiguration... config) { + public Ehcache createCache(Class keyClazz, Class valueClazz, ServiceConfiguration... config) { return null; } @Override - public void releaseCache(final EhcacheWithLoaderWriter resource) { + public void releaseCache(final Ehcache resource) { // } diff --git a/core/src/test/java/org/ehcache/core/spi/services/FancyCacheProviderFactory.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/FancyCacheProviderFactory.java similarity index 90% rename from core/src/test/java/org/ehcache/core/spi/services/FancyCacheProviderFactory.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/services/FancyCacheProviderFactory.java index dd351289c4..baf0e949ce 100644 --- a/core/src/test/java/org/ehcache/core/spi/services/FancyCacheProviderFactory.java +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/FancyCacheProviderFactory.java @@ -24,12 +24,12 @@ */ public class FancyCacheProviderFactory implements ServiceFactory { @Override - public FancyCacheProvider create(ServiceCreationConfiguration configuration) { + public FancyCacheProvider create(ServiceCreationConfiguration configuration) { return new FancyCacheProvider(); } @Override - public Class getServiceType() { + public Class getServiceType() { return FancyCacheProvider.class; } } diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestMandatoryServiceFactory.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestMandatoryServiceFactory.java new file mode 100644 index 0000000000..2ed51b19c7 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestMandatoryServiceFactory.java @@ -0,0 +1,84 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.services; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.spi.service.ServiceProvider; + +public class TestMandatoryServiceFactory implements ServiceFactory { + + @Override + public boolean isMandatory() { + return true; + } + + @Override + public TestMandatoryService create(ServiceCreationConfiguration configuration) { + if (configuration == null) { + return new TestMandatoryService(null); + } else { + return new TestMandatoryService(((TestMandatoryServiceConfiguration) configuration).getConfig()); + } + } + + @Override + public Class getServiceType() { + return TestMandatoryService.class; + } + + public static class TestMandatoryServiceConfiguration implements ServiceCreationConfiguration { + + private final String config; + + public TestMandatoryServiceConfiguration(String config) { + this.config = config; + } + + @Override + public Class getServiceType() { + return TestMandatoryService.class; + } + + public String getConfig() { + return config; + } + } + + public static class TestMandatoryService implements Service { + + private final String config; + + public TestMandatoryService(String config) { + this.config = config; + } + + @Override + public void start(ServiceProvider serviceProvider) { + + } + + @Override + public void stop() { + + } + + public String getConfig() { + return config; + } + } +} diff --git a/core/src/test/java/org/ehcache/core/spi/services/TestProvidedService.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestProvidedService.java similarity index 100% rename from core/src/test/java/org/ehcache/core/spi/services/TestProvidedService.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/services/TestProvidedService.java diff --git a/core/src/test/java/org/ehcache/core/spi/services/TestProvidedServiceFactory.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestProvidedServiceFactory.java similarity index 90% rename from core/src/test/java/org/ehcache/core/spi/services/TestProvidedServiceFactory.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/services/TestProvidedServiceFactory.java index 10233e0343..064bd04320 100644 --- a/core/src/test/java/org/ehcache/core/spi/services/TestProvidedServiceFactory.java +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestProvidedServiceFactory.java @@ -24,12 +24,12 @@ */ public class TestProvidedServiceFactory implements ServiceFactory { @Override - public TestProvidedService create(ServiceCreationConfiguration configuration) { + public TestProvidedService create(ServiceCreationConfiguration configuration) { return new DefaultTestProvidedService(); } @Override - public Class getServiceType() { + public Class getServiceType() { return TestProvidedService.class; } } diff --git a/core/src/test/java/org/ehcache/core/spi/services/TestService.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestService.java similarity index 100% rename from core/src/test/java/org/ehcache/core/spi/services/TestService.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/services/TestService.java diff --git a/core/src/test/java/org/ehcache/core/spi/services/TestServiceFactory.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestServiceFactory.java similarity index 89% rename from core/src/test/java/org/ehcache/core/spi/services/TestServiceFactory.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/services/TestServiceFactory.java index d056e7324a..df85d34e81 100644 --- a/core/src/test/java/org/ehcache/core/spi/services/TestServiceFactory.java +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/TestServiceFactory.java @@ -24,12 +24,12 @@ */ public class TestServiceFactory implements ServiceFactory { @Override - public TestService create(ServiceCreationConfiguration configuration) { + public TestService create(ServiceCreationConfiguration configuration) { return new DefaultTestService(); } @Override - public Class getServiceType() { - return TestService.class; + public Class getServiceType() { + return DefaultTestService.class; } } diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/HighRankServiceAFactory.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/HighRankServiceAFactory.java new file mode 100644 index 0000000000..d7753b7675 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/HighRankServiceAFactory.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.services.ranking; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +public class HighRankServiceAFactory implements ServiceFactory { + + @Override + public int rank() { + return 2; + } + + @Override + public RankServiceA create(ServiceCreationConfiguration configuration) { + return new RankServiceA("high-rank"); + } + + @Override + public Class getServiceType() { + return RankServiceA.class; + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/LowRankServiceBFactory.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/LowRankServiceBFactory.java new file mode 100644 index 0000000000..6ddbd6c7b0 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/LowRankServiceBFactory.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.services.ranking; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +public class LowRankServiceBFactory implements ServiceFactory { + + @Override + public RankServiceB create(ServiceCreationConfiguration configuration) { + return new RankServiceB("low-rank"); + } + + @Override + public Class getServiceType() { + return RankServiceB.class; + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/MandatoryHighRankServiceBFactory.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/MandatoryHighRankServiceBFactory.java new file mode 100644 index 0000000000..bdb6e2599a --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/MandatoryHighRankServiceBFactory.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.services.ranking; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +public class MandatoryHighRankServiceBFactory implements ServiceFactory { + + @Override + public boolean isMandatory() { + return true; + } + + @Override + public int rank() { + return 2; + } + + @Override + public RankServiceB create(ServiceCreationConfiguration configuration) { + return new RankServiceB("high-rank"); + } + + @Override + public Class getServiceType() { + return RankServiceB.class; + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/MandatoryLowRankServiceAFactory.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/MandatoryLowRankServiceAFactory.java new file mode 100644 index 0000000000..b691e4cff5 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/MandatoryLowRankServiceAFactory.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.services.ranking; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +public class MandatoryLowRankServiceAFactory implements ServiceFactory { + + @Override + public boolean isMandatory() { + return true; + } + + @Override + public RankServiceA create(ServiceCreationConfiguration configuration) { + return new RankServiceA("low-rank"); + } + + @Override + public Class getServiceType() { + return RankServiceA.class; + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/RankServiceA.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/RankServiceA.java new file mode 100644 index 0000000000..534f71d745 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/RankServiceA.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.services.ranking; + +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; + +public class RankServiceA implements Service { + + private final String source; + + public RankServiceA(String source) { + this.source = source; + } + + @Override + public void start(ServiceProvider serviceProvider) { + + } + + @Override + public void stop() { + + } + + public String getSource() { + return source; + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/RankServiceB.java b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/RankServiceB.java new file mode 100644 index 0000000000..60b3184b2a --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/services/ranking/RankServiceB.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.services.ranking; + +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; + +public class RankServiceB implements Service { + + private final String source; + + public RankServiceB(String source) { + this.source = source; + } + + @Override + public void start(ServiceProvider serviceProvider) { + + } + + @Override + public void stop() { + + } + + public String getSource() { + return source; + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/store/AbstractValueHolderTest.java b/ehcache-core/src/test/java/org/ehcache/core/spi/store/AbstractValueHolderTest.java new file mode 100644 index 0000000000..7105a313c4 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/store/AbstractValueHolderTest.java @@ -0,0 +1,171 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.store; + +import org.ehcache.core.spi.time.TimeSource; +import org.junit.Test; + +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; + +/** + * @author Ludovic Orban + */ +public class AbstractValueHolderTest { + + @Test + public void testCreationTime() throws Exception { + AbstractValueHolder valueHolder = newAbstractValueHolder(1000L); + + assertThat(valueHolder.creationTime(), is(1000L)); + } + + @Test + public void testExpirationTime() throws Exception { + AbstractValueHolder valueHolder = newAbstractValueHolder(0L, 1000L); + + assertThat(valueHolder.expirationTime(), is(1000L)); + } + + + @Test + public void testLastAccessTime() throws Exception { + // last access time defaults to create time + AbstractValueHolder valueHolder = newAbstractValueHolder(1000L); + + assertThat(valueHolder.lastAccessTime(), is(1000L)); + + valueHolder = newAbstractValueHolder(1000L, 0L, 2000L); + + assertThat(valueHolder.lastAccessTime(), is(2000L)); + } + + + @Test + public void testIsExpired() throws Exception { + assertThat(newAbstractValueHolder(1000L).isExpired(1000L), is(false)); + + assertThat(newAbstractValueHolder(1000L, 1001L).isExpired(1000L), is(false)); + + assertThat(newAbstractValueHolder(1000L, 1000L).isExpired(1000L), is(true)); + } + + @Test + public void testEquals() throws Exception { + assertThat(newAbstractValueHolder( 0L).equals(newAbstractValueHolder( 0L)), is(true)); + assertThat(newAbstractValueHolder( 1L).equals(newAbstractValueHolder( 0L)), is(false)); + + assertThat(newAbstractValueHolder(2L, 0L).equals(newAbstractValueHolder(2L, 0L)), is(true)); + assertThat(newAbstractValueHolder(2L, 0L).equals(newAbstractValueHolder(2L, 1L)), is(false)); + assertThat(newAbstractValueHolder(2L, 0L).equals(newAbstractValueHolder(3L, 0L)), is(false)); + + assertThat(newAbstractValueHolder(0L, 2L, 1L).equals(newAbstractValueHolder(0L, 2L, 1L)), is(true)); + assertThat(newAbstractValueHolder(1L, 2L, 1L).equals(newAbstractValueHolder(0L, 2L, 1L)), is(false)); + + assertThat(newAbstractValueHolder(0L, 3L, 1L).equals(newAbstractValueHolder(0L, 2L, 1L)), is(false)); + assertThat(newAbstractValueHolder(0L, 2L, 3L).equals(newAbstractValueHolder(0L, 2L, 1L)), is(false)); + } + + @Test + public void testSubclassEquals() throws Exception { + assertThat(new AbstractValueHolder(-1, 1L) { + @Override + public String get() { + return "aaa"; + } + + @Override + public int hashCode() { + return super.hashCode() + get().hashCode(); + } + @Override + public boolean equals(Object obj) { + if (obj instanceof AbstractValueHolder) { + AbstractValueHolder other = (AbstractValueHolder) obj; + return super.equals(obj) && get().equals(other.get()); + } + return false; + } + }.equals(new AbstractValueHolder(-1, 1L) { + @Override + public String get() { + return "aaa"; + } + + @Override + public int hashCode() { + return super.hashCode() + get().hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof AbstractValueHolder) { + AbstractValueHolder other = (AbstractValueHolder)obj; + return super.equals(obj) && get().equals(other.get()); + } + return false; + } + }), is(true)); + + } + + private AbstractValueHolder newAbstractValueHolder(long creationTime) { + return new AbstractValueHolder(-1, creationTime) { + @Override + public String get() { + throw new UnsupportedOperationException(); + } + }; + } + + private AbstractValueHolder newAbstractValueHolder(long creationTime, long expirationTime) { + return new AbstractValueHolder(-1, creationTime, expirationTime) { + @Override + public String get() { + throw new UnsupportedOperationException(); + } + }; + } + + private AbstractValueHolder newAbstractValueHolder(long creationTime, long expirationTime, long lastAccessTime) { + final AbstractValueHolder abstractValueHolder = new AbstractValueHolder(-1, creationTime, expirationTime) { + @Override + public String get() { + throw new UnsupportedOperationException(); + } + }; + abstractValueHolder.setLastAccessTime(lastAccessTime); + return abstractValueHolder; + } + + private static class TestTimeSource implements TimeSource { + + private long time = 0; + + @Override + public long getTimeMillis() { + return time; + } + + public void advanceTime(long step) { + time += step; + } + } + +} diff --git a/core/src/test/java/org/ehcache/core/spi/store/CacheProvider.java b/ehcache-core/src/test/java/org/ehcache/core/spi/store/CacheProvider.java similarity index 78% rename from core/src/test/java/org/ehcache/core/spi/store/CacheProvider.java rename to ehcache-core/src/test/java/org/ehcache/core/spi/store/CacheProvider.java index ce03876390..0db6eb7b02 100644 --- a/core/src/test/java/org/ehcache/core/spi/store/CacheProvider.java +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/store/CacheProvider.java @@ -16,7 +16,7 @@ package org.ehcache.core.spi.store; -import org.ehcache.core.EhcacheWithLoaderWriter; +import org.ehcache.core.Ehcache; import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -27,7 +27,7 @@ @PluralService public interface CacheProvider extends Service { - EhcacheWithLoaderWriter createCache(Class keyClazz, Class valueClazz, ServiceConfiguration... config); + Ehcache createCache(Class keyClazz, Class valueClazz, ServiceConfiguration... config); - void releaseCache(EhcacheWithLoaderWriter resource); + void releaseCache(Ehcache resource); } diff --git a/ehcache-core/src/test/java/org/ehcache/core/spi/time/TickingTimeSourceTest.java b/ehcache-core/src/test/java/org/ehcache/core/spi/time/TickingTimeSourceTest.java new file mode 100644 index 0000000000..52949435f7 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/spi/time/TickingTimeSourceTest.java @@ -0,0 +1,55 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.spi.time; + +import org.junit.After; +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +public class TickingTimeSourceTest { + + private final TickingTimeSource tickingTimeSource = new TickingTimeSource(1L, 1000L); + + @After + public void after() { + tickingTimeSource.stop(); + } + + @Test + public void getTimeMillis() { + + // Hard to test but let's just say... + long currentTime = System.currentTimeMillis(); + tickingTimeSource.start(null); + + long actualTime = tickingTimeSource.getTimeMillis(); + + // ... that time should start now... + assertThat(actualTime).isGreaterThanOrEqualTo(currentTime); + + // ... and increase + long end = System.currentTimeMillis() + 30_000; // 30 seconds should be way way enough to at least tick of one millisecond + while(System.currentTimeMillis() < end) { + if(tickingTimeSource.getTimeMillis() > actualTime) { + break; + } + } + + assertThat(tickingTimeSource.getTimeMillis()).isGreaterThan(actualTime); + } +} diff --git a/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java b/ehcache-core/src/test/java/org/ehcache/core/store/StoreSupportTest.java similarity index 91% rename from core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java rename to ehcache-core/src/test/java/org/ehcache/core/store/StoreSupportTest.java index ae223eac4e..ec93741041 100644 --- a/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java +++ b/ehcache-core/src/test/java/org/ehcache/core/store/StoreSupportTest.java @@ -14,11 +14,11 @@ * limitations under the License. */ -package org.ehcache.core.internal.store; +package org.ehcache.core.store; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.Service; @@ -32,12 +32,13 @@ import java.util.concurrent.atomic.AtomicInteger; import static java.util.Arrays.asList; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; -import static org.junit.Assert.*; +import static org.junit.Assert.fail; /** * Tests functionality of {@link StoreSupport} methods. @@ -80,7 +81,7 @@ public void testSelectStoreProvider() throws Exception { final ServiceLocator serviceLocator = dependencySet().with(storeProviders).build(); final Store.Provider selectedProvider = StoreSupport.selectStoreProvider(serviceLocator, Collections.>singleton(anyResourceType), - Collections.>emptyList()); + Collections.>emptyList()); assertThat(selectedProvider, is(Matchers.sameInstance(expectedProvider))); @@ -106,7 +107,7 @@ public void testSelectStoreProviderMultiple() throws Exception { try { StoreSupport.selectStoreProvider(serviceLocator, Collections.>singleton(anyResourceType), - Collections.>emptyList()); + Collections.>emptyList()); fail(); } catch (IllegalStateException e) { // expected @@ -123,7 +124,7 @@ public void testSelectStoreProviderNoProviders() throws Exception { try { StoreSupport.selectStoreProvider(dependencySet().build(), Collections.>singleton(anyResourceType), - Collections.>emptyList()); + Collections.>emptyList()); fail(); } catch (IllegalStateException e) { // expected @@ -163,7 +164,7 @@ public int getTierHeight() { try { StoreSupport.selectStoreProvider(serviceLocator, Collections.>singleton(otherResourceType), - Collections.>emptyList()); + Collections.>emptyList()); fail(); } catch (IllegalStateException e) { // expected @@ -214,7 +215,7 @@ public TestBaseProvider(final int rank) { } @Override - public Store createStore(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { + public Store createStore(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { throw new UnsupportedOperationException("TestBaseProvider.createStore not implemented"); } @@ -229,7 +230,7 @@ public void initStore(final Store resource) { } @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { + public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { assertThat(resourceTypes, is(not(nullValue()))); assertThat(serviceConfigs, is(not(nullValue()))); rankAccessCount.incrementAndGet(); diff --git a/ehcache-core/src/test/java/org/ehcache/core/util/ClassLoadingTest.java b/ehcache-core/src/test/java/org/ehcache/core/util/ClassLoadingTest.java new file mode 100644 index 0000000000..2e762b1ba3 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/util/ClassLoadingTest.java @@ -0,0 +1,135 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.util; + +import static java.util.Collections.list; +import static org.ehcache.core.util.ClassLoading.getDefaultClassLoader; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsIterableContainingInOrder.contains; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertSame; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.util.Enumeration; +import java.util.Map; +import java.util.Vector; + +import org.junit.Test; + +public class ClassLoadingTest { + + @Test + public void testDefaultClassLoader() throws Exception { + ClassLoader originalTccl = Thread.currentThread().getContextClassLoader(); + try { + String resource = getClass().getName().replace('.', '/').concat(".class"); + ClassLoader thisLoader = getClass().getClassLoader(); + ClassLoader defaultClassLoader = getDefaultClassLoader(); + + Thread.currentThread().setContextClassLoader(null); + assertSame(thisLoader.loadClass(getClass().getName()), defaultClassLoader.loadClass(getClass().getName())); + assertEquals(thisLoader.getResource(resource), defaultClassLoader.getResource(resource)); + assertThat(list(defaultClassLoader.getResources(resource)), is(list(thisLoader.getResources(resource)))); + + Thread.currentThread().setContextClassLoader(new FindNothingLoader()); + assertSame(thisLoader.loadClass(getClass().getName()), defaultClassLoader.loadClass(getClass().getName())); + assertEquals(thisLoader.getResource(resource), defaultClassLoader.getResource(resource)); + assertThat(list(defaultClassLoader.getResources(resource)), is(list(thisLoader.getResources(resource)))); + + URL url = new URL("file:///tmp"); + ClassLoader tc = new TestClassLoader(url); + Thread.currentThread().setContextClassLoader(tc); + Class c = defaultClassLoader.loadClass(getClass().getName()); + assertNotSame(getClass(), c); + assertSame(tc, c.getClassLoader()); + assertEquals(url, defaultClassLoader.getResource(resource)); + assertThat(list(defaultClassLoader.getResources(resource)), contains(url, thisLoader.getResource(resource))); + } finally { + Thread.currentThread().setContextClassLoader(originalTccl); + } + } + + @SafeVarargs + private static Enumeration enumerationOf(T... values) { + Vector v = new Vector<>(); + for (T t : values) { + v.add(t); + } + return v.elements(); + } + + private static class TestClassLoader extends ClassLoader { + private final URL url; + + TestClassLoader(URL url) { + super(null); + this.url = url; + } + + @Override + public Class findClass(String name) throws ClassNotFoundException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + byte[] buf = new byte[1024]; + + try { + InputStream is = getClass().getClassLoader().getResourceAsStream(name.replace('.', '/').concat(".class")); + int read; + while ((read = is.read(buf)) >= 0) { + baos.write(buf, 0, read); + } + } catch (IOException ioe) { + throw new ClassNotFoundException(); + } + + byte[] data = baos.toByteArray(); + return defineClass(name, data, 0, data.length); + } + + @Override + public URL getResource(String name) { + return url; + } + + @Override + public Enumeration getResources(String name) throws IOException { + return enumerationOf(url); + } + + } + + private static class FindNothingLoader extends ClassLoader { + @Override + public Class loadClass(String name) throws ClassNotFoundException { + throw new ClassNotFoundException(); + } + + @Override + public URL getResource(String name) { + return null; + } + + @Override + public Enumeration getResources(String name) throws IOException { + return new Vector().elements(); + } + } +} diff --git a/core/src/test/java/org/ehcache/core/util/IsCreated.java b/ehcache-core/src/test/java/org/ehcache/core/util/IsCreated.java similarity index 100% rename from core/src/test/java/org/ehcache/core/util/IsCreated.java rename to ehcache-core/src/test/java/org/ehcache/core/util/IsCreated.java diff --git a/core/src/test/java/org/ehcache/core/util/IsRemoved.java b/ehcache-core/src/test/java/org/ehcache/core/util/IsRemoved.java similarity index 100% rename from core/src/test/java/org/ehcache/core/util/IsRemoved.java rename to ehcache-core/src/test/java/org/ehcache/core/util/IsRemoved.java diff --git a/core/src/test/java/org/ehcache/core/util/IsUpdated.java b/ehcache-core/src/test/java/org/ehcache/core/util/IsUpdated.java similarity index 100% rename from core/src/test/java/org/ehcache/core/util/IsUpdated.java rename to ehcache-core/src/test/java/org/ehcache/core/util/IsUpdated.java diff --git a/ehcache-core/src/test/java/org/ehcache/core/util/Matchers.java b/ehcache-core/src/test/java/org/ehcache/core/util/Matchers.java new file mode 100644 index 0000000000..9ec3ee8769 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/util/Matchers.java @@ -0,0 +1,49 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.util; + +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +/** + * Matchers + */ +public class Matchers { + + public static Matcher> holding(final V value) { + return holding(equalTo(value)); + } + + public static Matcher> holding(final Matcher matcher) { + return new TypeSafeMatcher>() { + @Override + protected boolean matchesSafely(Supplier item) { + return matcher.matches(item.get()); + } + + @Override + public void describeTo(Description description) { + description.appendText("holder containing value ").appendDescriptionOf(matcher); + } + }; + } +} diff --git a/ehcache-core/src/test/java/org/ehcache/core/util/TestCacheConfig.java b/ehcache-core/src/test/java/org/ehcache/core/util/TestCacheConfig.java new file mode 100644 index 0000000000..7d7ec62d12 --- /dev/null +++ b/ehcache-core/src/test/java/org/ehcache/core/util/TestCacheConfig.java @@ -0,0 +1,262 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.core.util; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Eviction; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.FluentCacheConfigurationBuilder; +import org.ehcache.config.ResourcePools; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.ServiceConfiguration; + +import java.util.Collection; +import java.util.function.Predicate; +import java.util.function.UnaryOperator; + +import static java.util.Collections.emptyList; +import static org.ehcache.core.config.ExpiryUtils.convertToExpiry; +import static org.ehcache.core.config.ResourcePoolsHelper.createResourcePools; + +public class TestCacheConfig implements CacheConfiguration { + + private final Class keyType; + private final Class valueType; + private final ResourcePools resources; + + public TestCacheConfig(Class keyType, Class valueType) { + this(keyType, valueType, createResourcePools(100L)); + } + + public TestCacheConfig(Class keyType, Class valueType, ResourcePools resources) { + this.keyType = keyType; + this.valueType = valueType; + this.resources = resources; + } + + @Override + public Collection> getServiceConfigurations() { + return emptyList(); + } + + @Override + public Class getKeyType() { + return keyType; + } + + @Override + public Class getValueType() { + return valueType; + } + + @Override + public EvictionAdvisor getEvictionAdvisor() { + return Eviction.noAdvice(); + } + + @Override + public ClassLoader getClassLoader() { + return null; + } + + @Override @SuppressWarnings("deprecation") + public org.ehcache.expiry.Expiry getExpiry() { + return convertToExpiry(getExpiryPolicy()); + } + + @Override + public ExpiryPolicy getExpiryPolicy() { + return ExpiryPolicy.NO_EXPIRY; + } + + @Override + public ResourcePools getResourcePools() { + return resources; + } + + @Override + public Builder derive() { + return new Builder(); + } + + public class Builder implements FluentCacheConfigurationBuilder { + + @Override + public CacheConfiguration build() { + return TestCacheConfig.this; + } + + @Override + public > Collection getServices(Class configurationType) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withService(ServiceConfiguration config) { + throw new UnsupportedOperationException(); + } + + @Override + public > Builder withoutServices(Class clazz, Predicate predicate) { + throw new UnsupportedOperationException(); + } + + @Override + public > Builder updateServices(Class clazz, UnaryOperator update) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withEvictionAdvisor(EvictionAdvisor evictionAdvisor) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withClassLoader(ClassLoader classLoader) { + return new TestCacheConfig(getKeyType(), getValueType(), resources) { + @Override + public ClassLoader getClassLoader() { + return classLoader; + } + }.derive(); + } + + @Override + public Builder withDefaultClassLoader() { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withResourcePools(ResourcePools resourcePools) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder updateResourcePools(UnaryOperator update) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withExpiry(ExpiryPolicy expiry) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withLoaderWriter(CacheLoaderWriter loaderWriter) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withLoaderWriter(Class> cacheLoaderWriterClass, Object... arguments) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withoutLoaderWriter() { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withResilienceStrategy(ResilienceStrategy resilienceStrategy) { + throw new UnsupportedOperationException(); + } + + @Override @SuppressWarnings("rawtypes") + public Builder withResilienceStrategy(Class resilienceStrategyClass, Object... arguments) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withDefaultResilienceStrategy() { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withKeySerializingCopier() { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withValueSerializingCopier() { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withKeyCopier(Copier keyCopier) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withKeyCopier(Class> keyCopierClass) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withoutKeyCopier() { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withValueCopier(Copier valueCopier) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withValueCopier(Class> valueCopierClass) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withoutValueCopier() { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withKeySerializer(Serializer keySerializer) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withKeySerializer(Class> keySerializerClass) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withDefaultKeySerializer() { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withValueSerializer(Serializer valueSerializer) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withValueSerializer(Class> valueSerializerClass) { + throw new UnsupportedOperationException(); + } + + @Override + public Builder withDefaultValueSerializer() { + throw new UnsupportedOperationException(); + } + } +} diff --git a/ehcache-core/src/test/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/ehcache-core/src/test/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory new file mode 100644 index 0000000000..82823df336 --- /dev/null +++ b/ehcache-core/src/test/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory @@ -0,0 +1,24 @@ +# +# Copyright Terracotta, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +org.ehcache.core.spi.services.TestServiceFactory +org.ehcache.core.spi.services.TestProvidedServiceFactory +org.ehcache.core.spi.services.FancyCacheProviderFactory +org.ehcache.core.spi.services.TestMandatoryServiceFactory +org.ehcache.core.spi.services.ranking.HighRankServiceAFactory +org.ehcache.core.spi.services.ranking.MandatoryLowRankServiceAFactory +org.ehcache.core.spi.services.ranking.MandatoryHighRankServiceBFactory +org.ehcache.core.spi.services.ranking.LowRankServiceBFactory diff --git a/ehcache-impl/build.gradle b/ehcache-impl/build.gradle new file mode 100644 index 0000000000..3d338a724a --- /dev/null +++ b/ehcache-impl/build.gradle @@ -0,0 +1,85 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.internal-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Implementation module' + description = 'The implementation module of Ehcache 3' + } +} + +sourceSets { + unsafe +} + +sourceSets { + slowTest { + java.srcDir 'src/slow-test/java' + resources.srcDir 'src/slow-test/resources' + compileClasspath += sourceSets.test.compileClasspath + runtimeClasspath += sourceSets.test.runtimeClasspath + } +} + +task slowTest(type: Test) { + testClassesDirs = sourceSets.slowTest.output.classesDirs + classpath += sourceSets.slowTest.runtimeClasspath + + binResultsDir file("$buildDir/slow-tests-results/binary/$name") + reports.junitXml.destination = file("$buildDir/slow-tests-results") + reports.html.destination = file("$buildDir/reports/slow-tests") +} + + +dependencies { + api project(':ehcache-core') + implementation group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion + implementation group: 'org.ehcache', name: 'sizeof', version: parent.sizeofVersion + implementation group: 'org.terracotta', name: 'terracotta-utilities-tools', version: parent.terracottaUtilitiesVersion + compileOnly 'org.osgi:org.osgi.service.component.annotations:1.3.0' + testImplementation project(':core-spi-test') + testImplementation 'org.ow2.asm:asm:6.2' + testImplementation 'org.ow2.asm:asm-commons:6.2' + testImplementation ("org.terracotta:statistics:$parent.statisticVersion") + + unsafeImplementation project(':ehcache-api') + api files(sourceSets.unsafe.output.classesDirs) { + builtBy compileUnsafeJava + } +} + +jar { + from sourceSets.unsafe.output + from "$rootDir/NOTICE" + bnd ( + 'Export-Package': '!org.ehcache.impl.internal.*, org.ehcache.impl.*, org.ehcache.config.builders, ' + + 'org.ehcache.impl.internal.spi.loaderwriter', //ugly 107 induced internal export wart + 'Import-Package': '!sun.misc, !javax.annotation, *', + ) +} + +sourcesJar { + from sourceSets.unsafe.allSource +} + +compileUnsafeJava { + //no -Werror due to unsafe + options.compilerArgs = ['-Xlint:all'] +} diff --git a/impl/config/checkstyle-suppressions.xml b/ehcache-impl/config/checkstyle-suppressions.xml similarity index 100% rename from impl/config/checkstyle-suppressions.xml rename to ehcache-impl/config/checkstyle-suppressions.xml diff --git a/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java new file mode 100644 index 0000000000..f5b875f4b7 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java @@ -0,0 +1,600 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config.builders; + +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.FluentCacheConfigurationBuilder; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.config.BaseCacheConfiguration; +import org.ehcache.core.config.store.StoreEventSourceConfiguration; +import org.ehcache.core.spi.store.heap.SizeOfEngine; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.config.copy.DefaultCopierConfiguration; +import org.ehcache.impl.config.event.DefaultCacheEventDispatcherConfiguration; +import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; +import org.ehcache.impl.config.event.DefaultEventSourceConfiguration; +import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.ServiceConfiguration; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.function.Predicate; +import java.util.function.Supplier; +import java.util.function.UnaryOperator; + +import static java.util.Objects.requireNonNull; +import static java.util.stream.Collectors.toList; +import static org.ehcache.core.config.ExpiryUtils.convertToExpiryPolicy; +import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; +import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; +import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; + + +/** + * The {@code CacheConfigurationBuilder} enables building {@link CacheConfiguration}s using a fluent style. + *

                                  + * As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new + * instance without modifying the one on which the method was called. + * This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere. + */ +public class CacheConfigurationBuilder implements FluentCacheConfigurationBuilder> { + + private final Collection> serviceConfigurations = new HashSet<>(); + private ExpiryPolicy expiry; + private ClassLoader classLoader = null; + private EvictionAdvisor evictionAdvisor; + private ResourcePools resourcePools; + private final Class keyType; + private final Class valueType; + + /** + * Creates a new instance ready to produce a {@link CacheConfiguration} with key type {@code } and with value type + * {@code } and which will use the {@link ResourcePools configured resources}. + * + * @param keyType the key type + * @param valueType the value type + * @param resourcePools the resources to use + * @param the key type + * @param the value type + * @return a {@code CacheConfigurationBuilder} + */ + public static CacheConfigurationBuilder newCacheConfigurationBuilder(Class keyType, Class valueType, ResourcePools resourcePools) { + return new CacheConfigurationBuilder<>(keyType, valueType, resourcePools); + } + + /** + * Creates a new instance ready to produce a {@link CacheConfiguration} with key type {@code } and with value type + * {@code } and which will use the {@link ResourcePools configured resources}, passed as a {@link ResourcePoolsBuilder}. + * + * @param keyType the key type + * @param valueType the value type + * @param resourcePoolsBuilder the resources to use, as a builder + * @param the key type + * @param the value type + * @return a {@code CacheConfigurationBuilder} + */ + public static CacheConfigurationBuilder newCacheConfigurationBuilder(Class keyType, Class valueType, Builder resourcePoolsBuilder) { + return new CacheConfigurationBuilder<>(keyType, valueType, resourcePoolsBuilder.build()); + } + + /** + * Creates a new instance ready to produce a {@link CacheConfiguration} functionally equivalent to the supplied configuration. + * + * @param configuration seed configuration + * @param the key type + * @param the value type + * @return a {@code CacheConfigurationBuilder} + */ + public static CacheConfigurationBuilder newCacheConfigurationBuilder(CacheConfiguration configuration) { + CacheConfigurationBuilder builder = newCacheConfigurationBuilder(configuration.getKeyType(), configuration.getValueType(), configuration.getResourcePools()) + .withEvictionAdvisor(configuration.getEvictionAdvisor()) + .withExpiry(configuration.getExpiryPolicy()); + ClassLoader classLoader = configuration.getClassLoader(); + if (classLoader != null) { + builder = builder.withClassLoader(classLoader); + } + for (ServiceConfiguration serviceConfig : configuration.getServiceConfigurations()) { + builder = builder.withService(serviceConfig); + } + return builder; + } + + private CacheConfigurationBuilder(Class keyType, Class valueType, ResourcePools resourcePools) { + this.keyType = keyType; + this.valueType = valueType; + this.resourcePools = resourcePools; + } + + private CacheConfigurationBuilder(CacheConfigurationBuilder other) { + this.keyType = other.keyType; + this.valueType = other.valueType; + this.expiry = other.expiry; + this.classLoader = other.classLoader; + this.evictionAdvisor = other.evictionAdvisor; + this.resourcePools = other.resourcePools; + this.serviceConfigurations.addAll(other.serviceConfigurations); + } + + /** + * Adds a {@link ServiceConfiguration} to the returned builder. + * + * @param configuration the service configuration to add + * @return a new builder with the added service configuration + * @deprecated in favor of {@link #withService(ServiceConfiguration)} + */ + @Deprecated + public CacheConfigurationBuilder add(ServiceConfiguration configuration) { + if (!getServices(configuration.getClass()).isEmpty()) { + if (configuration instanceof DefaultCopierConfiguration + || configuration instanceof DefaultSerializerConfiguration + || configuration instanceof DefaultCacheEventListenerConfiguration) { + return withService(configuration); + } else { + throw new IllegalStateException("Cannot add a generic service configuration when another one already exists. " + + "Rely on specific with* methods or make sure your remove other configuration first."); + } + } + return withService(configuration); + } + + /** + * Convenience method to add a {@link ServiceConfiguration} that is produced by a {@link Builder}. + * + * @param configurationBuilder the service configuration to add, {@link Builder#build()} will be called on it + * @return a new builder with the added service configuration + * + * @see #add(ServiceConfiguration) + * @deprecated in favor of {@link #withService(Builder)} + */ + @Deprecated + public CacheConfigurationBuilder add(Builder> configurationBuilder) { + return add(configurationBuilder.build()); + } + + @Override + public > Collection getServices(Class configurationType) throws IllegalArgumentException { + return serviceConfigurations.stream().filter(configurationType::isInstance).map(configurationType::cast).collect(toList()); + } + + @Override + public CacheConfigurationBuilder withService(ServiceConfiguration config) { + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.serviceConfigurations.removeIf(other -> !other.compatibleWith(config) || !config.compatibleWith(other)); + otherBuilder.serviceConfigurations.add(config); + return otherBuilder; + } + + @Override + public CacheConfigurationBuilder withoutServices(Class> clazz) { + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.serviceConfigurations.removeIf(clazz::isInstance); + return otherBuilder; + } + + @Override + public >CacheConfigurationBuilder withoutServices(Class clazz, Predicate predicate) { + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.serviceConfigurations.removeIf(c -> clazz.isInstance(c) && predicate.test(clazz.cast(c))); + return otherBuilder; + } + + @Override + public > CacheConfigurationBuilder updateServices(Class clazz, UnaryOperator update) { + Collection> existing = getServices(clazz); + + if (existing.isEmpty()) { + throw new IllegalStateException("Cannot update service configurations. No existing services of type: " + clazz); + } else { + CacheConfigurationBuilder otherBuilder = withoutServices(clazz); + for (ServiceConfiguration configuration : existing) { + ServiceConfiguration replacement = configuration.build(update.apply(configuration.derive())); + if (replacement == null) { + throw new NullPointerException(configuration.getClass().getSimpleName() + ".build(...) returned a null configuration instance"); + } else { + otherBuilder = otherBuilder.withService(replacement); + } + } + return otherBuilder; + } + } + + @Override + public CacheConfigurationBuilder withEvictionAdvisor(final EvictionAdvisor evictionAdvisor) { + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.evictionAdvisor = evictionAdvisor; + return otherBuilder; + } + + /** + * Removes a {@link ServiceConfiguration} from the returned builder. + * + * @param configuration the service configuration to remove + * @return a new builder without the specified configuration + * @deprecated in favor of {@link #withoutServices(Class)} or {@link #withoutServices(Class, Predicate)} + */ + @Deprecated + public CacheConfigurationBuilder remove(ServiceConfiguration configuration) { + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.serviceConfigurations.remove(configuration); + return otherBuilder; + } + + /** + * Clears all {@link ServiceConfiguration}s from the returned builder. + * + * @return a new builder with no service configurations left + * @deprecated in favor of {@link #withoutServices(Class) withoutServices(ServiceConfiguration.class)} + */ + @Deprecated @SuppressWarnings("unchecked") + public CacheConfigurationBuilder clearAllServiceConfig() { + return withoutServices((Class) ServiceConfiguration.class); + } + + /** + * Returns the first {@link ServiceConfiguration} with type matching the class passed in. + * + * @param clazz the service configuration class + * @param the type of the service configuration + * @return a matching service configuration, or {@code null} if none can be found + * @deprecated in favor of {@link #getService(Class)} + */ + @Deprecated + public > T getExistingServiceConfiguration(Class clazz) { + Iterator iterator = getServices(clazz).iterator(); + return iterator.hasNext() ? iterator.next() : null; + } + + /** + * Returns all {@link ServiceConfiguration}s of type matching the class passed in. + * + * @param clazz the service configuration class + * @param the type of the service configuration + * @return a list with service configurations + * @deprecated in favor of {@link #getServices(Class)} + */ + @Deprecated + public > List getExistingServiceConfigurations(Class clazz) { + return new ArrayList<>(getServices(clazz)); + } + + @Override + public CacheConfigurationBuilder withClassLoader(ClassLoader classLoader) { + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.classLoader = requireNonNull(classLoader); + return otherBuilder; + } + + @Override + public CacheConfigurationBuilder withDefaultClassLoader() { + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.classLoader = null; + return otherBuilder; + } + + @Override + public CacheConfigurationBuilder withResourcePools(ResourcePools resourcePools) { + if (resourcePools == null) { + throw new NullPointerException("Null resource pools"); + } + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.resourcePools = resourcePools; + return otherBuilder; + } + + @Override + public CacheConfigurationBuilder updateResourcePools(UnaryOperator update) { + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.resourcePools = update.apply(resourcePools); + return otherBuilder; + } + + /** + * Adds {@link org.ehcache.expiry.Expiry} configuration to the returned builder. + *

                                  + * {@code Expiry} is what controls data freshness in a cache. + * + * @param expiry the expiry to use + * @return a new builder with the added expiry + * + * @deprecated Use {@link #withExpiry(ExpiryPolicy)} instead + */ + @Deprecated + public CacheConfigurationBuilder withExpiry(org.ehcache.expiry.Expiry expiry) { + return withExpiry(convertToExpiryPolicy(requireNonNull(expiry, "Null expiry"))); + } + + @Override + public CacheConfigurationBuilder withExpiry(ExpiryPolicy expiry) { + if (expiry == null) { + throw new NullPointerException("Null expiry"); + } + CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); + otherBuilder.expiry = expiry; + return otherBuilder; + } + + /** + * Indicates whether this builder has configured expiry or not. + * + * @return {@code true} if expiry configured, {@code false} otherwise + */ + public boolean hasConfiguredExpiry() { + return expiry != null; + } + + @Override + public CacheConfigurationBuilder withLoaderWriter(CacheLoaderWriter loaderWriter) { + return withService(new DefaultCacheLoaderWriterConfiguration(requireNonNull(loaderWriter, "Null loaderWriter"))); + } + + @Override + public CacheConfigurationBuilder withLoaderWriter(Class> loaderWriterClass, Object... arguments) { + return withService(new DefaultCacheLoaderWriterConfiguration(requireNonNull(loaderWriterClass, "Null loaderWriterClass"), arguments)); + } + + @Override + public CacheConfigurationBuilder withoutLoaderWriter() { + return withoutServices(DefaultCacheLoaderWriterConfiguration.class); + } + + @Override + public CacheConfigurationBuilder withResilienceStrategy(ResilienceStrategy resilienceStrategy) { + return withService(new DefaultResilienceStrategyConfiguration(requireNonNull(resilienceStrategy, "Null resilienceStrategy"))); + } + + @Override @SuppressWarnings("rawtypes") + public CacheConfigurationBuilder withResilienceStrategy(Class resilienceStrategyClass, Object... arguments) { + return withService(new DefaultResilienceStrategyConfiguration(requireNonNull(resilienceStrategyClass, "Null resilienceStrategyClass"), arguments)); + } + + @Override + public CacheConfigurationBuilder withDefaultResilienceStrategy() { + return withoutServices(DefaultResilienceStrategyConfiguration.class); + } + + @Override + public CacheConfigurationBuilder withKeySerializingCopier() { + return withKeyCopier(SerializingCopier.asCopierClass()); + } + + @Override + public CacheConfigurationBuilder withValueSerializingCopier() { + return withValueCopier(SerializingCopier.asCopierClass()); + } + + @Override + public CacheConfigurationBuilder withKeyCopier(Copier keyCopier) { + return withService(new DefaultCopierConfiguration<>(requireNonNull(keyCopier, "Null key copier"), DefaultCopierConfiguration.Type.KEY)); + } + + @Override + public CacheConfigurationBuilder withKeyCopier(Class> keyCopierClass) { + return withService(new DefaultCopierConfiguration<>(requireNonNull(keyCopierClass, "Null key copier class"), DefaultCopierConfiguration.Type.KEY)); + } + + @Override + public CacheConfigurationBuilder withoutKeyCopier() { + return withoutServices(DefaultCopierConfiguration.class, c -> DefaultCopierConfiguration.Type.KEY.equals(c.getType())); + } + + @Override + public CacheConfigurationBuilder withValueCopier(Copier valueCopier) { + return withService(new DefaultCopierConfiguration<>(requireNonNull(valueCopier, "Null value copier"), DefaultCopierConfiguration.Type.VALUE)); + } + + @Override + public CacheConfigurationBuilder withValueCopier(Class> valueCopierClass) { + return withService(new DefaultCopierConfiguration<>(requireNonNull(valueCopierClass, "Null value copier class"), DefaultCopierConfiguration.Type.VALUE)); + } + + @Override + public CacheConfigurationBuilder withoutValueCopier() { + return withoutServices(DefaultCopierConfiguration.class, c -> DefaultCopierConfiguration.Type.VALUE.equals(c.getType())); + } + + @Override + public CacheConfigurationBuilder withKeySerializer(Serializer keySerializer) { + return withService(new DefaultSerializerConfiguration<>(requireNonNull(keySerializer, "Null key serializer"), DefaultSerializerConfiguration.Type.KEY)); + } + + @Override + public CacheConfigurationBuilder withKeySerializer(Class> keySerializerClass) { + return withService(new DefaultSerializerConfiguration<>(requireNonNull(keySerializerClass, "Null key serializer class"), DefaultSerializerConfiguration.Type.KEY)); + } + + @Override + public CacheConfigurationBuilder withDefaultKeySerializer() { + return withoutServices(DefaultSerializerConfiguration.class, config -> DefaultSerializerConfiguration.Type.KEY.equals(config.getType())); + } + + @Override + public CacheConfigurationBuilder withValueSerializer(Serializer valueSerializer) { + return withService(new DefaultSerializerConfiguration<>(requireNonNull(valueSerializer, "Null value serializer"), DefaultSerializerConfiguration.Type.VALUE)); + } + + @Override + public CacheConfigurationBuilder withValueSerializer(Class> valueSerializerClass) { + return withService(new DefaultSerializerConfiguration<>(requireNonNull(valueSerializerClass, "Null value serializer class"), DefaultSerializerConfiguration.Type.VALUE)); + } + + @Override + public CacheConfigurationBuilder withDefaultValueSerializer() { + return withoutServices(DefaultSerializerConfiguration.class, config -> DefaultSerializerConfiguration.Type.VALUE.equals(config.getType())); + } + + /** + * Adds {@link StoreEventSourceConfiguration} with the specified dispatcher concurrency + * to the configured builder. + * + * @param dispatcherConcurrency the level of concurrency in the dispatcher for ordered events + * @return a new builder with the added configuration + * + * @see #withDefaultDispatcherConcurrency() + */ + public CacheConfigurationBuilder withDispatcherConcurrency(int dispatcherConcurrency) { + return withService(new DefaultEventSourceConfiguration(dispatcherConcurrency)); + } + + /** + * Restores the default dispatcher concurrency. + * + * @return a new builder with the default dispatcher concurrency + * + * @see #withDispatcherConcurrency(int) + */ + public CacheConfigurationBuilder withDefaultDispatcherConcurrency() { + return withoutServices(DefaultEventSourceConfiguration.class); + } + + /** + * Adds a {@link ServiceConfiguration} for the {@link org.ehcache.core.events.CacheEventDispatcherFactory} specifying + * the thread pool alias to use. + * + * @param threadPoolAlias the thread pool alias to use + * @return a new builder with the added configuration + * + * @see #withDefaultEventListenersThreadPool() + */ + public CacheConfigurationBuilder withEventListenersThreadPool(String threadPoolAlias) { + return withService(new DefaultCacheEventDispatcherConfiguration(threadPoolAlias)); + } + + /** + * Restores the default event listener thread pool settings. + * + * @return a new builder with the default event listener thread pool settings + * + * @see #withEventListenersThreadPool(String) + */ + public CacheConfigurationBuilder withDefaultEventListenersThreadPool() { + return withoutServices(DefaultCacheEventDispatcherConfiguration.class); + } + + /** + * Adds a {@link ServiceConfiguration} for the {@link org.ehcache.impl.internal.store.disk.OffHeapDiskStore.Provider} + * indicating thread pool alias and write concurrency. + * + * @param threadPoolAlias the thread pool alias + * @param concurrency the write concurrency + * @return a new builder with the added configuration + * + * @see #withDefaultDiskStoreThreadPool() + */ + public CacheConfigurationBuilder withDiskStoreThreadPool(String threadPoolAlias, int concurrency) { + return installOrUpdate( + () -> new OffHeapDiskStoreConfiguration(threadPoolAlias, concurrency), + existing -> new OffHeapDiskStoreConfiguration(threadPoolAlias, concurrency, existing.getDiskSegments()) + ); + } + + /** + * Restores the default disk store thread pool settings. + * + * @return a new builder with the default disk store thread pool settings + * + * @see #withDiskStoreThreadPool(String, int) + */ + public CacheConfigurationBuilder withDefaultDiskStoreThreadPool() { + return withoutServices(OffHeapDiskStoreConfiguration.class); + } + + /** + * Adds or updates the {@link DefaultSizeOfEngineConfiguration} with the specified object graph maximum size to the configured + * builder. + *

                                  + * {@link SizeOfEngine} is what enables the heap tier to be sized in {@link MemoryUnit}. + * + * @param size the maximum graph size + * @return a new builder with the added / updated configuration + * + * @see #withSizeOfMaxObjectSize(long, MemoryUnit) + * @see #withDefaultSizeOfSettings() + */ + public CacheConfigurationBuilder withSizeOfMaxObjectGraph(long size) { + return installOrUpdate( + () -> new DefaultSizeOfEngineConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, size), + existing -> new DefaultSizeOfEngineConfiguration(existing.getMaxObjectSize(), existing.getUnit(), size) + ); + } + + /** + * Adds or updates the {@link DefaultSizeOfEngineConfiguration} with the specified maximum mapping size to the configured + * builder. + *

                                  + * {@link SizeOfEngine} is what enables the heap tier to be sized in {@link MemoryUnit}. + * + * @param size the maximum mapping size + * @param unit the memory unit + * @return a new builder with the added / updated configuration + * + * @see #withSizeOfMaxObjectGraph(long) + * @see #withDefaultSizeOfSettings() + */ + public CacheConfigurationBuilder withSizeOfMaxObjectSize(long size, MemoryUnit unit) { + return installOrUpdate( + () -> new DefaultSizeOfEngineConfiguration(size, unit, DEFAULT_OBJECT_GRAPH_SIZE), + existing -> new DefaultSizeOfEngineConfiguration(size, unit, existing.getMaxObjectGraphSize()) + ); + } + + /** + * Restores the default size-of settings. + * + * @return a new builder with the default size-of settings + * + * @see #withSizeOfMaxObjectGraph(long) + * @see #withSizeOfMaxObjectSize(long, MemoryUnit) + */ + public CacheConfigurationBuilder withDefaultSizeOfSettings() { + return withoutServices(DefaultSizeOfEngineConfiguration.class); + } + + @Override + public CacheConfiguration build() { + return new BaseCacheConfiguration<>(keyType, valueType, evictionAdvisor, + classLoader, expiry, resourcePools, + serviceConfigurations.toArray(new ServiceConfiguration[serviceConfigurations.size()])); + } + + private > CacheConfigurationBuilder installOrUpdate(Supplier supplier, UnaryOperator update) { + C newConfig = supplier.get(); + + @SuppressWarnings("unchecked") + Class configType = (Class) newConfig.getClass(); + if (getServices(configType).isEmpty()) { + return withService(newConfig); + } else { + return updateServices(configType, update); + } + } + +} diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheEventListenerConfigurationBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheEventListenerConfigurationBuilder.java similarity index 99% rename from impl/src/main/java/org/ehcache/config/builders/CacheEventListenerConfigurationBuilder.java rename to ehcache-impl/src/main/java/org/ehcache/config/builders/CacheEventListenerConfigurationBuilder.java index 7db9fe8c33..6a77388099 100644 --- a/impl/src/main/java/org/ehcache/config/builders/CacheEventListenerConfigurationBuilder.java +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheEventListenerConfigurationBuilder.java @@ -35,7 +35,7 @@ * instance without modifying the one on which the method was called. * This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere. */ -public class CacheEventListenerConfigurationBuilder implements Builder { +public class CacheEventListenerConfigurationBuilder implements Builder> { private EventOrdering eventOrdering; private EventFiring eventFiringMode; private Object[] listenerArguments = new Object[0]; diff --git a/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java new file mode 100644 index 0000000000..72af567224 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java @@ -0,0 +1,381 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config.builders; + +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.EhcacheManager; +import org.ehcache.core.spi.store.heap.SizeOfEngine; +import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; +import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration; +import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration; +import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; +import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +import java.io.File; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Supplier; +import java.util.function.UnaryOperator; + +import static java.util.Collections.emptySet; +import static java.util.Collections.unmodifiableSet; +import static org.ehcache.config.builders.ConfigurationBuilder.newConfigurationBuilder; +import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; +import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; +import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; + +/** + * The {@code CacheManagerBuilder} enables building cache managers using a fluent style. + *

                                  + * As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new + * instance without modifying the one on which the method was called. + * This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere. + */ +public class CacheManagerBuilder implements Builder { + + private final FluentConfigurationBuilder configBuilder; + private final Set services; + + /** + * Builds a {@link CacheManager} or a subtype of it and initializes it if requested. + * + * @param init whether the returned {@code CacheManager} is to be initialized or not + * @return a {@code CacheManager} or a subtype of it + */ + public T build(final boolean init) { + final T cacheManager = newCacheManager(services, configBuilder.build()); + if(init) { + cacheManager.init(); + } + return cacheManager; + } + + /** + * Builds a {@link CacheManager} or a subtype of it uninitialized. + * + * @return a {@code CacheManager} or a subtype of it uninitialized + */ + @Override + public T build() { + return build(false); + } + + private CacheManagerBuilder() { + this.configBuilder = newConfigurationBuilder(); + this.services = emptySet(); + } + + private CacheManagerBuilder(CacheManagerBuilder builder, Set services) { + this.configBuilder = builder.configBuilder; + this.services = unmodifiableSet(services); + } + + private CacheManagerBuilder(CacheManagerBuilder builder, FluentConfigurationBuilder configBuilder) { + this.configBuilder = configBuilder; + this.services = builder.services; + } + + /** + * Creates a new {@link CacheManager} based on the provided configuration. + * The returned {@code CacheManager} is uninitialized. + * + * @param configuration the configuration to use + * @return a {@code CacheManager} + */ + public static CacheManager newCacheManager(final Configuration configuration) { + return new EhcacheManager(configuration); + } + + T newCacheManager(Collection services, final Configuration configuration) { + final EhcacheManager ehcacheManager = new EhcacheManager(configuration, services); + return cast(ehcacheManager); + } + + @SuppressWarnings("unchecked") + T cast(EhcacheManager ehcacheManager) { + return (T) ehcacheManager; + } + + /** + * Adds a {@link CacheConfiguration} linked to the specified alias to the returned builder. + * + * @param alias the cache alias + * @param configuration the {@code CacheConfiguration} + * @param the cache key type + * @param the cache value type + * @return a new builder with the added cache configuration + * + * @see CacheConfigurationBuilder + */ + public CacheManagerBuilder withCache(String alias, CacheConfiguration configuration) { + return new CacheManagerBuilder<>(this, configBuilder.withCache(alias, configuration)); + } + + /** + * Convenience method to add a {@link CacheConfiguration} linked to the specified alias to the returned builder by + * building it from the provided {@link Builder}. + * + * @param alias the cache alias + * @param configurationBuilder the {@code Builder} to get {@code CacheConfiguration} from + * @param the cache key type + * @param the cache value type + * @return a new builder with the added cache configuration + * + * @see CacheConfigurationBuilder + */ + public CacheManagerBuilder withCache(String alias, Builder> configurationBuilder) { + return withCache(alias, configurationBuilder.build()); + } + + /** + * Specializes the returned {@link CacheManager} subtype through a specific {@link CacheManagerConfiguration} which + * will optionally add configurations to the returned builder. + * + * @param cfg the {@code CacheManagerConfiguration} to use + * @param the subtype of {@code CacheManager} + * @return a new builder ready to build a more specific subtype of cache manager + * + * @see #persistence(String) + * @see PersistentCacheManager + * @see CacheManagerPersistenceConfiguration + */ + public CacheManagerBuilder with(CacheManagerConfiguration cfg) { + return cfg.builder(this); + } + + /** + * Convenience method to specialize the returned {@link CacheManager} subtype through a {@link CacheManagerConfiguration} + * built using the provided {@link Builder}. + * + * @param cfgBuilder the {@code Builder} to get the {@code CacheManagerConfiguration} from + * @return a new builder ready to build a more specific subtype of cache manager + * + * @see CacheConfigurationBuilder + */ + public CacheManagerBuilder with(Builder> cfgBuilder) { + return with(cfgBuilder.build()); + } + + /** + * Adds a {@link Service} instance to the returned builder. + *

                                  + * The service instance will be used by the constructed {@link CacheManager}. + * + * @param service the {@code Service} to add + * @return a new builder with the added service + */ + public CacheManagerBuilder using(Service service) { + Set newServices = new HashSet<>(services); + newServices.add(service); + return new CacheManagerBuilder<>(this, newServices); + } + + /** + * Adds a default {@link Copier} for the specified type to the returned builder. + * + * @param clazz the {@code Class} for which the copier is + * @param copier the {@code Copier} instance + * @param the type which can be copied + * @return a new builder with the added default copier + */ + public CacheManagerBuilder withCopier(Class clazz, Class> copier) { + return ensureThenUpdate(DefaultCopyProviderConfiguration::new, existing -> existing.addCopierFor(clazz, copier, true)); + } + + /** + * Adds a default {@link Serializer} for the specified type to the returned builder. + * + * @param clazz the {@code Class} for which the serializer is + * @param serializer the {@code Serializer} instance + * @param the type which can be serialized + * @return a new builder with the added default serializer + */ + public CacheManagerBuilder withSerializer(Class clazz, Class> serializer) { + return ensureThenUpdate(DefaultSerializationProviderConfiguration::new, config -> config.addSerializerFor(clazz, serializer, true)); + } + + /** + * Adds a default {@link SizeOfEngine} configuration, that limits the max object graph to + * size, to the returned builder. + * + * @param size the max object graph size + * @return a new builder with the added configuration + */ + public CacheManagerBuilder withDefaultSizeOfMaxObjectGraph(long size) { + return ensureThenUpdate( + () -> new DefaultSizeOfEngineProviderConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, DEFAULT_OBJECT_GRAPH_SIZE), + existing -> new DefaultSizeOfEngineProviderConfiguration(existing.getMaxObjectSize(), existing.getUnit(), size) + ); + } + + /** + * Adds a default {@link SizeOfEngine} configuration, that limits the max object size, to + * the returned builder. + * + * @param size the max object size + * @param unit the max object size unit + * @return a new builder with the added configuration + */ + public CacheManagerBuilder withDefaultSizeOfMaxObjectSize(long size, MemoryUnit unit) { + return ensureThenUpdate( + () -> new DefaultSizeOfEngineProviderConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, DEFAULT_OBJECT_GRAPH_SIZE), + existing -> new DefaultSizeOfEngineProviderConfiguration(size, unit, existing.getMaxObjectGraphSize()) + ); + } + + /** + * Adds a {@link WriteBehindProviderConfiguration}, that specifies the thread pool to use, to the returned builder. + * + * @param threadPoolAlias the thread pool alias + * @return a new builder with the added configuration + * + * @see PooledExecutionServiceConfigurationBuilder + */ + public CacheManagerBuilder withDefaultWriteBehindThreadPool(String threadPoolAlias) { + return using(new WriteBehindProviderConfiguration(threadPoolAlias)); + } + + /** + * Adds a {@link OffHeapDiskStoreProviderConfiguration}, that specifies the thread pool to use, to the returned + * builder. + * + * @param threadPoolAlias the thread pool alias + * @return a new builder with the added configuration + * + * @see PooledExecutionServiceConfigurationBuilder + */ + public CacheManagerBuilder withDefaultDiskStoreThreadPool(String threadPoolAlias) { + return using(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias)); + } + + /** + * Adds a {@link CacheEventDispatcherFactoryConfiguration}, that specifies the thread pool to use, to the returned + * builder. + * + * @param threadPoolAlias the thread pool alias + * @return a new builder with the added configuration + * + * @see PooledExecutionServiceConfigurationBuilder + */ + public CacheManagerBuilder withDefaultEventListenersThreadPool(String threadPoolAlias) { + return using(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias)); + } + + /** + * Adds a {@link ServiceCreationConfiguration} to the returned builder. + *

                                  + * These configurations are used to load services and configure them at creation time. This method will remove any + * existing configuration incompatible with the given configuration, before adding the new configuration. + * + * @param serviceConfiguration the {@code ServiceCreationConfiguration} to use + * @return a new builder with the added configuration + * @see FluentConfigurationBuilder#withService(ServiceCreationConfiguration) + */ + public CacheManagerBuilder using(ServiceCreationConfiguration serviceConfiguration) { + return new CacheManagerBuilder<>(this, configBuilder.withService(serviceConfiguration)); + } + + /** + * Replaces an existing {@link ServiceCreationConfiguration} of the same type on the returned builder. + *

                                  + * Duplicate service creation configuration will cause a cache manager to fail to initialize. + * + * @param overwriteServiceConfiguration the new {@code ServiceCreationConfiguration} to use + * @return a new builder with the replaced configuration + * + * @deprecated in favor of {@link #using(ServiceCreationConfiguration)} whose refined contract matches this one + */ + @Deprecated + public CacheManagerBuilder replacing(ServiceCreationConfiguration overwriteServiceConfiguration) { + return using(overwriteServiceConfiguration); + } + + /** + * Adds a {@link ClassLoader}, to use for non Ehcache types, to the returned builder + * + * @param classLoader the class loader to use + * @return a new builder with the added class loader + */ + public CacheManagerBuilder withClassLoader(ClassLoader classLoader) { + return new CacheManagerBuilder<>(this, configBuilder.withClassLoader(classLoader)); + } + + private > CacheManagerBuilder ensureThenUpdate(Supplier supplier, UnaryOperator update) { + C emptyConfig = supplier.get(); + @SuppressWarnings("unchecked") + Class configType = (Class) emptyConfig.getClass(); + + FluentConfigurationBuilder fluentBuilder = configBuilder; + if (configBuilder.getService(configType) == null) { + fluentBuilder = fluentBuilder.withService(emptyConfig); + } + + return new CacheManagerBuilder<>(this, fluentBuilder.updateServices(configType, update)); + } + + /** + * Creates a new {@code CacheManagerBuilder} + * + * @return the cache manager builder + */ + public static CacheManagerBuilder newCacheManagerBuilder() { + return new CacheManagerBuilder<>(); + } + + /** + * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual + * level of persistence is configured on the disk resource pool per cache. + * + * @param rootDirectory the root directory to use for disk storage + * @return a {@code CacheManagerConfiguration} + * + * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) + * @see #with(CacheManagerConfiguration) + * @see PersistentCacheManager + */ + public static CacheManagerConfiguration persistence(String rootDirectory) { + return persistence(new File(rootDirectory)); + } + + /** + * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual + * level of persistence is configured on the disk resource pool per cache. + * + * @param rootDirectory the root directory to use for disk storage + * @return a {@code CacheManagerConfiguration} + * + * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) + * @see #with(CacheManagerConfiguration) + * @see PersistentCacheManager + */ + public static CacheManagerConfiguration persistence(File rootDirectory) { + return new CacheManagerPersistenceConfiguration(rootDirectory); + } +} diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheManagerConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/CacheManagerConfiguration.java similarity index 100% rename from impl/src/main/java/org/ehcache/config/builders/CacheManagerConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/config/builders/CacheManagerConfiguration.java diff --git a/ehcache-impl/src/main/java/org/ehcache/config/builders/ConfigurationBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/ConfigurationBuilder.java new file mode 100644 index 0000000000..bc10763298 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/ConfigurationBuilder.java @@ -0,0 +1,169 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config.builders; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.core.config.CoreConfigurationBuilder; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; + +/** + * The {@code ConfigurationBuilder} enables building {@link Configuration}s using a fluent style. + * + * @author Alex Snaps + */ +public final class ConfigurationBuilder extends CoreConfigurationBuilder { + + /** + * Create a new 'empty' configuration builder. + * + * @return a new empty configuration builder + */ + public static ConfigurationBuilder newConfigurationBuilder() { + return new ConfigurationBuilder(); + } + + /** + * Create a configuration builder seeded from the given configuration. + *

                                  + * Calling {@link #build()} on the returned builder will produce a functionally equivalent configuration to + * {@code seed}. + * + * @param seed configuration to duplicate + * @return a new configuration builder + */ + public static ConfigurationBuilder newConfigurationBuilder(Configuration seed) { + return new ConfigurationBuilder(new ConfigurationBuilder(new ConfigurationBuilder(new ConfigurationBuilder(), + seed.getCacheConfigurations()), seed.getServiceCreationConfigurations()), seed.getClassLoader()); + } + + protected ConfigurationBuilder() { + super(); + } + + protected ConfigurationBuilder(ConfigurationBuilder builder, Map> caches) { + super(builder, caches); + } + + protected ConfigurationBuilder(ConfigurationBuilder builder, Collection> serviceConfigurations) { + super(builder, serviceConfigurations); + } + + protected ConfigurationBuilder(ConfigurationBuilder builder, ClassLoader classLoader) { + super(builder, classLoader); + } + + /** + * Add a cache configuration with the given alias. + *

                                  + * If a cache with the given alias already exists then an {@code IllegalArgumentException} will be thrown. + * + * @param alias cache alias to be added + * @param config cache configuration + * @return an updated builder + * @deprecated in favor of {@link #withCache(String, CacheConfiguration)} + */ + @Deprecated + public ConfigurationBuilder addCache(String alias, CacheConfiguration config) throws IllegalArgumentException { + CacheConfiguration existing = getCache(alias); + if (existing == null) { + return withCache(alias, config); + } else { + throw new IllegalArgumentException("Cache '" + alias + "' already exists: " + existing); + } + } + + /** + * Removes the cache with the given alias. + * + * @param alias cache alias to be removed + * @return an updated builder + * @deprecated in favor of {@link #withoutCache(String)} + */ + @Deprecated + public ConfigurationBuilder removeCache(String alias) { + return withoutCache(alias); + } + + /** + * Adds the given service to this configuration. + *

                                  + * If a a service creation configuration of the same concrete type is already present then an {@code IllegalArgumentException} + * will be thrown. + * + * @param serviceConfiguration service creation configuration + * @return an updated builder + * @deprecated in favor of {@link #withService(ServiceCreationConfiguration)} + */ + @Deprecated + public ConfigurationBuilder addService(ServiceCreationConfiguration serviceConfiguration) { + ServiceCreationConfiguration existing = getService(serviceConfiguration.getClass()); + if (existing == null) { + return withService(serviceConfiguration); + } else { + throw new IllegalArgumentException("There is already an instance of " + serviceConfiguration.getClass() + " registered: " + existing.getClass()); + } + } + + /** + * Removes the given service configuration. + * + * @param serviceConfiguration service creation configuration + * @return an updated builder + * @deprecated in favor of {@link #withoutServices(Class)} or {@link #withoutServices(Class, Predicate)} + */ + @Deprecated + public ConfigurationBuilder removeService(ServiceCreationConfiguration serviceConfiguration) { + @SuppressWarnings("unchecked") + List> newServiceConfigurations = new ArrayList>(getServices((Class) ServiceCreationConfiguration.class)); + newServiceConfigurations.remove(serviceConfiguration); + return new ConfigurationBuilder(this, newServiceConfigurations); + } + + /** + * Returns {@code true} if a cache configuration is associated with the given alias. + * + * @param alias cache configuration alias + * @return {@code true} if the given alias is present + * @deprecated in favor of {@link #getCache(String)} + */ + @Deprecated + public boolean containsCache(String alias) { + return getCache(alias) != null; + } + + @Override + protected ConfigurationBuilder newBuilderWith(Map> caches) { + return new ConfigurationBuilder(this, caches); + } + + @Override + protected ConfigurationBuilder newBuilderWith(Collection> serviceConfigurations) { + return new ConfigurationBuilder(this, serviceConfigurations); + } + + @Override + protected ConfigurationBuilder newBuilderWith(ClassLoader classLoader) { + return new ConfigurationBuilder(this, classLoader); + } +} diff --git a/ehcache-impl/src/main/java/org/ehcache/config/builders/ExpiryPolicyBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/ExpiryPolicyBuilder.java new file mode 100644 index 0000000000..9d1a4b7006 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/ExpiryPolicyBuilder.java @@ -0,0 +1,285 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.config.builders; + +import org.ehcache.config.Builder; +import org.ehcache.expiry.ExpiryPolicy; + +import java.time.Duration; +import java.util.Objects; +import java.util.function.BiFunction; +import java.util.function.Supplier; + +/** + * Builder and utilities for getting predefined {@link ExpiryPolicy} instances. + */ +public final class ExpiryPolicyBuilder implements Builder>{ + + /** + * Get an {@link ExpiryPolicy} instance for a non expiring (ie. "eternal") cache. + * + * @return the no expiry instance + */ + public static ExpiryPolicy noExpiration() { + return ExpiryPolicy.NO_EXPIRY; + } + + /** + * Get a time-to-live (TTL) {@link ExpiryPolicy} instance for the given {@link Duration}. + * + * @param timeToLive the TTL duration + * @return a TTL expiry + */ + public static ExpiryPolicy timeToLiveExpiration(Duration timeToLive) { + Objects.requireNonNull(timeToLive, "TTL duration cannot be null"); + if (timeToLive.isNegative()) { + throw new IllegalArgumentException("TTL duration cannot be negative"); + } + return new TimeToLiveExpiryPolicy(timeToLive); + } + + /** + * Get a time-to-idle (TTI) {@link ExpiryPolicy} instance for the given {@link Duration}. + * + * @param timeToIdle the TTI duration + * @return a TTI expiry + */ + public static ExpiryPolicy timeToIdleExpiration(Duration timeToIdle) { + Objects.requireNonNull(timeToIdle, "TTI duration cannot be null"); + if (timeToIdle.isNegative()) { + throw new IllegalArgumentException("TTI duration cannot be negative"); + } + return new TimeToIdleExpiryPolicy(timeToIdle); + } + + @FunctionalInterface + public interface TriFunction { + /** + * Applies this function to the given arguments. + * + * @param t the first function argument + * @param u the second function argument + * @param v the third function argument + * @return the function result + */ + R apply(T t, U u, V v); + } + + /** + * Fluent API for creating an {@link ExpiryPolicy} instance where you can specify constant values for creation, access and update time. + * Unspecified values will be set to {@link ExpiryPolicy#INFINITE INFINITE} for create and {@code null} for access and update, matching + * the {@link #noExpiration()} no expiration} expiry. + * + * @return an {@link ExpiryPolicy} builder + */ + public static ExpiryPolicyBuilder expiry() { + return new ExpiryPolicyBuilder<>(); + } + + private final BiFunction create; + private final BiFunction, Duration> access; + private final TriFunction, ? super V, Duration> update; + + private ExpiryPolicyBuilder() { + this((k, v) -> ExpiryPolicy.INFINITE, (k, v) -> null, (k, oldV, newV) -> null); + } + + private ExpiryPolicyBuilder(BiFunction create, + BiFunction, Duration> access, + TriFunction, ? super V, Duration> update) { + this.create = create; + this.access = access; + this.update = update; + } + + /** + * Set TTL since creation. + *

                                  + * Note: Calling this method on a builder with an existing TTL since creation will override the previous value or function. + * + * @param create TTL since creation + * @return a new builder with the TTL since creation + */ + public ExpiryPolicyBuilder create(Duration create) { + Objects.requireNonNull(create, "Create duration cannot be null"); + if (create.isNegative()) { + throw new IllegalArgumentException("Create duration must be positive"); + } + return create((a, b) -> create); + } + + /** + * Set a function giving the TTL since creation. + *

                                  + * Note: Calling this method on a builder with an existing TTL since creation will override the previous value or function. + * + * @param create Function giving the TTL since creation + * @return a new builder with the TTL creation calculation function + */ + public ExpiryPolicyBuilder create(BiFunction create) { + return new ExpiryPolicyBuilder<>(Objects.requireNonNull(create), access, update); + } + + /** + * Set TTI since last access. + *

                                  + * Note: Calling this method on a builder with an existing TTI since last access will override the previous value or function. + * + * @param access TTI since last access + * @return a new builder with the TTI since last access + */ + public ExpiryPolicyBuilder access(Duration access) { + if (access != null && access.isNegative()) { + throw new IllegalArgumentException("Access duration must be positive"); + } + return access((a, b) -> access); + } + + /** + * Set a function giving the TTI since last access. + *

                                  + * Note: Calling this method on a builder with an existing TTI since last access will override the previous value or function. + * + * @param access Function giving the TTI since last access + * @return a new builder with the TTI since last access calculation function + */ + public ExpiryPolicyBuilder access(BiFunction, Duration> access) { + return new ExpiryPolicyBuilder<>(create, Objects.requireNonNull(access), update); + } + + /** + * Set TTL since last update. + *

                                  + * Note: Calling this method on a builder with an existing TTL since last access will override the previous value or function. + * + * @param update TTL since last update + * @return a new builder with the TTL since last update + */ + public ExpiryPolicyBuilder update(Duration update) { + if (update != null && update.isNegative()) { + throw new IllegalArgumentException("Update duration must be positive"); + } + return update((a, b, c) -> update); + } + + /** + * Set a function giving the TTL since last update. + *

                                  + * Note: Calling this method on a builder with an existing TTL since last update will override the previous value or function. + * + * @param update Function giving the TTL since last update + * @return a new builder with the TTL since last update calculation function + */ + public ExpiryPolicyBuilder update(TriFunction, V2, Duration> update) { + return new ExpiryPolicyBuilder<>(create, access, Objects.requireNonNull(update)); + } + + /** + * Builds an expiry policy instance. + * + * @return an {@link ExpiryPolicy} + */ + public ExpiryPolicy build() { + return new BaseExpiryPolicy<>(create, access, update); + } + + /** + * Simple implementation of the {@link ExpiryPolicy} interface allowing to set constants to each expiry types. + */ + private static class BaseExpiryPolicy implements ExpiryPolicy { + + private final BiFunction create; + private final BiFunction, Duration> access; + private final TriFunction, ? super V, Duration> update; + + BaseExpiryPolicy(BiFunction create, + BiFunction, Duration> access, + TriFunction, ? super V, Duration> update) { + this.create = create; + this.access = access; + this.update = update; + } + + @Override + public Duration getExpiryForCreation(K key, V value) { + return create.apply(key, value); + } + + @Override + public Duration getExpiryForAccess(K key, Supplier value) { + return access.apply(key, value); + } + + @Override + public Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue) { + return update.apply(key, oldValue, newValue); + } + } + + private static final class TimeToLiveExpiryPolicy extends BaseExpiryPolicy { + private final Duration ttl; + + TimeToLiveExpiryPolicy(Duration ttl) { + super( + (a, b) -> ttl, + (a, b) -> null, + (a, b, c) -> ttl); + this.ttl = ttl; + } + + @Override + public int hashCode() { + return ttl.hashCode(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof TimeToLiveExpiryPolicy && ttl.equals(((TimeToLiveExpiryPolicy) obj).ttl); + } + + @Override + public String toString() { + return "TTL of " + ttl; + } + } + + private static final class TimeToIdleExpiryPolicy extends BaseExpiryPolicy { + private final Duration tti; + + TimeToIdleExpiryPolicy(Duration tti) { + super( + (a, b) -> tti, + (a, b) -> tti, + (a, b, c) -> tti); + this.tti = tti; + } + + @Override + public int hashCode() { + return tti.hashCode(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof TimeToIdleExpiryPolicy && tti.equals(((TimeToIdleExpiryPolicy) obj).tti); + } + + @Override + public String toString() { + return "TTI of " + tti; + } + } +} diff --git a/impl/src/main/java/org/ehcache/config/builders/PooledExecutionServiceConfigurationBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/PooledExecutionServiceConfigurationBuilder.java similarity index 83% rename from impl/src/main/java/org/ehcache/config/builders/PooledExecutionServiceConfigurationBuilder.java rename to ehcache-impl/src/main/java/org/ehcache/config/builders/PooledExecutionServiceConfigurationBuilder.java index 861ba37519..3920d29a24 100644 --- a/impl/src/main/java/org/ehcache/config/builders/PooledExecutionServiceConfigurationBuilder.java +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/PooledExecutionServiceConfigurationBuilder.java @@ -43,6 +43,16 @@ private PooledExecutionServiceConfigurationBuilder(PooledExecutionServiceConfigu this.pools.addAll(other.pools); } + private PooledExecutionServiceConfigurationBuilder(PooledExecutionServiceConfiguration seed) { + seed.getPoolConfigurations().forEach((alias, config) -> { + Pool pool = new Pool(alias, config.minSize(), config.maxSize()); + if (alias.equals(seed.getDefaultPoolAlias())) { + defaultPool = pool; + } + pools.add(pool); + }); + } + /** * Creates a new instance of {@code PooledExecutionServiceConfigurationBuilder} * @@ -52,6 +62,15 @@ public static PooledExecutionServiceConfigurationBuilder newPooledExecutionServi return new PooledExecutionServiceConfigurationBuilder(); } + /** + * Creates a seeded instance of {@code PooledExecutionServiceConfigurationBuilder} + * + * @return the builder + */ + public static PooledExecutionServiceConfigurationBuilder newPooledExecutionServiceConfigurationBuilder(PooledExecutionServiceConfiguration seed) { + return new PooledExecutionServiceConfigurationBuilder(seed); + } + /** * Adds a default pool configuration to the returned builder. * @@ -99,9 +118,9 @@ public PooledExecutionServiceConfiguration build() { private static class Pool { - private String alias; - private int minSize; - private int maxSize; + private final String alias; + private final int minSize; + private final int maxSize; Pool(String alias, int minSize, int maxSize) { this.alias = alias; diff --git a/impl/src/main/java/org/ehcache/config/builders/ResourcePoolsBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/ResourcePoolsBuilder.java similarity index 96% rename from impl/src/main/java/org/ehcache/config/builders/ResourcePoolsBuilder.java rename to ehcache-impl/src/main/java/org/ehcache/config/builders/ResourcePoolsBuilder.java index 0a2a32a347..3e3d595c5b 100644 --- a/impl/src/main/java/org/ehcache/config/builders/ResourcePoolsBuilder.java +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/ResourcePoolsBuilder.java @@ -20,9 +20,9 @@ import org.ehcache.config.ResourcePool; import org.ehcache.config.SizedResourcePool; import org.ehcache.config.units.EntryUnit; -import org.ehcache.core.config.SizedResourcePoolImpl; +import org.ehcache.impl.config.SizedResourcePoolImpl; import org.ehcache.config.ResourcePools; -import org.ehcache.core.config.ResourcePoolsImpl; +import org.ehcache.impl.config.ResourcePoolsImpl; import org.ehcache.config.ResourceType; import org.ehcache.config.ResourceUnit; import org.ehcache.config.units.MemoryUnit; @@ -32,7 +32,7 @@ import static java.util.Collections.unmodifiableMap; import java.util.HashMap; -import static org.ehcache.core.config.ResourcePoolsImpl.validateResourcePools; +import static org.ehcache.impl.config.ResourcePoolsImpl.validateResourcePools; /** * The {@code ResourcePoolsBuilder} enables building {@link ResourcePools} configurations using a fluent style. @@ -46,7 +46,7 @@ public class ResourcePoolsBuilder implements Builder { private final Map, ResourcePool> resourcePools; private ResourcePoolsBuilder() { - this(Collections., ResourcePool>emptyMap()); + this(Collections.emptyMap()); } private ResourcePoolsBuilder(Map, ResourcePool> resourcePools) { diff --git a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java similarity index 88% rename from impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java rename to ehcache-impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java index 9f5c311a3e..a22f9b78ff 100644 --- a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java @@ -27,36 +27,40 @@ import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.Ehcache; -import org.ehcache.core.EhcacheWithLoaderWriter; import org.ehcache.core.InternalCache; import org.ehcache.core.PersistentUserManagedEhcache; -import org.ehcache.core.config.BaseCacheConfiguration; +import org.ehcache.impl.config.BaseCacheConfiguration; +import org.ehcache.core.config.ExpiryUtils; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEventListenerConfiguration; import org.ehcache.core.events.CacheEventListenerProvider; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.core.internal.store.StoreSupport; -import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.resilience.DefaultRecoveryStore; import org.ehcache.core.spi.LifeCycled; import org.ehcache.core.spi.LifeCycledAdapter; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.heap.SizeOfEngine; import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.core.store.StoreSupport; +import org.ehcache.core.util.ClassLoading; import org.ehcache.event.CacheEventListener; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; +import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.impl.events.CacheEventDispatcherImpl; import org.ehcache.impl.internal.events.DisabledCacheEventNotificationService; +import org.ehcache.impl.internal.resilience.RobustLoaderWriterResilienceStrategy; +import org.ehcache.impl.internal.resilience.RobustResilienceStrategy; import org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProvider; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.persistence.PersistableResourceService; +import org.ehcache.spi.resilience.ResilienceStrategy; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; @@ -74,11 +78,12 @@ import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import static org.ehcache.config.ResourceType.Core.DISK; import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; @@ -107,8 +112,8 @@ public class UserManagedCacheBuilder> imp private final Class valueType; private String id; private final Set services = new HashSet<>(); - private final Set> serviceCreationConfigurations = new HashSet<>(); - private Expiry expiry = Expirations.noExpiration(); + private final Set> serviceCreationConfigurations = new HashSet<>(); + private ExpiryPolicy expiry = ExpiryPolicy.NO_EXPIRY; private ClassLoader classLoader = ClassLoading.getDefaultClassLoader(); private EvictionAdvisor evictionAdvisor; private CacheLoaderWriter cacheLoaderWriter; @@ -121,7 +126,7 @@ public class UserManagedCacheBuilder> imp private Serializer keySerializer; private Serializer valueSerializer; private int dispatcherConcurrency = 4; - private List eventListenerConfigurations = new ArrayList<>(); + private List> eventListenerConfigurations = new ArrayList<>(); private ExecutorService unOrderedExecutor; private ExecutorService orderedExecutor; private long objectGraphSize = DEFAULT_OBJECT_GRAPH_SIZE; @@ -152,6 +157,7 @@ private UserManagedCacheBuilder(UserManagedCacheBuilder toCopy) { this.valueSerializer = toCopy.valueSerializer; this.useKeySerializingCopier = toCopy.useKeySerializingCopier; this.useValueSerializingCopier = toCopy.useValueSerializingCopier; + this.dispatcherConcurrency = toCopy.dispatcherConcurrency; this.eventListenerConfigurations = toCopy.eventListenerConfigurations; this.unOrderedExecutor = toCopy.unOrderedExecutor; this.orderedExecutor = toCopy.orderedExecutor; @@ -166,7 +172,7 @@ T build(ServiceLocator.DependencySet serviceLocatorBuilder) throws IllegalStateE ServiceLocator serviceLocator; try { - for (ServiceCreationConfiguration serviceCreationConfig : serviceCreationConfigurations) { + for (ServiceCreationConfiguration serviceCreationConfig : serviceCreationConfigurations) { serviceLocatorBuilder = serviceLocatorBuilder.with(serviceCreationConfig); } serviceLocatorBuilder = serviceLocatorBuilder.with(Store.Provider.class); @@ -176,7 +182,7 @@ T build(ServiceLocator.DependencySet serviceLocatorBuilder) throws IllegalStateE throw new IllegalStateException("UserManagedCacheBuilder failed to build.", e); } - List> serviceConfigsList = new ArrayList<>(); + List> serviceConfigsList = new ArrayList<>(); if (keyCopier != null) { serviceConfigsList.add(new DefaultCopierConfiguration<>(keyCopier, DefaultCopierConfiguration.Type.KEY)); @@ -232,7 +238,7 @@ public void close() throws Exception { serviceConfigsList.add(new DefaultSerializerConfiguration<>(this.valueSerializer, DefaultSerializerConfiguration.Type.VALUE)); } - ServiceConfiguration[] serviceConfigs = serviceConfigsList.toArray(new ServiceConfiguration[0]); + ServiceConfiguration[] serviceConfigs = serviceConfigsList.toArray(new ServiceConfiguration[0]); final SerializationProvider serialization = serviceLocator.getService(SerializationProvider.class); if (serialization != null) { try { @@ -270,21 +276,30 @@ public void close() throws Exception { } } - final Store.Provider storeProvider = StoreSupport.selectStoreProvider(serviceLocator, resources, serviceConfigsList); + if (cacheLoaderWriter != null) { + serviceConfigsList.add(new DefaultCacheLoaderWriterConfiguration(cacheLoaderWriter)); + } + + Store.Provider storeProvider = StoreSupport.selectWrapperStoreProvider(serviceLocator, serviceConfigsList); + if (storeProvider == null) { + storeProvider = StoreSupport.selectStoreProvider(serviceLocator, resources, serviceConfigsList); + } Store.Configuration storeConfig = new StoreConfigurationImpl<>(keyType, valueType, evictionAdvisor, classLoader, - expiry, resourcePools, dispatcherConcurrency, keySerializer, valueSerializer); - final Store store = storeProvider.createStore(storeConfig, serviceConfigs); + expiry, resourcePools, dispatcherConcurrency, keySerializer, valueSerializer, cacheLoaderWriter); + Store store = storeProvider.createStore(storeConfig, serviceConfigs); + + AtomicReference storeProviderRef = new AtomicReference<>(storeProvider); lifeCycledList.add(new LifeCycled() { @Override - public void init() throws Exception { - storeProvider.initStore(store); + public void init() { + storeProviderRef.get().initStore(store); } @Override - public void close() throws Exception { - storeProvider.releaseStore(store); + public void close() { + storeProviderRef.get().releaseStore(store); } }); @@ -293,6 +308,13 @@ public void close() throws Exception { } eventDispatcher.setStoreEventSource(store.getStoreEventSource()); + ResilienceStrategy resilienceStrategy; + if (cacheLoaderWriter == null) { + resilienceStrategy = new RobustResilienceStrategy<>(new DefaultRecoveryStore<>(store)); + } else { + resilienceStrategy = new RobustLoaderWriterResilienceStrategy<>(new DefaultRecoveryStore<>(store), cacheLoaderWriter); + } + if (persistent) { DiskResourceService diskResourceService = serviceLocator .getService(DiskResourceService.class); @@ -300,19 +322,14 @@ public void close() throws Exception { throw new IllegalStateException("No LocalPersistenceService could be found - did you configure one?"); } - PersistentUserManagedEhcache cache = new PersistentUserManagedEhcache<>(cacheConfig, store, diskResourceService, cacheLoaderWriter, eventDispatcher, id); + PersistentUserManagedEhcache cache = new PersistentUserManagedEhcache<>(cacheConfig, store, resilienceStrategy, diskResourceService, cacheLoaderWriter, eventDispatcher, id); registerListeners(cache, serviceLocator, lifeCycledList); for (LifeCycled lifeCycled : lifeCycledList) { cache.addHook(lifeCycled); } return cast(cache); } else { - final InternalCache cache; - if (cacheLoaderWriter == null) { - cache = new Ehcache<>(cacheConfig, store, eventDispatcher, getLoggerFor(Ehcache.class)); - } else { - cache = new EhcacheWithLoaderWriter<>(cacheConfig, store, cacheLoaderWriter, eventDispatcher, getLoggerFor(EhcacheWithLoaderWriter.class)); - } + InternalCache cache = new Ehcache<>(cacheConfig, store, resilienceStrategy, eventDispatcher, getLoggerFor(Ehcache.class)); registerListeners(cache, serviceLocator, lifeCycledList); for (LifeCycled lifeCycled : lifeCycledList) { (cache).addHook(lifeCycled); @@ -322,7 +339,7 @@ public void close() throws Exception { } - private Logger getLoggerFor(Class clazz) { + private Logger getLoggerFor(Class clazz) { String loggerName; if (id != null) { loggerName = clazz.getName() + "-" + id; @@ -349,14 +366,14 @@ private void registerListeners(Cache cache, ServiceProvider servi } else { listenerProvider = new DefaultCacheEventListenerProvider(); } - for (CacheEventListenerConfiguration config : eventListenerConfigurations) { + for (CacheEventListenerConfiguration config : eventListenerConfigurations) { final CacheEventListener listener = listenerProvider.createEventListener(id, config); if (listener != null) { cache.getRuntimeConfiguration().registerCacheEventListener(listener, config.orderingMode(), config.firingMode(), config.fireOn()); lifeCycledList.add(new LifeCycled() { @Override - public void init() throws Exception { + public void init() { } @@ -384,7 +401,7 @@ T cast(UserManagedCache cache) { * @throws IllegalStateException if the user managed cache cannot be built */ public final T build(final boolean init) throws IllegalStateException { - final T build = build(dependencySet().with(services)); + final T build = build(dependencySet().withoutMandatoryServices().with(services)); if (init) { build.init(); } @@ -445,12 +462,30 @@ public final UserManagedCacheBuilder withClassLoader(ClassLoader classL } /** - * Adds {@link Expiry} configuration to the returned builder. + * Adds {@link org.ehcache.expiry.Expiry} configuration to the returned builder. + * + * @param expiry the expiry to use + * @return a new builer with the added expiry + * + * @deprecated Use {@link #withExpiry(ExpiryPolicy)} instead + */ + @Deprecated + public final UserManagedCacheBuilder withExpiry(org.ehcache.expiry.Expiry expiry) { + if (expiry == null) { + throw new NullPointerException("Null expiry"); + } + UserManagedCacheBuilder otherBuilder = new UserManagedCacheBuilder<>(this); + otherBuilder.expiry = ExpiryUtils.convertToExpiryPolicy(expiry); + return otherBuilder; + } + + /** + * Adds {@link ExpiryPolicy} configuration to the returned builder. * * @param expiry the expiry to use * @return a new builer with the added expiry */ - public final UserManagedCacheBuilder withExpiry(Expiry expiry) { + public final UserManagedCacheBuilder withExpiry(ExpiryPolicy expiry) { if (expiry == null) { throw new NullPointerException("Null expiry"); } @@ -525,7 +560,7 @@ public final UserManagedCacheBuilder withEventListeners(CacheEventListe * @see #withEventExecutors(ExecutorService, ExecutorService) * @see #withEventListeners(CacheEventListenerConfigurationBuilder) */ - public final UserManagedCacheBuilder withEventListeners(CacheEventListenerConfiguration... cacheEventListenerConfigurations) { + public final UserManagedCacheBuilder withEventListeners(CacheEventListenerConfiguration ... cacheEventListenerConfigurations) { UserManagedCacheBuilder otherBuilder = new UserManagedCacheBuilder<>(this); otherBuilder.eventListenerConfigurations.addAll(Arrays.asList(cacheEventListenerConfigurations)); return otherBuilder; @@ -779,7 +814,7 @@ public UserManagedCacheBuilder using(Service service) { * * @see #using(Service) */ - public UserManagedCacheBuilder using(ServiceCreationConfiguration serviceConfiguration) { + public UserManagedCacheBuilder using(ServiceCreationConfiguration serviceConfiguration) { UserManagedCacheBuilder otherBuilder = new UserManagedCacheBuilder<>(this); if (serviceConfiguration instanceof DefaultSizeOfEngineProviderConfiguration) { removeAnySizeOfEngine(otherBuilder); @@ -788,7 +823,7 @@ public UserManagedCacheBuilder using(ServiceCreationConfiguration se return otherBuilder; } - private static void removeAnySizeOfEngine(UserManagedCacheBuilder builder) { + private static void removeAnySizeOfEngine(UserManagedCacheBuilder builder) { builder.services.remove(findSingletonAmongst(SizeOfEngineProvider.class, builder.services)); builder.serviceCreationConfigurations.remove(findSingletonAmongst(DefaultSizeOfEngineProviderConfiguration.class, builder.serviceCreationConfigurations)); } diff --git a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/UserManagedCacheConfiguration.java similarity index 100% rename from impl/src/main/java/org/ehcache/config/builders/UserManagedCacheConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/config/builders/UserManagedCacheConfiguration.java diff --git a/impl/src/main/java/org/ehcache/config/builders/WriteBehindConfigurationBuilder.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/WriteBehindConfigurationBuilder.java similarity index 97% rename from impl/src/main/java/org/ehcache/config/builders/WriteBehindConfigurationBuilder.java rename to ehcache-impl/src/main/java/org/ehcache/config/builders/WriteBehindConfigurationBuilder.java index 26d78846e0..612c2bc9c3 100644 --- a/impl/src/main/java/org/ehcache/config/builders/WriteBehindConfigurationBuilder.java +++ b/ehcache-impl/src/main/java/org/ehcache/config/builders/WriteBehindConfigurationBuilder.java @@ -31,7 +31,7 @@ * instance without modifying the one on which the method was called. * This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere. */ -public abstract class WriteBehindConfigurationBuilder implements Builder { +public abstract class WriteBehindConfigurationBuilder implements Builder> { protected int concurrency = 1; protected int queueSize = Integer.MAX_VALUE; @@ -197,7 +197,7 @@ public BatchedWriteBehindConfigurationBuilder useThreadPool(String alias) { * @return the write behind configuration */ @Override - public WriteBehindConfiguration build() { + public WriteBehindConfiguration build() { return buildWith(new DefaultBatchingConfiguration(maxDelay, maxDelayUnit, batchSize, coalescing)); } } @@ -220,7 +220,7 @@ private UnBatchedWriteBehindConfigurationBuilder(UnBatchedWriteBehindConfigurati * @return the write behind configuration */ @Override - public WriteBehindConfiguration build() { + public WriteBehindConfiguration build() { return buildWith(null); } @@ -261,7 +261,7 @@ public UnBatchedWriteBehindConfigurationBuilder useThreadPool(String alias) { } } - WriteBehindConfiguration buildWith(BatchingConfiguration batching) { + WriteBehindConfiguration buildWith(BatchingConfiguration batching) { return new DefaultWriteBehindConfiguration(threadPoolAlias, concurrency, queueSize, batching); } diff --git a/impl/src/main/java/org/ehcache/config/builders/package-info.java b/ehcache-impl/src/main/java/org/ehcache/config/builders/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/config/builders/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/config/builders/package-info.java diff --git a/core/src/main/java/org/ehcache/core/config/AbstractResourcePool.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/AbstractResourcePool.java similarity index 98% rename from core/src/main/java/org/ehcache/core/config/AbstractResourcePool.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/AbstractResourcePool.java index 767590c879..465bcd203e 100644 --- a/core/src/main/java/org/ehcache/core/config/AbstractResourcePool.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/AbstractResourcePool.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.core.config; +package org.ehcache.impl.config; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/config/BaseCacheConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/BaseCacheConfiguration.java new file mode 100644 index 0000000000..2b9949e977 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/BaseCacheConfiguration.java @@ -0,0 +1,152 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.core.config.ExpiryUtils; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.service.ServiceConfiguration; + +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; + +/** + * Base implementation of {@link CacheConfiguration}. + */ +public class BaseCacheConfiguration implements CacheConfiguration { + + private final Class keyType; + private final Class valueType; + private final EvictionAdvisor evictionAdvisor; + private final Collection> serviceConfigurations; + private final ClassLoader classLoader; + private final ExpiryPolicy expiry; + private final ResourcePools resourcePools; + + /** + * Creates a new {@code BaseCacheConfiguration} from the given parameters. + * + * @param keyType the key type + * @param valueType the value type + * @param evictionAdvisor the eviction advisor + * @param classLoader the class loader + * @param expiry the expiry policy + * @param resourcePools the resource pools + * @param serviceConfigurations the service configurations + */ + public BaseCacheConfiguration(Class keyType, Class valueType, + EvictionAdvisor evictionAdvisor, + ClassLoader classLoader, ExpiryPolicy expiry, + ResourcePools resourcePools, ServiceConfiguration... serviceConfigurations) { + if (keyType == null) { + throw new NullPointerException("keyType cannot be null"); + } + if (valueType == null) { + throw new NullPointerException("valueType cannot be null"); + } + if (resourcePools == null) { + throw new NullPointerException("resourcePools cannot be null"); + } + this.keyType = keyType; + this.valueType = valueType; + this.evictionAdvisor = evictionAdvisor; + this.classLoader = classLoader; + if (expiry != null) { + this.expiry = expiry; + } else { + this.expiry = ExpiryPolicy.NO_EXPIRY; + } + this.resourcePools = resourcePools; + this.serviceConfigurations = Collections.unmodifiableCollection(Arrays.asList(serviceConfigurations)); + } + + /** + * {@inheritDoc} + */ + @Override + public Collection> getServiceConfigurations() { + return serviceConfigurations; + } + + /** + * {@inheritDoc} + */ + @Override + public Class getKeyType() { + return keyType; + } + + /** + * {@inheritDoc} + */ + @Override + public Class getValueType() { + return valueType; + } + + /** + * {@inheritDoc} + */ + @Override + public EvictionAdvisor getEvictionAdvisor() { + return evictionAdvisor; + } + + /** + * {@inheritDoc} + */ + @Override + public ClassLoader getClassLoader() { + return classLoader; + } + + /** + * {@inheritDoc} + */ + @SuppressWarnings("deprecation") + @Override + public org.ehcache.expiry.Expiry getExpiry() { + return ExpiryUtils.convertToExpiry(expiry); + } + + /** + * {@inheritDoc} + */ + @Override + public ExpiryPolicy getExpiryPolicy() { + return expiry; + } + + /** + * {@inheritDoc} + */ + @Override + public ResourcePools getResourcePools() { + return resourcePools; + } + + @Override + public CacheConfigurationBuilder derive() { + return newCacheConfigurationBuilder(this); + } +} diff --git a/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/ResourcePoolsImpl.java similarity index 90% rename from core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/ResourcePoolsImpl.java index 10e0b494e7..8906bcb5b6 100644 --- a/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/ResourcePoolsImpl.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.core.config; +package org.ehcache.impl.config; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; @@ -24,9 +24,6 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -69,25 +66,27 @@ public Set> getResourceTypeSet() { */ @Override public ResourcePools validateAndMerge(ResourcePools toBeUpdated) { + Set> resourceTypeSet = toBeUpdated.getResourceTypeSet(); + // Ensure update pool types already exist in existing pools - if(!getResourceTypeSet().containsAll(toBeUpdated.getResourceTypeSet())) { + if(!getResourceTypeSet().containsAll(resourceTypeSet)) { throw new IllegalArgumentException("Pools to be updated cannot contain previously undefined resources pools"); } // Can not update OFFHEAP - if(toBeUpdated.getResourceTypeSet().contains(ResourceType.Core.OFFHEAP)) { + if(resourceTypeSet.contains(ResourceType.Core.OFFHEAP)) { throw new UnsupportedOperationException("Updating OFFHEAP resource is not supported"); } // Can not update DISK - if(toBeUpdated.getResourceTypeSet().contains(ResourceType.Core.DISK)) { + if(resourceTypeSet.contains(ResourceType.Core.DISK)) { throw new UnsupportedOperationException("Updating DISK resource is not supported"); } - for(ResourceType currentResourceType : toBeUpdated.getResourceTypeSet()) { + for(ResourceType currentResourceType : resourceTypeSet) { getPoolForResource(currentResourceType).validateUpdate(toBeUpdated.getPoolForResource(currentResourceType)); } - Map, ResourcePool> poolsMap = new HashMap<>(); + Map, ResourcePool> poolsMap = new HashMap<>(pools.size() + resourceTypeSet.size()); poolsMap.putAll(pools); - for(ResourceType currentResourceType : toBeUpdated.getResourceTypeSet()) { + for(ResourceType currentResourceType : resourceTypeSet) { ResourcePool poolForResource = toBeUpdated.getPoolForResource(currentResourceType); poolsMap.put(currentResourceType, poolForResource); } @@ -107,9 +106,9 @@ public static void validateResourcePools(Collection pool ordered.add((SizedResourcePool)pool); } } - Collections.sort(ordered, (o1, o2) -> { + ordered.sort((o1, o2) -> { int retVal = o2.getType().getTierHeight() - o1.getType().getTierHeight(); - if(retVal == 0) { + if (retVal == 0) { return o1.toString().compareTo(o2.toString()); } else { return retVal; diff --git a/core/src/main/java/org/ehcache/core/config/SizedResourcePoolImpl.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/SizedResourcePoolImpl.java similarity index 98% rename from core/src/main/java/org/ehcache/core/config/SizedResourcePoolImpl.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/SizedResourcePoolImpl.java index 60942b37c3..100ba3c23c 100644 --- a/core/src/main/java/org/ehcache/core/config/SizedResourcePoolImpl.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/SizedResourcePoolImpl.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.core.config; +package org.ehcache.impl.config; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; diff --git a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopierConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopierConfiguration.java similarity index 82% rename from impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopierConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopierConfiguration.java index 2ff4e161c5..00705e60f3 100644 --- a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopierConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopierConfiguration.java @@ -25,10 +25,13 @@ * {@link ServiceConfiguration} for the default {@link CopyProvider} implementation. *

                                  * Enables configuring a {@link Copier} for the key or value of a given cache. + *

                                  + * This class overrides the default {@link ServiceConfiguration#compatibleWith(ServiceConfiguration)} implementation + * to allow for independent configuration of the key and value copiers. * * @param the type which the configured copier can handle */ -public class DefaultCopierConfiguration extends ClassInstanceConfiguration> implements ServiceConfiguration { +public class DefaultCopierConfiguration extends ClassInstanceConfiguration> implements ServiceConfiguration { private final Type type; @@ -67,6 +70,15 @@ public Class getServiceType() { return CopyProvider.class; } + @Override + public boolean compatibleWith(ServiceConfiguration other) { + if (other instanceof DefaultCopierConfiguration) { + return !getType().equals(((DefaultCopierConfiguration) other).getType()); + } else { + return ServiceConfiguration.super.compatibleWith(other); + } + } + /** * Returns the {@link Type} of this configuration * diff --git a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java similarity index 87% rename from impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java index 350fcf0665..db7658d6e2 100644 --- a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java @@ -16,7 +16,6 @@ package org.ehcache.impl.config.copy; -import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; import org.ehcache.impl.internal.classes.ClassInstanceProviderConfiguration; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.copy.CopyProvider; @@ -28,7 +27,7 @@ * Enables configuring {@link Class} - {@link Copier} pairs that will be selected unless cache level configurations * are provided. */ -public class DefaultCopyProviderConfiguration extends ClassInstanceProviderConfiguration, Copier> implements ServiceCreationConfiguration { +public class DefaultCopyProviderConfiguration extends ClassInstanceProviderConfiguration, DefaultCopierConfiguration> implements ServiceCreationConfiguration { /** * Default constructor. @@ -93,9 +92,17 @@ public DefaultCopyProviderConfiguration addCopierFor(Class clazz, Class> configuration = (ClassInstanceConfiguration) new DefaultCopierConfiguration<>(copierClass); - getDefaults().put(clazz, configuration); + getDefaults().put(clazz, new DefaultCopierConfiguration<>(copierClass)); return this; } + + @Override + public DefaultCopyProviderConfiguration derive() { + return new DefaultCopyProviderConfiguration(this); + } + + @Override + public DefaultCopyProviderConfiguration build(DefaultCopyProviderConfiguration configuration) { + return configuration; + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/copy/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/copy/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/copy/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/copy/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfiguration.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfiguration.java index ae6abc605c..486d5b2e76 100644 --- a/impl/src/main/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfiguration.java @@ -25,7 +25,7 @@ * Enables configuring the default thread pool alias to be used by the * {@link org.ehcache.core.events.CacheEventDispatcher}s */ -public class CacheEventDispatcherFactoryConfiguration implements ServiceCreationConfiguration { +public class CacheEventDispatcherFactoryConfiguration implements ServiceCreationConfiguration { private final String threadPoolAlias; @@ -54,4 +54,14 @@ public String getThreadPoolAlias() { public Class getServiceType() { return CacheEventDispatcherFactory.class; } + + @Override + public String derive() { + return getThreadPoolAlias(); + } + + @Override + public CacheEventDispatcherFactoryConfiguration build(String alias) { + return new CacheEventDispatcherFactoryConfiguration(alias); + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfiguration.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfiguration.java index 2bbc11369b..300142b82a 100644 --- a/impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfiguration.java @@ -25,7 +25,7 @@ * Enables configuring the thread pool to be used by a {@link org.ehcache.core.events.CacheEventDispatcher} for * a given cache. */ -public class DefaultCacheEventDispatcherConfiguration implements ServiceConfiguration { +public class DefaultCacheEventDispatcherConfiguration implements ServiceConfiguration { private final String threadPoolAlias; @@ -54,4 +54,14 @@ public Class getServiceType() { public String getThreadPoolAlias() { return threadPoolAlias; } + + @Override + public String derive() { + return getThreadPoolAlias(); + } + + @Override + public DefaultCacheEventDispatcherConfiguration build(String alias) { + return new DefaultCacheEventDispatcherConfiguration(alias); + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfiguration.java similarity index 90% rename from impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfiguration.java index cf3edd13d2..91e9720a54 100644 --- a/impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfiguration.java @@ -23,6 +23,7 @@ import org.ehcache.event.EventOrdering; import org.ehcache.event.EventType; import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; +import org.ehcache.spi.service.ServiceConfiguration; import java.util.EnumSet; import java.util.Set; @@ -31,9 +32,12 @@ * {@link org.ehcache.spi.service.ServiceConfiguration} for the default {@link CacheEventListenerProvider}. *

                                  * Enables configuring a {@link CacheEventListener} for a given cache. + *

                                  + * This class overrides the default {@link ServiceConfiguration#compatibleWith(ServiceConfiguration)} implementation + * to allow for the configuration of multiple cache event listeners on the same cache. */ public class DefaultCacheEventListenerConfiguration extends ClassInstanceConfiguration> - implements CacheEventListenerConfiguration { + implements CacheEventListenerConfiguration { private final EnumSet eventsToFireOn; private EventFiring eventFiringMode = EventFiring.ASYNCHRONOUS; @@ -131,4 +135,9 @@ public EventOrdering orderingMode() { public EnumSet fireOn() { return eventsToFireOn; } + + @Override + public boolean compatibleWith(ServiceConfiguration other) { + return true; + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/event/DefaultEventSourceConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultEventSourceConfiguration.java similarity index 86% rename from impl/src/main/java/org/ehcache/impl/config/event/DefaultEventSourceConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultEventSourceConfiguration.java index 3238486a14..1c549db83e 100644 --- a/impl/src/main/java/org/ehcache/impl/config/event/DefaultEventSourceConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/DefaultEventSourceConfiguration.java @@ -23,7 +23,7 @@ * {@link org.ehcache.spi.service.ServiceConfiguration} for a {@link org.ehcache.core.spi.store.Store.Provider} * related to {@link org.ehcache.core.spi.store.events.StoreEvent}s. */ -public class DefaultEventSourceConfiguration implements StoreEventSourceConfiguration { +public class DefaultEventSourceConfiguration implements StoreEventSourceConfiguration { private final int dispatcherConcurrency; @@ -53,4 +53,14 @@ public int getDispatcherConcurrency() { public Class getServiceType() { return Store.Provider.class; } + + @Override + public Integer derive() { + return getDispatcherConcurrency(); + } + + @Override + public DefaultEventSourceConfiguration build(Integer concurrency) { + return new DefaultEventSourceConfiguration(concurrency); + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/event/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/event/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/event/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/event/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfiguration.java similarity index 83% rename from impl/src/main/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfiguration.java index 37adf3c05f..1073f8630f 100644 --- a/impl/src/main/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfiguration.java @@ -18,10 +18,13 @@ import java.util.HashMap; import java.util.Map; + +import org.ehcache.config.builders.PooledExecutionServiceConfigurationBuilder; import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.spi.service.ServiceCreationConfiguration; import static java.util.Collections.unmodifiableMap; +import static org.ehcache.config.builders.PooledExecutionServiceConfigurationBuilder.newPooledExecutionServiceConfigurationBuilder; /** * {@link ServiceCreationConfiguration} for the pooled {@link ExecutionService} implementation. @@ -36,7 +39,7 @@ * @see org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration * @see org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration */ -public class PooledExecutionServiceConfiguration implements ServiceCreationConfiguration { +public class PooledExecutionServiceConfiguration implements ServiceCreationConfiguration { private final Map poolConfigurations = new HashMap<>(); @@ -52,18 +55,20 @@ public class PooledExecutionServiceConfiguration implements ServiceCreationConfi * @param alias the pool alias * @param minSize the minimum size * @param maxSize the maximum size + * @return this configuration object with a new default pool * * @throws NullPointerException if alias is null * @throws IllegalArgumentException if another default was configured already or if another pool with the same * alias was configured already */ - public void addDefaultPool(String alias, int minSize, int maxSize) { + public PooledExecutionServiceConfiguration addDefaultPool(String alias, int minSize, int maxSize) { if (alias == null) { throw new NullPointerException("Pool alias cannot be null"); } if (defaultAlias == null) { addPool(alias, minSize, maxSize); defaultAlias = alias; + return this; } else { throw new IllegalArgumentException("'" + defaultAlias + "' is already configured as the default pool"); } @@ -75,11 +80,12 @@ public void addDefaultPool(String alias, int minSize, int maxSize) { * @param alias the pool alias * @param minSize the minimum size * @param maxSize the maximum size + * @return this configuration object with a new pool * * @throws NullPointerException if alias is null * @throws IllegalArgumentException if another pool with the same alias was configured already */ - public void addPool(String alias, int minSize, int maxSize) { + public PooledExecutionServiceConfiguration addPool(String alias, int minSize, int maxSize) { if (alias == null) { throw new NullPointerException("Pool alias cannot be null"); } @@ -87,6 +93,7 @@ public void addPool(String alias, int minSize, int maxSize) { throw new IllegalArgumentException("A pool with the alias '" + alias + "' is already configured"); } else { poolConfigurations.put(alias, new PoolConfiguration(minSize, maxSize)); + return this; } } @@ -116,6 +123,16 @@ public Class getServiceType() { return ExecutionService.class; } + @Override + public PooledExecutionServiceConfigurationBuilder derive() { + return newPooledExecutionServiceConfigurationBuilder(this); + } + + @Override + public PooledExecutionServiceConfiguration build(PooledExecutionServiceConfigurationBuilder builder) { + return builder.build(); + } + /** * Configuration class representing a pool configuration. */ diff --git a/impl/src/main/java/org/ehcache/impl/config/executor/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/executor/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/executor/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/executor/package-info.java diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfiguration.java new file mode 100644 index 0000000000..4902c4c564 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfiguration.java @@ -0,0 +1,63 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.loaderwriter; + +import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; +import org.ehcache.spi.service.ServiceConfiguration; + +/** +* {@link ServiceConfiguration} for the default {@link CacheLoaderWriterProvider}. +*/ +public class DefaultCacheLoaderWriterConfiguration extends ClassInstanceConfiguration> implements CacheLoaderWriterConfiguration { + + /** + * Creates a new configuration object with the specified {@link CacheLoaderWriter} class and associated constructor + * arguments. + * + * @param clazz the cache loader writer class + * @param arguments the constructor arguments + */ + public DefaultCacheLoaderWriterConfiguration(final Class> clazz, Object... arguments) { + super(clazz, arguments); + } + + /** + * Creates a new configuration with the specified {@link CacheLoaderWriter} instance. + * + * @param loaderWriter the cache loader writer + */ + public DefaultCacheLoaderWriterConfiguration(CacheLoaderWriter loaderWriter) { + super(loaderWriter); + } + + protected DefaultCacheLoaderWriterConfiguration(DefaultCacheLoaderWriterConfiguration configuration) { + super(configuration); + } + + @Override + public DefaultCacheLoaderWriterConfiguration derive() { + return new DefaultCacheLoaderWriterConfiguration(this); + } + + @Override + public DefaultCacheLoaderWriterConfiguration build(DefaultCacheLoaderWriterConfiguration configuration) { + return configuration; + } +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfiguration.java new file mode 100644 index 0000000000..cf3b4bd744 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfiguration.java @@ -0,0 +1,69 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.loaderwriter; + +import org.ehcache.impl.internal.classes.ClassInstanceProviderConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +/** + * {@link ServiceCreationConfiguration} for the default {@link CacheLoaderWriterProvider}. + */ +public class DefaultCacheLoaderWriterProviderConfiguration extends ClassInstanceProviderConfiguration implements ServiceCreationConfiguration { + + public DefaultCacheLoaderWriterProviderConfiguration() { + super(); + } + + public DefaultCacheLoaderWriterProviderConfiguration(DefaultCacheLoaderWriterProviderConfiguration config) { + super(config); + } + + /** + * {@inheritDoc} + */ + @Override + public Class getServiceType() { + return CacheLoaderWriterProvider.class; + } + + /** + * Adds a default {@link CacheLoaderWriter} class and associated constuctor arguments to be used with a cache matching + * the provided alias. + * + * @param alias the cache alias + * @param clazz the cache loader writer class + * @param arguments the constructor arguments + * + * @return this configuration instance + */ + public DefaultCacheLoaderWriterProviderConfiguration addLoaderFor(String alias, Class> clazz, Object... arguments) { + getDefaults().put(alias, new DefaultCacheLoaderWriterConfiguration(clazz, arguments)); + return this; + } + + @Override + public DefaultCacheLoaderWriterProviderConfiguration derive() { + return new DefaultCacheLoaderWriterProviderConfiguration(this); + } + + @Override + public DefaultCacheLoaderWriterProviderConfiguration build(DefaultCacheLoaderWriterProviderConfiguration configuration) { + return configuration; + } +} diff --git a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/loaderwriter/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultBatchingConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultBatchingConfiguration.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultBatchingConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultBatchingConfiguration.java diff --git a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultWriteBehindConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultWriteBehindConfiguration.java similarity index 98% rename from impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultWriteBehindConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultWriteBehindConfiguration.java index 57486009ea..7362998bae 100644 --- a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultWriteBehindConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/DefaultWriteBehindConfiguration.java @@ -22,7 +22,7 @@ /** * {@link org.ehcache.spi.service.ServiceConfiguration} for the default {@link WriteBehindProvider}. */ -public class DefaultWriteBehindConfiguration implements WriteBehindConfiguration { +public class DefaultWriteBehindConfiguration implements WriteBehindConfiguration { private final BatchingConfiguration batchingConfig; private final int concurrency; diff --git a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfiguration.java similarity index 81% rename from impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfiguration.java index 114f7dda21..1d116a1007 100644 --- a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfiguration.java @@ -23,7 +23,7 @@ * * @author cdennis */ -public class WriteBehindProviderConfiguration implements ServiceCreationConfiguration { +public class WriteBehindProviderConfiguration implements ServiceCreationConfiguration { private final String threadPoolAlias; @@ -39,4 +39,14 @@ public String getThreadPoolAlias() { public Class getServiceType() { return WriteBehindProvider.class; } + + @Override + public String derive() { + return getThreadPoolAlias(); + } + + @Override + public WriteBehindProviderConfiguration build(String alias) { + return new WriteBehindProviderConfiguration(alias); + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/loaderwriter/writebehind/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java diff --git a/impl/src/main/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfiguration.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfiguration.java index 743e6c0022..76030889c9 100644 --- a/impl/src/main/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfiguration.java @@ -24,7 +24,7 @@ /** * {@link ServiceCreationConfiguration} for the default {@link LocalPersistenceService}. */ -public class DefaultPersistenceConfiguration implements ServiceCreationConfiguration { +public class DefaultPersistenceConfiguration implements ServiceCreationConfiguration { private final File rootDirectory; @@ -53,4 +53,14 @@ public File getRootDirectory() { public Class getServiceType() { return LocalPersistenceService.class; } + + @Override + public File derive() { + return getRootDirectory(); + } + + @Override + public DefaultPersistenceConfiguration build(File file) { + return new DefaultPersistenceConfiguration(file); + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java diff --git a/impl/src/main/java/org/ehcache/impl/config/persistence/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/persistence/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/persistence/package-info.java diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyConfiguration.java new file mode 100644 index 0000000000..4ba945686c --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyConfiguration.java @@ -0,0 +1,128 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.config.resilience; + +import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.ResilienceStrategyProvider; +import org.ehcache.spi.service.ServiceConfiguration; + +import java.util.Arrays; + +/** + * {@link ServiceConfiguration} for the default {@link ResilienceStrategyProvider}. + */ +public class DefaultResilienceStrategyConfiguration extends ClassInstanceConfiguration> implements ServiceConfiguration { + + /** + * Creates a resilience strategy configuration that instantiates instances of the given class on demand. + *

                                  + * The provided class must have a constructor compatible with the supplied arguments followed by either the cache's + * {@code RecoveryStore}, or the cache's {@code RecoveryStore} and {@code CacheLoaderWriter}. + * + * @param clazz resilience strategy type to use + * @param arguments initial constructor arguments + */ + @SuppressWarnings({"unchecked", "rawtypes"}) + public DefaultResilienceStrategyConfiguration(Class clazz, Object... arguments) { + super((Class>) clazz, arguments); + } + + /** + * Creates a resilience strategy configuration that uses the supplies instance. + * + * @param instance resilience strategy to use + */ + public DefaultResilienceStrategyConfiguration(ResilienceStrategy instance) { + super(instance); + } + + protected DefaultResilienceStrategyConfiguration(DefaultResilienceStrategyConfiguration configuration) { + super(configuration); + } + + @Override + public Class getServiceType() { + return ResilienceStrategyProvider.class; + } + + @Override + public DefaultResilienceStrategyConfiguration derive() { + return new DefaultResilienceStrategyConfiguration(this); + } + + @Override + public DefaultResilienceStrategyConfiguration build(DefaultResilienceStrategyConfiguration config) { + return config; + } + + /** + * Returns a configuration object bound to the given store and cache loader-writer. + * + * @param store store to bind to + * @param loaderWriter loader to bind to + * @return a bound configuration + * @throws IllegalStateException if the configuration is already bound + */ + public DefaultResilienceStrategyConfiguration bind(RecoveryStore store, CacheLoaderWriter loaderWriter) throws IllegalStateException { + if (getInstance() == null) { + Object[] arguments = getArguments(); + Object[] boundArguments = Arrays.copyOf(arguments, arguments.length + 2); + boundArguments[arguments.length] = store; + boundArguments[arguments.length + 1] = loaderWriter; + return new BoundConfiguration(getClazz(), boundArguments); + } else { + return this; + } + } + + /** + * Returns a configuration object bound to the given store. + * + * @param store store to bind to + * @return a bound configuration + * @throws IllegalStateException if the configuration is already bound + */ + public DefaultResilienceStrategyConfiguration bind(RecoveryStore store) throws IllegalStateException { + if (getInstance() == null) { + Object[] arguments = getArguments(); + Object[] boundArguments = Arrays.copyOf(arguments, arguments.length + 1); + boundArguments[arguments.length] = store; + return new BoundConfiguration(getClazz(), boundArguments); + } else { + return this; + } + } + + private static class BoundConfiguration extends DefaultResilienceStrategyConfiguration { + + private BoundConfiguration(Class> clazz, Object... arguments) { + super(clazz, arguments); + } + + @Override + public DefaultResilienceStrategyConfiguration bind(RecoveryStore store, CacheLoaderWriter loaderWriter) { + throw new IllegalStateException(); + } + + @Override + public DefaultResilienceStrategyConfiguration bind(RecoveryStore store) { + throw new IllegalStateException(); + } + } +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyProviderConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyProviderConfiguration.java new file mode 100644 index 0000000000..1538bceb5e --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyProviderConfiguration.java @@ -0,0 +1,173 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.config.resilience; + +import org.ehcache.impl.internal.classes.ClassInstanceProviderConfiguration; +import org.ehcache.impl.internal.resilience.RobustLoaderWriterResilienceStrategy; +import org.ehcache.impl.internal.resilience.RobustResilienceStrategy; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.ResilienceStrategyProvider; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +/** + * {@link ServiceCreationConfiguration} for the default {@link ResilienceStrategyProvider}. + */ +public class DefaultResilienceStrategyProviderConfiguration extends ClassInstanceProviderConfiguration implements ServiceCreationConfiguration { + + @SuppressWarnings("rawtypes") + private static final Class DEFAULT_RESILIENCE = RobustResilienceStrategy.class; + @SuppressWarnings("rawtypes") + private static final Class DEFAULT_LOADER_WRITER_RESILIENCE = RobustLoaderWriterResilienceStrategy.class; + + private DefaultResilienceStrategyConfiguration defaultRegularConfiguration; + private DefaultResilienceStrategyConfiguration defaultLoaderWriterConfiguration; + + private DefaultResilienceStrategyProviderConfiguration(DefaultResilienceStrategyProviderConfiguration config) { + super(config); + this.defaultRegularConfiguration = config.defaultRegularConfiguration; + this.defaultLoaderWriterConfiguration = config.defaultLoaderWriterConfiguration; + } + + public DefaultResilienceStrategyProviderConfiguration() { + this.defaultRegularConfiguration = new DefaultResilienceStrategyConfiguration(DEFAULT_RESILIENCE); + this.defaultLoaderWriterConfiguration = new DefaultResilienceStrategyConfiguration(DEFAULT_LOADER_WRITER_RESILIENCE); + } + + /** + * Returns the default resilience strategy configuration used for caches without loader-writers + * + * @return the regular default configuration + */ + public DefaultResilienceStrategyConfiguration getDefaultConfiguration() { + return defaultRegularConfiguration; + } + + /** + * Returns the default resilience strategy configuration used for caches with loader-writers + * + * @return the loader-writer default configuration + */ + public DefaultResilienceStrategyConfiguration getDefaultLoaderWriterConfiguration() { + return defaultLoaderWriterConfiguration; + } + + @Override + public Class getServiceType() { + return ResilienceStrategyProvider.class; + } + + /** + * Sets the default {@link ResilienceStrategy} class and associated constructor arguments to be used for caches without + * a loader-writer. + *

                                  + * The provided class must have a constructor compatible with the supplied arguments followed by the cache's + * {@code RecoveryStore}. + * + * @param clazz the resilience strategy class + * @param arguments the constructor arguments + * + * @return this configuration instance + */ + @SuppressWarnings("rawtypes") + public DefaultResilienceStrategyProviderConfiguration setDefaultResilienceStrategy(Class clazz, Object... arguments) { + this.defaultRegularConfiguration = new DefaultResilienceStrategyConfiguration(clazz, arguments); + return this; + } + + /** + * Sets the default {@link ResilienceStrategy} instance to be used for caches without a loader-writer. + * + * @param resilienceStrategy the resilience strategy instance + * + * @return this configuration instance + */ + public DefaultResilienceStrategyProviderConfiguration setDefaultResilienceStrategy(ResilienceStrategy resilienceStrategy) { + this.defaultRegularConfiguration = new DefaultResilienceStrategyConfiguration(resilienceStrategy); + return this; + } + + /** + * Sets the default {@link ResilienceStrategy} class and associated constructor arguments to be used for caches with + * a loader writer. + *

                                  + * The provided class must have a constructor compatible with the supplied arguments followed by the cache's + * {@code RecoveryStore} and {@code CacheLoaderWriter}. + * + * @param clazz the resilience strategy class + * @param arguments the constructor arguments + * + * @return this configuration instance + */ + @SuppressWarnings("rawtypes") + public DefaultResilienceStrategyProviderConfiguration setDefaultLoaderWriterResilienceStrategy(Class clazz, Object... arguments) { + this.defaultLoaderWriterConfiguration = new DefaultResilienceStrategyConfiguration(clazz, arguments); + return this; + } + + /** + * Sets the default {@link ResilienceStrategy} instance to be used for caches with a loader-writer. + * + * @param resilienceStrategy the resilience strategy instance + * + * @return this configuration instance + */ + public DefaultResilienceStrategyProviderConfiguration setDefaultLoaderWriterResilienceStrategy(ResilienceStrategy resilienceStrategy) { + this.defaultLoaderWriterConfiguration = new DefaultResilienceStrategyConfiguration(resilienceStrategy); + return this; + } + + /** + * Adds a {@link ResilienceStrategy} class and associated constructor arguments to be used with a cache matching + * the provided alias. + *

                                  + * The provided class must have a constructor compatible with the supplied arguments followed by either the cache's + * {@code RecoveryStore}, or the cache's {@code RecoveryStore} and {@code CacheLoaderWriter}. + * + * @param alias the cache alias + * @param clazz the resilience strategy class + * @param arguments the constructor arguments + * + * @return this configuration instance + */ + @SuppressWarnings("rawtypes") + public DefaultResilienceStrategyProviderConfiguration addResilienceStrategyFor(String alias, Class clazz, Object... arguments) { + getDefaults().put(alias, new DefaultResilienceStrategyConfiguration(clazz, arguments)); + return this; + } + + /** + * Adds a {@link ResilienceStrategy} instance to be used with a cache matching the provided alias. + * + * @param alias the cache alias + * @param resilienceStrategy the resilience strategy instance + * + * @return this configuration instance + */ + public DefaultResilienceStrategyProviderConfiguration addResilienceStrategyFor(String alias, ResilienceStrategy resilienceStrategy) { + getDefaults().put(alias, new DefaultResilienceStrategyConfiguration(resilienceStrategy)); + return this; + } + + @Override + public DefaultResilienceStrategyProviderConfiguration derive() { + return new DefaultResilienceStrategyProviderConfiguration(this); + } + + @Override + public DefaultResilienceStrategyProviderConfiguration build(DefaultResilienceStrategyProviderConfiguration configuration) { + return configuration; + } +} diff --git a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java similarity index 92% rename from impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java index 184cbd963f..961ce124ca 100644 --- a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java @@ -30,7 +30,7 @@ /** * {@link ServiceCreationConfiguration} for the default {@link SerializationProvider}. */ -public class DefaultSerializationProviderConfiguration implements ServiceCreationConfiguration { +public class DefaultSerializationProviderConfiguration implements ServiceCreationConfiguration { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultSerializationProviderConfiguration.class); @@ -130,4 +130,14 @@ private static boolean isConstructorPresent(Class clazz, Class... args) { public Map, Class>> getDefaultSerializers() { return unmodifiableMap(defaultSerializers); } + + @Override + public DefaultSerializationProviderConfiguration derive() { + return new DefaultSerializationProviderConfiguration(this); + } + + @Override + public DefaultSerializationProviderConfiguration build(DefaultSerializationProviderConfiguration configuration) { + return configuration; + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializerConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializerConfiguration.java similarity index 80% rename from impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializerConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializerConfiguration.java index e50a76ad64..33ad82c349 100644 --- a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializerConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializerConfiguration.java @@ -23,8 +23,11 @@ /** * {@link ServiceConfiguration} for the default {@link SerializationProvider}. + *

                                  + * This class overrides the default {@link ServiceConfiguration#compatibleWith(ServiceConfiguration)} implementation + * to allow for independent configuration of the key and value serializers. */ -public class DefaultSerializerConfiguration extends ClassInstanceConfiguration> implements ServiceConfiguration { +public class DefaultSerializerConfiguration extends ClassInstanceConfiguration> implements ServiceConfiguration { private final Type type; @@ -67,6 +70,15 @@ public Type getType() { return type; } + @Override + public boolean compatibleWith(ServiceConfiguration other) { + if (other instanceof DefaultSerializerConfiguration) { + return !getType().equals(((DefaultSerializerConfiguration) other).getType()); + } else { + return ServiceConfiguration.super.compatibleWith(other); + } + } + /** * Serialization provider types */ diff --git a/impl/src/main/java/org/ehcache/impl/config/serializer/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/serializer/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/serializer/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/serializer/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfiguration.java similarity index 89% rename from impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfiguration.java index 0dc4118289..ad32d7b7d5 100644 --- a/impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfiguration.java @@ -22,7 +22,7 @@ /** * {@link ServiceConfiguration} for the default {@link org.ehcache.core.spi.store.Store off heap disk store}. */ -public class OffHeapDiskStoreConfiguration implements ServiceConfiguration { +public class OffHeapDiskStoreConfiguration implements ServiceConfiguration { public static final int DEFAULT_WRITER_CONCURRENCY = 1; public static final int DEFAULT_DISK_SEGMENTS = 16; @@ -33,7 +33,7 @@ public class OffHeapDiskStoreConfiguration implements ServiceConfiguration getServiceType() { return OffHeapDiskStore.Provider.class; } + + @Override + public OffHeapDiskStoreConfiguration derive() { + return new OffHeapDiskStoreConfiguration(threadPoolAlias, writerConcurrency, diskSegments); + } + + @Override + public OffHeapDiskStoreConfiguration build(OffHeapDiskStoreConfiguration config) { + return config; + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfiguration.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfiguration.java index c597a181b6..61c2161337 100644 --- a/impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfiguration.java @@ -22,7 +22,7 @@ /** * {@link ServiceCreationConfiguration} for the default {@link org.ehcache.core.spi.store.Store off heap disk store}. */ -public class OffHeapDiskStoreProviderConfiguration implements ServiceCreationConfiguration { +public class OffHeapDiskStoreProviderConfiguration implements ServiceCreationConfiguration { private final String threadPoolAlias; @@ -55,4 +55,14 @@ public String getThreadPoolAlias() { public Class getServiceType() { return OffHeapDiskStore.Provider.class; } + + @Override + public String derive() { + return getThreadPoolAlias(); + } + + @Override + public OffHeapDiskStoreProviderConfiguration build(String alias) { + return new OffHeapDiskStoreProviderConfiguration(alias); + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/store/disk/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/disk/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/store/disk/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/store/disk/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineConfiguration.java similarity index 89% rename from impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineConfiguration.java index 174fc4966a..d9e3bc89ea 100644 --- a/impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineConfiguration.java @@ -23,7 +23,7 @@ /** * {@link ServiceConfiguration} for the default {@link SizeOfEngineProvider}. */ -public class DefaultSizeOfEngineConfiguration implements ServiceConfiguration { +public class DefaultSizeOfEngineConfiguration implements ServiceConfiguration { /** * Default maximum object graph count after which sizing stops @@ -105,4 +105,14 @@ public MemoryUnit getUnit() { return this.unit; } + + @Override + public DefaultSizeOfEngineConfiguration derive() { + return new DefaultSizeOfEngineConfiguration(maxObjectSize, unit, objectGraphSize); + } + + @Override + public DefaultSizeOfEngineConfiguration build(DefaultSizeOfEngineConfiguration config) { + return config; + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineProviderConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineProviderConfiguration.java similarity index 86% rename from impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineProviderConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineProviderConfiguration.java index 169acbd632..53543da5d8 100644 --- a/impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineProviderConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/heap/DefaultSizeOfEngineProviderConfiguration.java @@ -23,7 +23,7 @@ /** * {@link ServiceCreationConfiguration} for the default {@link SizeOfEngineProvider}. */ -public class DefaultSizeOfEngineProviderConfiguration implements ServiceCreationConfiguration { +public class DefaultSizeOfEngineProviderConfiguration implements ServiceCreationConfiguration { private final long objectGraphSize; private final long maxObjectSize; @@ -89,4 +89,14 @@ public long getMaxObjectSize() { public MemoryUnit getUnit() { return this.unit; } + + @Override + public DefaultSizeOfEngineProviderConfiguration derive() { + return new DefaultSizeOfEngineProviderConfiguration(maxObjectSize, unit, objectGraphSize); + } + + @Override + public DefaultSizeOfEngineProviderConfiguration build(DefaultSizeOfEngineProviderConfiguration configuration) { + return configuration; + } } diff --git a/impl/src/main/java/org/ehcache/impl/config/store/heap/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/config/store/heap/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/config/store/heap/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/config/store/heap/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/copy/IdentityCopier.java b/ehcache-impl/src/main/java/org/ehcache/impl/copy/IdentityCopier.java similarity index 80% rename from impl/src/main/java/org/ehcache/impl/copy/IdentityCopier.java rename to ehcache-impl/src/main/java/org/ehcache/impl/copy/IdentityCopier.java index 3d9beff8b1..493b1fa670 100644 --- a/impl/src/main/java/org/ehcache/impl/copy/IdentityCopier.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/copy/IdentityCopier.java @@ -16,12 +16,22 @@ package org.ehcache.impl.copy; +import org.ehcache.spi.copy.Copier; + /** * {@link ReadWriteCopier Copier} implementation that simply returns the value passed in, doing in fact no * copy at all. */ public final class IdentityCopier extends ReadWriteCopier { + @SuppressWarnings("rawtypes") + private static final Copier COPIER = new IdentityCopier<>(); + + @SuppressWarnings("unchecked") + public static Copier identityCopier() { + return COPIER; + } + /** * This implementation returns the instance passed in as-is. */ diff --git a/impl/src/main/java/org/ehcache/impl/copy/ReadWriteCopier.java b/ehcache-impl/src/main/java/org/ehcache/impl/copy/ReadWriteCopier.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/copy/ReadWriteCopier.java rename to ehcache-impl/src/main/java/org/ehcache/impl/copy/ReadWriteCopier.java diff --git a/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java b/ehcache-impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java rename to ehcache-impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java diff --git a/impl/src/main/java/org/ehcache/impl/copy/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/copy/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/copy/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/copy/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java b/ehcache-impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java rename to ehcache-impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java diff --git a/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java b/ehcache-impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java similarity index 95% rename from impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java rename to ehcache-impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java index 5579b8bd6c..283f074797 100644 --- a/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java @@ -17,12 +17,11 @@ package org.ehcache.impl.events; import org.ehcache.Cache; -import org.ehcache.core.CacheConfigurationChangeEvent; import org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.core.CacheConfigurationProperty; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEvents; -import org.ehcache.core.internal.events.EventListenerWrapper; +import org.ehcache.core.events.EventListenerWrapper; import org.ehcache.event.CacheEvent; import org.ehcache.event.CacheEventListener; import org.ehcache.event.EventFiring; @@ -108,6 +107,9 @@ private synchronized void registerCacheEventListener(EventListenerWrapper aSyncListenersList.add(wrapper); break; case SYNCHRONOUS: + if (syncListenersList.isEmpty()) { + storeEventSource.setSynchronous(true); + } syncListenersList.add(wrapper); break; default: @@ -139,16 +141,19 @@ public void deregisterCacheEventListener(CacheEventListener> listenersList) { + private synchronized boolean removeWrapperFromList(EventListenerWrapper wrapper, List> listenersList) { int index = listenersList.indexOf(wrapper); if (index != -1) { - EventListenerWrapper containedWrapper = listenersList.remove(index); + EventListenerWrapper containedWrapper = listenersList.remove(index); if(containedWrapper.isOrdered() && --orderedListenerCount == 0) { storeEventSource.setEventOrdering(false); } if (--listenersCount == 0) { storeEventSource.removeEventListener(eventListener); } + if (syncListenersList.isEmpty()) { + storeEventSource.setSynchronous(false); + } return true; } return false; @@ -161,6 +166,7 @@ private synchronized boolean removeWrapperFromList(EventListenerWrapper wrapper, public synchronized void shutdown() { storeEventSource.removeEventListener(eventListener); storeEventSource.setEventOrdering(false); + storeEventSource.setSynchronous(false); syncListenersList.clear(); aSyncListenersList.clear(); unOrderedExectuor.shutdown(); diff --git a/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java b/ehcache-impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java similarity index 96% rename from impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java rename to ehcache-impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java index 79897ca453..fbdcbdf88d 100644 --- a/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java @@ -16,7 +16,7 @@ package org.ehcache.impl.events; -import org.ehcache.core.internal.events.EventListenerWrapper; +import org.ehcache.core.events.EventListenerWrapper; import org.ehcache.event.CacheEvent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/impl/src/main/java/org/ehcache/impl/events/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/events/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/events/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/events/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/DefaultTimeSourceService.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/DefaultTimeSourceService.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/internal/DefaultTimeSourceService.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/DefaultTimeSourceService.java index f2a27201e8..42a1d00109 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/DefaultTimeSourceService.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/DefaultTimeSourceService.java @@ -17,6 +17,7 @@ package org.ehcache.impl.internal; import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.time.TickingTimeSource; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.spi.service.ServiceProvider; @@ -44,11 +45,15 @@ public TimeSource getTimeSource() { @Override public void start(ServiceProvider serviceProvider) { - // no-op + if (timeSource instanceof Service) { + ((Service) timeSource).start(serviceProvider); + } } @Override public void stop() { - // no-op + if (timeSource instanceof Service) { + ((Service) timeSource).stop(); + } } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/TimeSourceConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/TimeSourceConfiguration.java similarity index 86% rename from impl/src/main/java/org/ehcache/impl/internal/TimeSourceConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/TimeSourceConfiguration.java index f8c6a178a7..28c400f961 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/TimeSourceConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/TimeSourceConfiguration.java @@ -24,7 +24,7 @@ * * This configuration has to be applied at the {@link org.ehcache.CacheManager} level. */ -public class TimeSourceConfiguration implements ServiceCreationConfiguration { +public class TimeSourceConfiguration implements ServiceCreationConfiguration { private final TimeSource timeSource; @@ -51,4 +51,13 @@ public TimeSource getTimeSource() { return this.timeSource; } + @Override + public TimeSource derive() { + return getTimeSource(); + } + + @Override + public TimeSourceConfiguration build(TimeSource timeSource) { + return new TimeSourceConfiguration(timeSource); + } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/TimeSourceServiceFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/TimeSourceServiceFactory.java similarity index 83% rename from impl/src/main/java/org/ehcache/impl/internal/TimeSourceServiceFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/TimeSourceServiceFactory.java index ab7577b741..add65271c6 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/TimeSourceServiceFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/TimeSourceServiceFactory.java @@ -16,21 +16,23 @@ package org.ehcache.impl.internal; +import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.spi.service.ServiceFactory; +import org.osgi.service.component.annotations.Component; /** * TimeSourceServiceFactory */ +@Component public class TimeSourceServiceFactory implements ServiceFactory { @Override - public TimeSourceService create(ServiceCreationConfiguration configuration) { + public TimeSourceService create(ServiceCreationConfiguration configuration) { return new DefaultTimeSourceService((TimeSourceConfiguration) configuration); } @Override - public Class getServiceType() { - return TimeSourceService.class; + public Class getServiceType() { + return DefaultTimeSourceService.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java similarity index 82% rename from impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java index e7cea1f690..5ebcc3355a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java @@ -43,6 +43,16 @@ public ClassInstanceConfiguration(T instance) { this.arguments = null; } + protected ClassInstanceConfiguration(ClassInstanceConfiguration configuration) { + this.instance = configuration.getInstance(); + this.clazz = configuration.getClazz(); + if (instance == null) { + this.arguments = asList(configuration.getArguments()); + } else { + this.arguments = null; + } + } + public Class getClazz() { return clazz; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProvider.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProvider.java similarity index 81% rename from impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProvider.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProvider.java index a53dba716e..1e41f4313d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProvider.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProvider.java @@ -39,12 +39,12 @@ /** * @author Alex Snaps */ -public class ClassInstanceProvider { +public class ClassInstanceProvider, T> { /** * The order in which entries are put in is kept. */ - protected final Map> preconfigured = Collections.synchronizedMap(new LinkedHashMap>()); + protected final Map preconfigured = Collections.synchronizedMap(new LinkedHashMap()); /** * Instances provided by this provider vs their counts. @@ -52,17 +52,16 @@ public class ClassInstanceProvider { protected final ConcurrentWeakIdentityHashMap providedVsCount = new ConcurrentWeakIdentityHashMap<>(); protected final Set instantiated = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap()); - private final Class> cacheLevelConfig; + private final Class cacheLevelConfig; private final boolean uniqueClassLevelConfig; - protected ClassInstanceProvider(ClassInstanceProviderConfiguration factoryConfig, - Class> cacheLevelConfig) { + protected ClassInstanceProvider(ClassInstanceProviderConfiguration factoryConfig, + Class cacheLevelConfig) { this(factoryConfig, cacheLevelConfig, false); } - protected ClassInstanceProvider(ClassInstanceProviderConfiguration factoryConfig, - Class> cacheLevelConfig, - boolean uniqueClassLevelConfig) { + protected ClassInstanceProvider(ClassInstanceProviderConfiguration factoryConfig, + Class cacheLevelConfig, boolean uniqueClassLevelConfig) { this.uniqueClassLevelConfig = uniqueClassLevelConfig; if (factoryConfig != null) { preconfigured.putAll(factoryConfig.getDefaults()); @@ -70,16 +69,16 @@ protected ClassInstanceProvider(ClassInstanceProviderConfiguration factory this.cacheLevelConfig = cacheLevelConfig; } - protected ClassInstanceConfiguration getPreconfigured(K alias) { + protected C getPreconfigured(K alias) { return preconfigured.get(alias); } protected T newInstance(K alias, CacheConfiguration cacheConfiguration) { - ClassInstanceConfiguration config = null; + C config = null; if (uniqueClassLevelConfig) { config = findSingletonAmongst(cacheLevelConfig, cacheConfiguration.getServiceConfigurations()); } else { - Iterator> iterator = + Iterator iterator = findAmongst(cacheLevelConfig, cacheConfiguration.getServiceConfigurations()).iterator(); if (iterator.hasNext()) { config = iterator.next(); @@ -88,15 +87,24 @@ protected T newInstance(K alias, CacheConfiguration cacheConfiguration) { return newInstance(alias, config); } - protected T newInstance(K alias, ServiceConfiguration serviceConfiguration) { - ClassInstanceConfiguration config = null; + protected T newInstance(K alias, ServiceConfiguration... serviceConfigurations) { + C config = null; + Iterator iterator = findAmongst(cacheLevelConfig, (Object[]) serviceConfigurations).iterator(); + if (iterator.hasNext()) { + config = iterator.next(); + } + return newInstance(alias, config); + } + + protected T newInstance(K alias, ServiceConfiguration serviceConfiguration) { + C config = null; if (serviceConfiguration != null && cacheLevelConfig.isAssignableFrom(serviceConfiguration.getClass())) { config = cacheLevelConfig.cast(serviceConfiguration); } return newInstance(alias, config); } - private T newInstance(K alias, ClassInstanceConfiguration config) { + private T newInstance(K alias, ClassInstanceConfiguration config) { if (config == null) { config = getPreconfigured(alias); if (config == null) { @@ -104,7 +112,7 @@ private T newInstance(K alias, ClassInstanceConfiguration config) { } } - T instance = null; + T instance; if(config.getInstance() != null) { instance = config.getInstance(); diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfiguration.java new file mode 100644 index 0000000000..cd102c7dc9 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfiguration.java @@ -0,0 +1,44 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.classes; + +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * Base class for ProviderFactory config that instantiates service classes. + * Keeps the order in which defaults are added. + * + * @author Alex Snaps + */ +public class ClassInstanceProviderConfiguration> { + + private final Map defaults; + + public ClassInstanceProviderConfiguration() { + this.defaults = new LinkedHashMap<>(); + } + + public ClassInstanceProviderConfiguration(ClassInstanceProviderConfiguration config) { + this.defaults = new LinkedHashMap<>(config.getDefaults()); + } + + public Map getDefaults() { + return defaults; + } + +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ArrayUtils.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ArrayUtils.java new file mode 100644 index 0000000000..a4487ae4e6 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ArrayUtils.java @@ -0,0 +1,327 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * This is a modified version of the original Apache class. It has had unused + * members removed. + */ +package org.ehcache.impl.internal.classes.commonslang; + +import java.lang.reflect.Array; + +/** + *

                                  Operations on arrays, primitive arrays (like {@code int[]}) and + * primitive wrapper arrays (like {@code Integer[]}). + * + *

                                  This class tries to handle {@code null} input gracefully. + * An exception will not be thrown for a {@code null} + * array input. However, an Object array that contains a {@code null} + * element may throw an exception. Each method documents its behaviour. + * + *

                                  #ThreadSafe# + * @since 2.0 + */ +public class ArrayUtils { + + /** + * An empty immutable {@code Object} array. + */ + public static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; + /** + * An empty immutable {@code Class} array. + */ + public static final Class[] EMPTY_CLASS_ARRAY = new Class[0]; + /** + * An empty immutable {@code long} array. + */ + public static final long[] EMPTY_LONG_ARRAY = new long[0]; + /** + * An empty immutable {@code int} array. + */ + public static final int[] EMPTY_INT_ARRAY = new int[0]; + /** + * An empty immutable {@code short} array. + */ + public static final short[] EMPTY_SHORT_ARRAY = new short[0]; + /** + * An empty immutable {@code double} array. + */ + public static final double[] EMPTY_DOUBLE_ARRAY = new double[0]; + /** + * An empty immutable {@code float} array. + */ + public static final float[] EMPTY_FLOAT_ARRAY = new float[0]; + + // NOTE: Cannot use {@code} to enclose text which includes {}, but is OK + + // nullToEmpty + //----------------------------------------------------------------------- + + /** + *

                                  Defensive programming technique to change a {@code null} + * reference to an empty one. + * + *

                                  This method returns an empty array for a {@code null} input array. + * + *

                                  As a memory optimizing technique an empty array passed in will be overridden with + * the empty {@code public static} references in this class. + * + * @param array the array to check for {@code null} or empty + * @return the same array, {@code public static} empty array if {@code null} or empty input + * @since 2.5 + */ + public static Object[] nullToEmpty(final Object[] array) { + if (isEmpty(array)) { + return EMPTY_OBJECT_ARRAY; + } + return array; + } + + /** + *

                                  Defensive programming technique to change a {@code null} + * reference to an empty one. + * + *

                                  This method returns an empty array for a {@code null} input array. + * + *

                                  As a memory optimizing technique an empty array passed in will be overridden with + * the empty {@code public static} references in this class. + * + * @param array the array to check for {@code null} or empty + * @return the same array, {@code public static} empty array if {@code null} or empty input + * @since 3.2 + */ + public static Class[] nullToEmpty(final Class[] array) { + if (isEmpty(array)) { + return EMPTY_CLASS_ARRAY; + } + return array; + } + + // Is same length + //----------------------------------------------------------------------- + /** + *

                                  Checks whether two arrays are the same length, treating + * {@code null} arrays as length {@code 0}. + * + *

                                  Any multi-dimensional aspects of the arrays are ignored. + * + * @param array1 the first array, may be {@code null} + * @param array2 the second array, may be {@code null} + * @return {@code true} if length of arrays matches, treating + * {@code null} as an empty array + */ + public static boolean isSameLength(final Object[] array1, final Object[] array2) { + return getLength(array1) == getLength(array2); + } + + //----------------------------------------------------------------------- + /** + *

                                  Returns the length of the specified array. + * This method can deal with {@code Object} arrays and with primitive arrays. + * + *

                                  If the input array is {@code null}, {@code 0} is returned. + * + *

                                  +     * ArrayUtils.getLength(null)            = 0
                                  +     * ArrayUtils.getLength([])              = 0
                                  +     * ArrayUtils.getLength([null])          = 1
                                  +     * ArrayUtils.getLength([true, false])   = 2
                                  +     * ArrayUtils.getLength([1, 2, 3])       = 3
                                  +     * ArrayUtils.getLength(["a", "b", "c"]) = 3
                                  +     * 
                                  + * + * @param array the array to retrieve the length from, may be null + * @return The length of the array, or {@code 0} if the array is {@code null} + * @throws IllegalArgumentException if the object argument is not an array. + * @since 2.1 + */ + public static int getLength(final Object array) { + if (array == null) { + return 0; + } + return Array.getLength(array); + } + + // Long array converters + // ---------------------------------------------------------------------- + /** + *

                                  Converts an array of object Longs to primitives. + * + *

                                  This method returns {@code null} for a {@code null} input array. + * + * @param array a {@code Long} array, may be {@code null} + * @return a {@code long} array, {@code null} if null array input + * @throws NullPointerException if array content is {@code null} + */ + public static long[] toPrimitive(final Long[] array) { + if (array == null) { + return null; + } else if (array.length == 0) { + return EMPTY_LONG_ARRAY; + } + final long[] result = new long[array.length]; + for (int i = 0; i < array.length; i++) { + result[i] = array[i].longValue(); + } + return result; + } + + // Int array converters + // ---------------------------------------------------------------------- + /** + *

                                  Converts an array of object Integers to primitives. + * + *

                                  This method returns {@code null} for a {@code null} input array. + * + * @param array a {@code Integer} array, may be {@code null} + * @return an {@code int} array, {@code null} if null array input + * @throws NullPointerException if array content is {@code null} + */ + public static int[] toPrimitive(final Integer[] array) { + if (array == null) { + return null; + } else if (array.length == 0) { + return EMPTY_INT_ARRAY; + } + final int[] result = new int[array.length]; + for (int i = 0; i < array.length; i++) { + result[i] = array[i].intValue(); + } + return result; + } + + // Short array converters + // ---------------------------------------------------------------------- + /** + *

                                  Converts an array of object Shorts to primitives. + * + *

                                  This method returns {@code null} for a {@code null} input array. + * + * @param array a {@code Short} array, may be {@code null} + * @return a {@code byte} array, {@code null} if null array input + * @throws NullPointerException if array content is {@code null} + */ + public static short[] toPrimitive(final Short[] array) { + if (array == null) { + return null; + } else if (array.length == 0) { + return EMPTY_SHORT_ARRAY; + } + final short[] result = new short[array.length]; + for (int i = 0; i < array.length; i++) { + result[i] = array[i].shortValue(); + } + return result; + } + + // Byte array converters + // ---------------------------------------------------------------------- + + // Double array converters + // ---------------------------------------------------------------------- + /** + *

                                  Converts an array of object Doubles to primitives. + * + *

                                  This method returns {@code null} for a {@code null} input array. + * + * @param array a {@code Double} array, may be {@code null} + * @return a {@code double} array, {@code null} if null array input + * @throws NullPointerException if array content is {@code null} + */ + public static double[] toPrimitive(final Double[] array) { + if (array == null) { + return null; + } else if (array.length == 0) { + return EMPTY_DOUBLE_ARRAY; + } + final double[] result = new double[array.length]; + for (int i = 0; i < array.length; i++) { + result[i] = array[i].doubleValue(); + } + return result; + } + + // Float array converters + // ---------------------------------------------------------------------- + /** + *

                                  Converts an array of object Floats to primitives. + * + *

                                  This method returns {@code null} for a {@code null} input array. + * + * @param array a {@code Float} array, may be {@code null} + * @return a {@code float} array, {@code null} if null array input + * @throws NullPointerException if array content is {@code null} + */ + public static float[] toPrimitive(final Float[] array) { + if (array == null) { + return null; + } else if (array.length == 0) { + return EMPTY_FLOAT_ARRAY; + } + final float[] result = new float[array.length]; + for (int i = 0; i < array.length; i++) { + result[i] = array[i].floatValue(); + } + return result; + } + + /** + *

                                  Create an array of primitive type from an array of wrapper types. + * + *

                                  This method returns {@code null} for a {@code null} input array. + * + * @param array an array of wrapper object + * @return an array of the corresponding primitive type, or the original array + * @since 3.5 + */ + public static Object toPrimitive(final Object array) { + if (array == null) { + return null; + } + final Class ct = array.getClass().getComponentType(); + final Class pt = ClassUtils.wrapperToPrimitive(ct); + if(Integer.TYPE.equals(pt)) { + return toPrimitive((Integer[]) array); + } + if(Long.TYPE.equals(pt)) { + return toPrimitive((Long[]) array); + } + if(Short.TYPE.equals(pt)) { + return toPrimitive((Short[]) array); + } + if(Double.TYPE.equals(pt)) { + return toPrimitive((Double[]) array); + } + if(Float.TYPE.equals(pt)) { + return toPrimitive((Float[]) array); + } + return array; + } + + // ---------------------------------------------------------------------- + /** + *

                                  Checks if an array of Objects is empty or {@code null}. + * + * @param array the array to test + * @return {@code true} if the array is empty or {@code null} + * @since 2.1 + */ + public static boolean isEmpty(final Object[] array) { + return getLength(array) == 0; + } + +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ClassUtils.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ClassUtils.java similarity index 84% rename from impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ClassUtils.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ClassUtils.java index c6cae1506f..8d9c7a9a7d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ClassUtils.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ClassUtils.java @@ -25,19 +25,36 @@ import java.util.Map; /** - *

                                  Operates on classes without using reflection. + *

                                  Operates on classes without using reflection.

                                  * *

                                  This class handles invalid {@code null} inputs as best it can. - * Each method documents its behaviour in more detail. + * Each method documents its behaviour in more detail.

                                  * *

                                  The notion of a {@code canonical name} includes the human * readable name for the type, for example {@code int[]}. The * non-canonical method variants work with the JVM names, such as - * {@code [I}. + * {@code [I}.

                                  * * @since 2.0 */ public class ClassUtils { + + /** + * Maps names of primitives to their corresponding primitive {@code Class}es. + */ + private static final Map> namePrimitiveMap = new HashMap<>(); + static { + namePrimitiveMap.put("boolean", Boolean.TYPE); + namePrimitiveMap.put("byte", Byte.TYPE); + namePrimitiveMap.put("char", Character.TYPE); + namePrimitiveMap.put("short", Short.TYPE); + namePrimitiveMap.put("int", Integer.TYPE); + namePrimitiveMap.put("long", Long.TYPE); + namePrimitiveMap.put("double", Double.TYPE); + namePrimitiveMap.put("float", Float.TYPE); + namePrimitiveMap.put("void", Void.TYPE); + } + /** * Maps primitive {@code Class}es to their corresponding wrapper {@code Class}. */ @@ -68,44 +85,35 @@ public class ClassUtils { } } - /** - *

                                  ClassUtils instances should NOT be constructed in standard programming. - * Instead, the class should be used as - * {@code ClassUtils.getShortClassName(cls)}. - * - *

                                  This constructor is public to permit tools that require a JavaBean - * instance to operate. - */ - public ClassUtils() { - super(); - } + // Is assignable + // ---------------------------------------------------------------------- /** - *

                                  Checks if an array of Classes can be assigned to another array of Classes. + *

                                  Checks if an array of Classes can be assigned to another array of Classes.

                                  * *

                                  This method calls {@link #isAssignable(Class, Class) isAssignable} for each * Class pair in the input arrays. It can be used to check if a set of arguments * (the first parameter) are suitably compatible with a set of method parameter types - * (the second parameter). + * (the second parameter).

                                  * - *

                                  Unlike the {@link Class#isAssignableFrom(java.lang.Class)} method, this + *

                                  Unlike the {@link Class#isAssignableFrom(Class)} method, this * method takes into account widenings of primitive classes and - * {@code null}s. + * {@code null}s.

                                  * *

                                  Primitive widenings allow an int to be assigned to a {@code long}, * {@code float} or {@code double}. This method returns the correct - * result for these cases. + * result for these cases.

                                  * *

                                  {@code Null} may be assigned to any reference type. This method will * return {@code true} if {@code null} is passed in and the toClass is - * non-primitive. + * non-primitive.

                                  * *

                                  Specifically, this method tests whether the type represented by the * specified {@code Class} parameter can be converted to the type * represented by this {@code Class} object via an identity conversion * widening primitive or widening reference conversion. See * The Java Language Specification, - * sections 5.1.1, 5.1.2 and 5.1.4 for details. + * sections 5.1.1, 5.1.2 and 5.1.4 for details.

                                  * * @param classArray the array of Classes to check, may be {@code null} * @param toClassArray the array of Classes to try to assign into, may be {@code null} @@ -113,7 +121,7 @@ public ClassUtils() { * @return {@code true} if assignment possible */ public static boolean isAssignable(Class[] classArray, Class[] toClassArray, final boolean autoboxing) { - if (ArrayUtils.isSameLength(classArray, toClassArray) == false) { + if (!ArrayUtils.isSameLength(classArray, toClassArray)) { return false; } if (classArray == null) { @@ -123,7 +131,7 @@ public static boolean isAssignable(Class[] classArray, Class[] toClassArra toClassArray = ArrayUtils.EMPTY_CLASS_ARRAY; } for (int i = 0; i < classArray.length; i++) { - if (isAssignable(classArray[i], toClassArray[i], autoboxing) == false) { + if (!isAssignable(classArray[i], toClassArray[i], autoboxing)) { return false; } } @@ -132,60 +140,60 @@ public static boolean isAssignable(Class[] classArray, Class[] toClassArra /** *

                                  Checks if one {@code Class} can be assigned to a variable of - * another {@code Class}. + * another {@code Class}.

                                  * - *

                                  Unlike the {@link Class#isAssignableFrom(java.lang.Class)} method, + *

                                  Unlike the {@link Class#isAssignableFrom(Class)} method, * this method takes into account widenings of primitive classes and - * {@code null}s. + * {@code null}s.

                                  * *

                                  Primitive widenings allow an int to be assigned to a long, float or - * double. This method returns the correct result for these cases. + * double. This method returns the correct result for these cases.

                                  * *

                                  {@code Null} may be assigned to any reference type. This method * will return {@code true} if {@code null} is passed in and the - * toClass is non-primitive. + * toClass is non-primitive.

                                  * *

                                  Specifically, this method tests whether the type represented by the * specified {@code Class} parameter can be converted to the type * represented by this {@code Class} object via an identity conversion * widening primitive or widening reference conversion. See * The Java Language Specification, - * sections 5.1.1, 5.1.2 and 5.1.4 for details. + * sections 5.1.1, 5.1.2 and 5.1.4 for details.

                                  * *

                                  Since Lang 3.0, this method will default behavior for * calculating assignability between primitive and wrapper types corresponding * to the running Java version; i.e. autoboxing will be the default - * behavior in VMs running Java versions > 1.5. + * behavior in VMs running Java versions > 1.5.

                                  * * @param cls the Class to check, may be null * @param toClass the Class to try to assign into, returns false if null * @return {@code true} if assignment possible */ public static boolean isAssignable(final Class cls, final Class toClass) { - return isAssignable(cls, toClass, SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_5)); + return isAssignable(cls, toClass, true); } /** *

                                  Checks if one {@code Class} can be assigned to a variable of - * another {@code Class}. + * another {@code Class}.

                                  * - *

                                  Unlike the {@link Class#isAssignableFrom(java.lang.Class)} method, + *

                                  Unlike the {@link Class#isAssignableFrom(Class)} method, * this method takes into account widenings of primitive classes and - * {@code null}s. + * {@code null}s.

                                  * *

                                  Primitive widenings allow an int to be assigned to a long, float or - * double. This method returns the correct result for these cases. + * double. This method returns the correct result for these cases.

                                  * *

                                  {@code Null} may be assigned to any reference type. This method * will return {@code true} if {@code null} is passed in and the - * toClass is non-primitive. + * toClass is non-primitive.

                                  * *

                                  Specifically, this method tests whether the type represented by the * specified {@code Class} parameter can be converted to the type * represented by this {@code Class} object via an identity conversion * widening primitive or widening reference conversion. See * The Java Language Specification, - * sections 5.1.1, 5.1.2 and 5.1.4 for details. + * sections 5.1.1, 5.1.2 and 5.1.4 for details.

                                  * * @param cls the Class to check, may be null * @param toClass the Class to try to assign into, returns false if null @@ -219,7 +227,7 @@ public static boolean isAssignable(Class cls, final Class toClass, final b return true; } if (cls.isPrimitive()) { - if (toClass.isPrimitive() == false) { + if (!toClass.isPrimitive()) { return false; } if (Integer.TYPE.equals(cls)) { @@ -267,10 +275,10 @@ public static boolean isAssignable(Class cls, final Class toClass, final b /** *

                                  Converts the specified primitive Class object to its corresponding - * wrapper Class object. + * wrapper Class object.

                                  * *

                                  NOTE: From v2.2, this method handles {@code Void.TYPE}, - * returning {@code Void.TYPE}. + * returning {@code Void.TYPE}.

                                  * * @param cls the class to convert, may be null * @return the wrapper class for {@code cls} or {@code cls} if @@ -287,13 +295,13 @@ public static Class primitiveToWrapper(final Class cls) { /** *

                                  Converts the specified wrapper class to its corresponding primitive - * class. + * class.

                                  * *

                                  This method is the counter part of {@code primitiveToWrapper()}. * If the passed in class is a wrapper class for a primitive type, this * primitive type will be returned (e.g. {@code Integer.TYPE} for * {@code Integer.class}). For other classes, or if the parameter is - * null, the return value is null. + * null, the return value is null.

                                  * * @param cls the class to convert, may be null * @return the corresponding primitive type if {@code cls} is a @@ -305,11 +313,13 @@ public static Class wrapperToPrimitive(final Class cls) { return wrapperPrimitiveMap.get(cls); } + // ---------------------------------------------------------------------- + /** *

                                  Converts an array of {@code Object} in to an array of {@code Class} objects. - * If any of these objects is null, a null element will be inserted into the array. + * If any of these objects is null, a null element will be inserted into the array.

                                  * - *

                                  This method returns {@code null} for a {@code null} input array. + *

                                  This method returns {@code null} for a {@code null} input array.

                                  * * @param array an {@code Object} array * @return a {@code Class} array, {@code null} if null array input @@ -321,7 +331,7 @@ public static Class[] toClass(final Object... array) { } else if (array.length == 0) { return ArrayUtils.EMPTY_CLASS_ARRAY; } - final Class[] classes = new Class[array.length]; + final Class[] classes = new Class[array.length]; for (int i = 0; i < array.length; i++) { classes[i] = array[i] == null ? null : array[i].getClass(); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/ConstructorUtils.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/ConstructorUtils.java similarity index 86% rename from impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/ConstructorUtils.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/ConstructorUtils.java index 1dc2de83bf..25e11e82e9 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/ConstructorUtils.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/ConstructorUtils.java @@ -23,51 +23,39 @@ import org.ehcache.impl.internal.classes.commonslang.ArrayUtils; import org.ehcache.impl.internal.classes.commonslang.ClassUtils; -import org.ehcache.impl.internal.classes.commonslang.Validate; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Modifier; +import java.util.Objects; /** *

                                  Utility reflection methods focused on constructors, modeled after - * {@link MethodUtils}. + * {@link MethodUtils}.

                                  * *

                                  Known Limitations

                                  Accessing Public Constructors In A Default * Access Superclass

                                  There is an issue when invoking {@code public} constructors * contained in a default access superclass. Reflection correctly locates these * constructors and assigns them as {@code public}. However, an * {@link IllegalAccessException} is thrown if the constructor is - * invoked. + * invoked.

                                  * *

                                  {@link ConstructorUtils} contains a workaround for this situation: it * will attempt to call {@link java.lang.reflect.AccessibleObject#setAccessible(boolean)} on this constructor. If this * call succeeds, then the method can be invoked as normal. This call will only * succeed when the application has sufficient security privileges. If this call - * fails then a warning will be logged and the method may fail. + * fails then a warning will be logged and the method may fail.

                                  * * @since 2.5 */ public class ConstructorUtils { - /** - *

                                  ConstructorUtils instances should NOT be constructed in standard - * programming. Instead, the class should be used as - * {@code ConstructorUtils.invokeConstructor(cls, args)}. - * - *

                                  This constructor is {@code public} to permit tools that require a JavaBean - * instance to operate. - */ - public ConstructorUtils() { - super(); - } - /** *

                                  Returns a new instance of the specified class inferring the right constructor - * from the types of the arguments. + * from the types of the arguments.

                                  * *

                                  This locates and calls a constructor. - * The constructor signature must match the argument types by assignment compatibility. + * The constructor signature must match the argument types by assignment compatibility.

                                  * * @param the type to be constructed * @param cls the class to be constructed, not {@code null} @@ -79,7 +67,7 @@ public ConstructorUtils() { * @throws IllegalAccessException if invocation is not permitted by security * @throws InvocationTargetException if an error occurs on invocation * @throws InstantiationException if an error occurs on instantiation - * @see #invokeConstructor(java.lang.Class, java.lang.Object[], java.lang.Class[]) + * @see #invokeConstructor(Class, Object[], Class[]) */ public static T invokeConstructor(final Class cls, Object... args) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, @@ -91,10 +79,10 @@ public static T invokeConstructor(final Class cls, Object... args) /** *

                                  Returns a new instance of the specified class choosing the right constructor - * from the list of parameter types. + * from the list of parameter types.

                                  * *

                                  This locates and calls a constructor. - * The constructor signature must match the parameter types by assignment compatibility. + * The constructor signature must match the parameter types by assignment compatibility.

                                  * * @param the type to be constructed * @param cls the class to be constructed, not {@code null} @@ -119,37 +107,43 @@ public static T invokeConstructor(final Class cls, Object[] args, Class[] methodParameterTypes = ctor.getParameterTypes(); + args = MethodUtils.getVarArgs(args, methodParameterTypes); + } return ctor.newInstance(args); } + //----------------------------------------------------------------------- + /** - *

                                  Checks if the specified constructor is accessible. + *

                                  Checks if the specified constructor is accessible.

                                  * - *

                                  This simply ensures that the constructor is accessible. + *

                                  This simply ensures that the constructor is accessible.

                                  * * @param the constructor type * @param ctor the prototype constructor object, not {@code null} * @return the constructor, {@code null} if no matching accessible constructor found - * @see java.lang.SecurityManager + * @see SecurityManager * @throws NullPointerException if {@code ctor} is {@code null} */ public static Constructor getAccessibleConstructor(final Constructor ctor) { - Validate.notNull(ctor, "constructor cannot be null"); + Objects.requireNonNull(ctor, "constructor cannot be null"); return MemberUtils.isAccessible(ctor) && isAccessible(ctor.getDeclaringClass()) ? ctor : null; } /** - *

                                  Finds an accessible constructor with compatible parameters. + *

                                  Finds an accessible constructor with compatible parameters.

                                  * *

                                  This checks all the constructor and finds one with compatible parameters * This requires that every parameter is assignable from the given parameter types. - * This is a more flexible search than the normal exact matching algorithm. + * This is a more flexible search than the normal exact matching algorithm.

                                  * *

                                  First it checks if there is a constructor matching the exact signature. * If not then all the constructors of the class are checked to see if their * signatures are assignment-compatible with the parameter types. - * The first assignment-compatible matching constructor is returned. + * The first assignment-compatible matching constructor is returned.

                                  * * @param the constructor type * @param cls the class to find a constructor for, not {@code null} @@ -159,7 +153,7 @@ public static Constructor getAccessibleConstructor(final Constructor c */ public static Constructor getMatchingAccessibleConstructor(final Class cls, final Class... parameterTypes) { - Validate.notNull(cls, "class cannot be null"); + Objects.requireNonNull(cls, "class cannot be null"); // see if we can find the constructor directly // most of the time this works and it's much faster try { @@ -178,14 +172,12 @@ public static Constructor getMatchingAccessibleConstructor(final Class // return best match: for (Constructor ctor : ctors) { // compare parameters - if (ClassUtils.isAssignable(parameterTypes, ctor.getParameterTypes(), true)) { + if (MemberUtils.isMatchingConstructor(ctor, parameterTypes)) { // get accessible version of constructor ctor = getAccessibleConstructor(ctor); if (ctor != null) { MemberUtils.setAccessibleWorkaround(ctor); - if (result == null - || MemberUtils.compareParameterTypes(ctor.getParameterTypes(), result - .getParameterTypes(), parameterTypes) < 0) { + if (result == null || MemberUtils.compareConstructorFit(ctor, result, parameterTypes) < 0) { // temporary variable for annotation, see comment above (1) @SuppressWarnings("unchecked") final diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MemberUtils.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MemberUtils.java new file mode 100644 index 0000000000..0849b9d234 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MemberUtils.java @@ -0,0 +1,292 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * This is a modified version of the original Apache class. It has had unused + * members removed. + */ + +package org.ehcache.impl.internal.classes.commonslang.reflect; + +import org.ehcache.impl.internal.classes.commonslang.ClassUtils; + +import java.lang.reflect.AccessibleObject; +import java.lang.reflect.Constructor; +import java.lang.reflect.Member; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; + +/** + * Contains common code for working with {@link Method Methods}/{@link Constructor Constructors}, + * extracted and refactored from {@link MethodUtils} when it was imported from Commons BeanUtils. + * + * @since 2.5 + */ +abstract class MemberUtils { + // TODO extract an interface to implement compareParameterSets(...)? + + private static final int ACCESS_TEST = Modifier.PUBLIC | Modifier.PROTECTED | Modifier.PRIVATE; + + /** Array of primitive number types ordered by "promotability" */ + private static final Class[] ORDERED_PRIMITIVE_TYPES = { Byte.TYPE, Short.TYPE, + Character.TYPE, Integer.TYPE, Long.TYPE, Float.TYPE, Double.TYPE }; + + /** + * XXX Default access superclass workaround. + * + * When a {@code public} class has a default access superclass with {@code public} members, + * these members are accessible. Calling them from compiled code works fine. + * Unfortunately, on some JVMs, using reflection to invoke these members + * seems to (wrongly) prevent access even when the modifier is {@code public}. + * Calling {@code setAccessible(true)} solves the problem but will only work from + * sufficiently privileged code. Better workarounds would be gratefully + * accepted. + * @param o the AccessibleObject to set as accessible + * @return a boolean indicating whether the accessibility of the object was set to true. + */ + static boolean setAccessibleWorkaround(final AccessibleObject o) { + if (o == null || o.isAccessible()) { + return false; + } + final Member m = (Member) o; + if (!o.isAccessible() && Modifier.isPublic(m.getModifiers()) && isPackageAccess(m.getDeclaringClass().getModifiers())) { + try { + o.setAccessible(true); + return true; + } catch (final SecurityException e) { // NOPMD + // ignore in favor of subsequent IllegalAccessException + } + } + return false; + } + + /** + * Returns whether a given set of modifiers implies package access. + * @param modifiers to test + * @return {@code true} unless {@code package}/{@code protected}/{@code private} modifier detected + */ + static boolean isPackageAccess(final int modifiers) { + return (modifiers & ACCESS_TEST) == 0; + } + + /** + * Returns whether a {@link Member} is accessible. + * @param m Member to check + * @return {@code true} if m is accessible + */ + static boolean isAccessible(final Member m) { + return m != null && Modifier.isPublic(m.getModifiers()) && !m.isSynthetic(); + } + + /** + * Compares the relative fitness of two Constructors in terms of how well they + * match a set of runtime parameter types, such that a list ordered + * by the results of the comparison would return the best match first + * (least). + * + * @param left the "left" Constructor + * @param right the "right" Constructor + * @param actual the runtime parameter types to match against + * {@code left}/{@code right} + * @return int consistent with {@code compare} semantics + * @since 3.5 + */ + static int compareConstructorFit(final Constructor left, final Constructor right, final Class[] actual) { + return compareParameterTypes(Executable.of(left), Executable.of(right), actual); + } + + /** + * Compares the relative fitness of two Executables in terms of how well they + * match a set of runtime parameter types, such that a list ordered + * by the results of the comparison would return the best match first + * (least). + * + * @param left the "left" Executable + * @param right the "right" Executable + * @param actual the runtime parameter types to match against + * {@code left}/{@code right} + * @return int consistent with {@code compare} semantics + */ + private static int compareParameterTypes(final Executable left, final Executable right, final Class[] actual) { + final float leftCost = getTotalTransformationCost(actual, left); + final float rightCost = getTotalTransformationCost(actual, right); + return leftCost < rightCost ? -1 : rightCost < leftCost ? 1 : 0; + } + + /** + * Returns the sum of the object transformation cost for each class in the + * source argument list. + * @param srcArgs The source arguments + * @param executable The executable to calculate transformation costs for + * @return The total transformation cost + */ + private static float getTotalTransformationCost(final Class[] srcArgs, final Executable executable) { + final Class[] destArgs = executable.getParameterTypes(); + final boolean isVarArgs = executable.isVarArgs(); + + // "source" and "destination" are the actual and declared args respectively. + float totalCost = 0.0f; + final long normalArgsLen = isVarArgs ? destArgs.length-1 : destArgs.length; + if (srcArgs.length < normalArgsLen) { + return Float.MAX_VALUE; + } + for (int i = 0; i < normalArgsLen; i++) { + totalCost += getObjectTransformationCost(srcArgs[i], destArgs[i]); + } + if (isVarArgs) { + // When isVarArgs is true, srcArgs and dstArgs may differ in length. + // There are two special cases to consider: + final boolean noVarArgsPassed = srcArgs.length < destArgs.length; + final boolean explicitArrayForVarags = srcArgs.length == destArgs.length && srcArgs[srcArgs.length-1].isArray(); + + final float varArgsCost = 0.001f; + final Class destClass = destArgs[destArgs.length-1].getComponentType(); + if (noVarArgsPassed) { + // When no varargs passed, the best match is the most generic matching type, not the most specific. + totalCost += getObjectTransformationCost(destClass, Object.class) + varArgsCost; + } else if (explicitArrayForVarags) { + final Class sourceClass = srcArgs[srcArgs.length-1].getComponentType(); + totalCost += getObjectTransformationCost(sourceClass, destClass) + varArgsCost; + } else { + // This is typical varargs case. + for (int i = destArgs.length-1; i < srcArgs.length; i++) { + final Class srcClass = srcArgs[i]; + totalCost += getObjectTransformationCost(srcClass, destClass) + varArgsCost; + } + } + } + return totalCost; + } + + /** + * Gets the number of steps required needed to turn the source class into + * the destination class. This represents the number of steps in the object + * hierarchy graph. + * @param srcClass The source class + * @param destClass The destination class + * @return The cost of transforming an object + */ + private static float getObjectTransformationCost(Class srcClass, final Class destClass) { + if (destClass.isPrimitive()) { + return getPrimitivePromotionCost(srcClass, destClass); + } + float cost = 0.0f; + while (srcClass != null && !destClass.equals(srcClass)) { + if (destClass.isInterface() && ClassUtils.isAssignable(srcClass, destClass)) { + // slight penalty for interface match. + // we still want an exact match to override an interface match, + // but + // an interface match should override anything where we have to + // get a superclass. + cost += 0.25f; + break; + } + cost++; + srcClass = srcClass.getSuperclass(); + } + /* + * If the destination class is null, we've traveled all the way up to + * an Object match. We'll penalize this by adding 1.5 to the cost. + */ + if (srcClass == null) { + cost += 1.5f; + } + return cost; + } + + /** + * Gets the number of steps required to promote a primitive number to another + * type. + * @param srcClass the (primitive) source class + * @param destClass the (primitive) destination class + * @return The cost of promoting the primitive + */ + private static float getPrimitivePromotionCost(final Class srcClass, final Class destClass) { + float cost = 0.0f; + Class cls = srcClass; + if (!cls.isPrimitive()) { + // slight unwrapping penalty + cost += 0.1f; + cls = ClassUtils.wrapperToPrimitive(cls); + } + for (int i = 0; cls != destClass && i < ORDERED_PRIMITIVE_TYPES.length; i++) { + if (cls == ORDERED_PRIMITIVE_TYPES[i]) { + cost += 0.1f; + if (i < ORDERED_PRIMITIVE_TYPES.length - 1) { + cls = ORDERED_PRIMITIVE_TYPES[i + 1]; + } + } + } + return cost; + } + + static boolean isMatchingConstructor(final Constructor method, final Class[] parameterTypes) { + return MemberUtils.isMatchingExecutable(Executable.of(method), parameterTypes); + } + + private static boolean isMatchingExecutable(final Executable method, final Class[] parameterTypes) { + final Class[] methodParameterTypes = method.getParameterTypes(); + if (ClassUtils.isAssignable(parameterTypes, methodParameterTypes, true)) { + return true; + } + + if (method.isVarArgs()) { + int i; + for (i = 0; i < methodParameterTypes.length - 1 && i < parameterTypes.length; i++) { + if (!ClassUtils.isAssignable(parameterTypes[i], methodParameterTypes[i], true)) { + return false; + } + } + final Class varArgParameterType = methodParameterTypes[methodParameterTypes.length - 1].getComponentType(); + for (; i < parameterTypes.length; i++) { + if (!ClassUtils.isAssignable(parameterTypes[i], varArgParameterType, true)) { + return false; + } + } + return true; + } + + return false; + } + + /** + *

                                  A class providing a subset of the API of java.lang.reflect.Executable in Java 1.8, + * providing a common representation for function signatures for Constructors and Methods.

                                  + */ + private static final class Executable { + private final Class[] parameterTypes; + private final boolean isVarArgs; + + private static Executable of(final Constructor constructor) { + return new Executable(constructor); + } + + private Executable(final Constructor constructor) { + parameterTypes = constructor.getParameterTypes(); + isVarArgs = constructor.isVarArgs(); + } + + public Class[] getParameterTypes() { + return parameterTypes; + } + + public boolean isVarArgs() { + return isVarArgs; + } + } + +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MethodUtils.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MethodUtils.java new file mode 100644 index 0000000000..7ebe2cd8b1 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MethodUtils.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * This is a modified version of the original Apache class. It has had unused + * members removed. + */ +package org.ehcache.impl.internal.classes.commonslang.reflect; + +import org.ehcache.impl.internal.classes.commonslang.ArrayUtils; +import org.ehcache.impl.internal.classes.commonslang.ClassUtils; + +import java.lang.reflect.Array; +import java.lang.reflect.Method; + +/** + *

                                  Utility reflection methods focused on {@link Method}s, originally from Commons BeanUtils. + * Differences from the BeanUtils version may be noted, especially where similar functionality + * already existed within Lang. + *

                                  + * + *

                                  Known Limitations

                                  + *

                                  Accessing Public Methods In A Default Access Superclass

                                  + *

                                  There is an issue when invoking {@code public} methods contained in a default access superclass on JREs prior to 1.4. + * Reflection locates these methods fine and correctly assigns them as {@code public}. + * However, an {@link IllegalAccessException} is thrown if the method is invoked.

                                  + * + *

                                  {@link MethodUtils} contains a workaround for this situation. + * It will attempt to call {@link java.lang.reflect.AccessibleObject#setAccessible(boolean)} on this method. + * If this call succeeds, then the method can be invoked as normal. + * This call will only succeed when the application has sufficient security privileges. + * If this call fails then the method may fail.

                                  + * + * @since 2.5 + */ +public class MethodUtils { + + /** + *

                                  Given an arguments array passed to a varargs method, return an array of arguments in the canonical form, + * i.e. an array with the declared number of parameters, and whose last parameter is an array of the varargs type. + *

                                  + * + * @param args the array of arguments passed to the varags method + * @param methodParameterTypes the declared array of method parameter types + * @return an array of the variadic arguments passed to the method + * @since 3.5 + */ + static Object[] getVarArgs(final Object[] args, final Class[] methodParameterTypes) { + if (args.length == methodParameterTypes.length + && args[args.length - 1].getClass().equals(methodParameterTypes[methodParameterTypes.length - 1])) { + // The args array is already in the canonical form for the method. + return args; + } + + // Construct a new array matching the method's declared parameter types. + final Object[] newArgs = new Object[methodParameterTypes.length]; + + // Copy the normal (non-varargs) parameters + System.arraycopy(args, 0, newArgs, 0, methodParameterTypes.length - 1); + + // Construct a new array for the variadic parameters + final Class varArgComponentType = methodParameterTypes[methodParameterTypes.length - 1].getComponentType(); + final int varArgLength = args.length - methodParameterTypes.length + 1; + + Object varArgsArray = Array.newInstance(ClassUtils.primitiveToWrapper(varArgComponentType), varArgLength); + // Copy the variadic arguments into the varargs array. + System.arraycopy(args, methodParameterTypes.length - 1, varArgsArray, 0, varArgLength); + + if(varArgComponentType.isPrimitive()) { + // unbox from wrapper type to primitive type + varArgsArray = ArrayUtils.toPrimitive(varArgsArray); + } + + // Store the varargs array in the last position of the array to return + newArgs[methodParameterTypes.length - 1] = varArgsArray; + + // Return the canonical varargs array. + return newArgs; + } + +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java similarity index 81% rename from impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java index 8e73521cc7..f2bc22432d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.events; -import org.ehcache.ValueSupplier; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.core.events.StoreEventSink; import org.ehcache.core.spi.store.events.StoreEventFilter; @@ -26,11 +25,12 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.LinkedBlockingQueue; +import java.util.function.Supplier; /** * AbstractStoreEventDispatcher */ -abstract class AbstractStoreEventDispatcher implements StoreEventDispatcher { +public abstract class AbstractStoreEventDispatcher implements StoreEventDispatcher { protected static final StoreEventSink NO_OP_EVENT_SINK = new CloseableStoreEventSink() { @Override @@ -49,17 +49,17 @@ public void reset() { } @Override - public void removed(Object key, ValueSupplier value) { + public void removed(Object key, Supplier value) { // Do nothing } @Override - public void updated(Object key, ValueSupplier oldValue, Object newValue) { + public void updated(Object key, Supplier oldValue, Object newValue) { // Do nothing } @Override - public void expired(Object key, ValueSupplier value) { + public void expired(Object key, Supplier value) { // Do nothing } @@ -69,7 +69,7 @@ public void created(Object key, Object value) { } @Override - public void evicted(Object key, ValueSupplier value) { + public void evicted(Object key, Supplier value) { // Do nothing } }; @@ -84,7 +84,8 @@ protected AbstractStoreEventDispatcher(int dispatcherConcurrency) { throw new IllegalArgumentException("Dispatcher concurrency must be an integer greater than 0"); } @SuppressWarnings("unchecked") - LinkedBlockingQueue>[] queues = new LinkedBlockingQueue[dispatcherConcurrency]; + LinkedBlockingQueue>[] queues = (LinkedBlockingQueue>[]) + new LinkedBlockingQueue[dispatcherConcurrency]; orderedQueues = queues; for (int i = 0; i < orderedQueues.length; i++) { orderedQueues[i] = new LinkedBlockingQueue<>(10000); @@ -123,6 +124,11 @@ public void setEventOrdering(boolean ordering) { this.ordered = ordering; } + @Override + public void setSynchronous(boolean synchronous) throws IllegalArgumentException { + //dispatcher is synchronous by default + } + @Override public boolean isEventOrdering() { return ordered; @@ -142,4 +148,9 @@ public void releaseEventSinkAfterFailure(StoreEventSink eventSink, Throwab public void reset(StoreEventSink eventSink) { ((CloseableStoreEventSink) eventSink).reset(); } + + @Override + public StoreEventSink eventSink() { + return new InvocationScopedEventSink<>(getFilters(), isEventOrdering(), getOrderedQueues(), getListeners()); + } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImpl.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImpl.java similarity index 97% rename from impl/src/main/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImpl.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImpl.java index d4b92c7f54..166d1c8f07 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImpl.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImpl.java @@ -61,7 +61,7 @@ public void stop() { } @Override - public CacheEventDispatcher createCacheEventDispatcher(Store store, ServiceConfiguration... serviceConfigs) { + public CacheEventDispatcher createCacheEventDispatcher(Store store, ServiceConfiguration... serviceConfigs) { String threadPoolAlias = defaultThreadPoolAlias; DefaultCacheEventDispatcherConfiguration config = findSingletonAmongst(DefaultCacheEventDispatcherConfiguration.class, (Object[]) serviceConfigs); if (config != null) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java similarity index 93% rename from impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java index acb05eae07..5c16d68427 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java @@ -19,11 +19,13 @@ import org.ehcache.core.events.CacheEventDispatcherFactory; import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.spi.service.ServiceFactory; +import org.osgi.service.component.annotations.Component; +@Component public class CacheEventNotificationListenerServiceProviderFactory implements ServiceFactory { @Override - public CacheEventDispatcherFactory create(ServiceCreationConfiguration configuration) { + public CacheEventDispatcherFactory create(ServiceCreationConfiguration configuration) { if (configuration == null) { return new CacheEventDispatcherFactoryImpl(); } else if (configuration instanceof CacheEventDispatcherFactoryConfiguration) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/CloseableStoreEventSink.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/CloseableStoreEventSink.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/events/CloseableStoreEventSink.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/CloseableStoreEventSink.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/DisabledCacheEventNotificationService.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/DisabledCacheEventNotificationService.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/events/DisabledCacheEventNotificationService.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/DisabledCacheEventNotificationService.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/FireableStoreEventHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/FireableStoreEventHolder.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/events/FireableStoreEventHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/FireableStoreEventHolder.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSink.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSink.java similarity index 97% rename from impl/src/main/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSink.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSink.java index 430a52ef5e..5079c5ce22 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSink.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSink.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.events; -import org.ehcache.ValueSupplier; import org.ehcache.event.EventType; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; @@ -24,6 +23,7 @@ import java.util.Iterator; import java.util.Set; import java.util.concurrent.BlockingQueue; +import java.util.function.Supplier; /** * This class is responsible for handling the event fudging that needs to happen @@ -45,7 +45,7 @@ class FudgingInvocationScopedEventSink extends InvocationScopedEventSink value) { + public void evicted(K key, Supplier value) { V eventFudgingValue = handleEvictionPostWriteOnSameKey(key); super.evicted(key, value); if (eventFudgingValue != null) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/InvocationScopedEventSink.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/InvocationScopedEventSink.java similarity index 94% rename from impl/src/main/java/org/ehcache/impl/internal/events/InvocationScopedEventSink.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/InvocationScopedEventSink.java index 66f743ccfa..21a961e38a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/InvocationScopedEventSink.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/InvocationScopedEventSink.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.events; -import org.ehcache.ValueSupplier; import org.ehcache.event.EventType; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; @@ -26,6 +25,7 @@ import java.util.Iterator; import java.util.Set; import java.util.concurrent.BlockingQueue; +import java.util.function.Supplier; import static org.ehcache.impl.internal.events.StoreEvents.createEvent; import static org.ehcache.impl.internal.events.StoreEvents.evictEvent; @@ -54,24 +54,24 @@ class InvocationScopedEventSink implements CloseableStoreEventSink { } @Override - public void removed(K key, ValueSupplier value) { - V removedValue = value.value(); + public void removed(K key, Supplier value) { + V removedValue = value.get(); if (acceptEvent(EventType.REMOVED, key, removedValue, null)) { handleEvent(key, new FireableStoreEventHolder<>(removeEvent(key, removedValue))); } } @Override - public void updated(K key, ValueSupplier oldValue, V newValue) { - V oldValueValue = oldValue.value(); + public void updated(K key, Supplier oldValue, V newValue) { + V oldValueValue = oldValue.get(); if (acceptEvent(EventType.UPDATED, key, oldValueValue, newValue)) { handleEvent(key, new FireableStoreEventHolder<>(updateEvent(key, oldValueValue, newValue))); } } @Override - public void expired(K key, ValueSupplier value) { - V expired = value.value(); + public void expired(K key, Supplier value) { + V expired = value.get(); if (acceptEvent(EventType.EXPIRED, key, expired, null)) { handleEvent(key, new FireableStoreEventHolder<>(expireEvent(key, expired))); } @@ -85,8 +85,8 @@ public void created(K key, V value) { } @Override - public void evicted(K key, ValueSupplier value) { - V evicted = value.value(); + public void evicted(K key, Supplier value) { + V evicted = value.get(); if (acceptEvent(EventType.EVICTED, key, evicted, null)) { handleEvent(key, new FireableStoreEventHolder<>(evictEvent(key, evicted))); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/StoreEventImpl.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/StoreEventImpl.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/events/StoreEventImpl.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/StoreEventImpl.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/StoreEvents.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/StoreEvents.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/events/StoreEvents.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/StoreEvents.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/executor/DefaultExecutionServiceFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/DefaultExecutionServiceFactory.java similarity index 81% rename from impl/src/main/java/org/ehcache/impl/internal/executor/DefaultExecutionServiceFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/DefaultExecutionServiceFactory.java index c1423cd0a3..a1eaa43e2f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/executor/DefaultExecutionServiceFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/DefaultExecutionServiceFactory.java @@ -15,19 +15,21 @@ */ package org.ehcache.impl.internal.executor; +import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; -import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; /** * * @author cdennis */ +@Component public class DefaultExecutionServiceFactory implements ServiceFactory { @Override - public ExecutionService create(ServiceCreationConfiguration configuration) { + public ExecutionService create(ServiceCreationConfiguration configuration) { if (configuration == null) { return new OnDemandExecutionService(); } else if (configuration instanceof PooledExecutionServiceConfiguration) { @@ -40,7 +42,12 @@ public ExecutionService create(ServiceCreationConfiguration co } @Override - public Class getServiceType() { + public Class getServiceType() { + /* + * XXX : There isn't a unique concrete type returned by this factory + * Currently this isn't a problem since neither of the concrete types + * returned have service depencies. + */ return ExecutionService.class; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/executor/ExecutorUtil.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/ExecutorUtil.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/executor/ExecutorUtil.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/ExecutorUtil.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/executor/OnDemandExecutionService.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/OnDemandExecutionService.java similarity index 98% rename from impl/src/main/java/org/ehcache/impl/internal/executor/OnDemandExecutionService.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/OnDemandExecutionService.java index 2ce6f6db9f..256793b903 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/executor/OnDemandExecutionService.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/OnDemandExecutionService.java @@ -20,7 +20,7 @@ import java.util.concurrent.Executors; import static java.util.concurrent.Executors.unconfigurableExecutorService; import static java.util.concurrent.Executors.unconfigurableScheduledExecutorService; -import java.util.concurrent.RejectedExecutionHandler; + import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; diff --git a/impl/src/main/java/org/ehcache/impl/internal/executor/OutOfBandScheduledExecutor.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/OutOfBandScheduledExecutor.java similarity index 97% rename from impl/src/main/java/org/ehcache/impl/internal/executor/OutOfBandScheduledExecutor.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/OutOfBandScheduledExecutor.java index 37436b1fec..79f9f4e7cb 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/executor/OutOfBandScheduledExecutor.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/OutOfBandScheduledExecutor.java @@ -20,13 +20,10 @@ import java.util.concurrent.Delayed; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.concurrent.Future; -import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.RunnableScheduledFuture; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; -import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; diff --git a/impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutor.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutor.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutor.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutor.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutor.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutor.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutor.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutor.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutor.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutor.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutor.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutor.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/executor/PooledExecutionService.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PooledExecutionService.java similarity index 99% rename from impl/src/main/java/org/ehcache/impl/internal/executor/PooledExecutionService.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PooledExecutionService.java index a24b5fa551..a620ca641d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/executor/PooledExecutionService.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/executor/PooledExecutionService.java @@ -150,7 +150,7 @@ public boolean isStopping() { /** * {@link #stop} has been called and has managed to finish processing all tasks. * - * @return + * @return if this executor has been stopped */ public boolean isStopped() { return scheduledExecutor.isTerminated(); diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehind.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehind.java new file mode 100644 index 0000000000..38650185b7 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehind.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.loaderwriter.writebehind; + +import org.ehcache.spi.loaderwriter.CacheWritingException; +import org.ehcache.impl.internal.loaderwriter.writebehind.operations.DeleteOperation; +import org.ehcache.impl.internal.loaderwriter.writebehind.operations.SingleOperation; +import org.ehcache.impl.internal.loaderwriter.writebehind.operations.WriteOperation; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +abstract class AbstractWriteBehind implements WriteBehind { + + private final CacheLoaderWriter cacheLoaderWriter; + + public AbstractWriteBehind(CacheLoaderWriter cacheLoaderWriter) { + this.cacheLoaderWriter = cacheLoaderWriter; + } + + @Override + public V load(K key) throws Exception { + SingleOperation operation = getOperation(key); + return operation == null ? cacheLoaderWriter.load(key) : (operation.getClass() == WriteOperation.class ? ((WriteOperation) operation).getValue() : null); + } + + @Override + public void write(K key, V value) throws CacheWritingException { + addOperation(new WriteOperation<>(key, value)); + } + + @Override + public void delete(K key) throws CacheWritingException { + addOperation(new DeleteOperation<>(key)); + } + + protected abstract SingleOperation getOperation(K key); + + protected abstract void addOperation(final SingleOperation operation); +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java similarity index 99% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java index 78ebb88879..4a2dd7b85a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java @@ -73,7 +73,7 @@ public class BatchingLocalHeapWriteBehindQueue extends AbstractWriteBehind private volatile Batch openBatch; - public BatchingLocalHeapWriteBehindQueue(ExecutionService executionService, String defaultThreadPool, WriteBehindConfiguration config, CacheLoaderWriter cacheLoaderWriter) { + public BatchingLocalHeapWriteBehindQueue(ExecutionService executionService, String defaultThreadPool, WriteBehindConfiguration config, CacheLoaderWriter cacheLoaderWriter) { super(cacheLoaderWriter); this.cacheLoaderWriter = cacheLoaderWriter; BatchingConfiguration batchingConfig = config.getBatchingConfiguration(); diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java similarity index 98% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java index 5cabc589cf..2fcc075a32 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java @@ -44,7 +44,7 @@ public class NonBatchingLocalHeapWriteBehindQueue extends AbstractWriteBeh private final BlockingQueue executorQueue; private final ExecutorService executor; - public NonBatchingLocalHeapWriteBehindQueue(ExecutionService executionService, String defaultThreadPool, WriteBehindConfiguration config, CacheLoaderWriter cacheLoaderWriter) { + public NonBatchingLocalHeapWriteBehindQueue(ExecutionService executionService, String defaultThreadPool, WriteBehindConfiguration config, CacheLoaderWriter cacheLoaderWriter) { super(cacheLoaderWriter); this.cacheLoaderWriter = cacheLoaderWriter; this.executorQueue = new LinkedBlockingQueue<>(config.getMaxQueueSize()); @@ -57,6 +57,7 @@ public NonBatchingLocalHeapWriteBehindQueue(ExecutionService executionService, S @Override protected SingleOperation getOperation(K key) { + return latest.get(key); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/StripedWriteBehind.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/StripedWriteBehind.java similarity index 78% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/StripedWriteBehind.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/StripedWriteBehind.java index c56aac4f0a..af9a99c1f6 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/StripedWriteBehind.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/StripedWriteBehind.java @@ -16,12 +16,8 @@ package org.ehcache.impl.internal.loaderwriter.writebehind; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; -import java.util.Map.Entry; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; @@ -39,7 +35,7 @@ public class StripedWriteBehind implements WriteBehind { private final List> stripes = new ArrayList<>(); - public StripedWriteBehind(ExecutionService executionService, String defaultThreadPool, WriteBehindConfiguration config, CacheLoaderWriter cacheLoaderWriter) { + public StripedWriteBehind(ExecutionService executionService, String defaultThreadPool, WriteBehindConfiguration config, CacheLoaderWriter cacheLoaderWriter) { int writeBehindConcurrency = config.getConcurrency(); for (int i = 0; i < writeBehindConcurrency; i++) { if (config.getBatchingConfiguration() == null) { @@ -68,7 +64,7 @@ public void start() { @Override public V load(K key) throws Exception { - V v = null; + V v; readLock.lock(); try { v = getStripe(key).load(key); @@ -78,15 +74,6 @@ public V load(K key) throws Exception { return v; } - @Override - public Map loadAll(Iterable keys) throws Exception { - Map entries = new HashMap<>(); - for (K k : keys) { - entries.put(k, load(k)) ; - } - return entries; - } - @Override public void write(K key, V value) throws Exception { readLock.lock(); @@ -97,13 +84,6 @@ public void write(K key, V value) throws Exception { } } - @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { - for (Entry entry : entries) { - write(entry.getKey(), entry.getValue()); - } - } - @Override public void delete(K key) throws Exception { readLock.lock(); @@ -114,13 +94,6 @@ public void delete(K key) throws Exception { } } - @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { - for (K k : keys) { - delete(k); - } - } - @Override public void stop() { writeLock.lock(); diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehind.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehind.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehind.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehind.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactory.java similarity index 92% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactory.java index 1328dd60b2..f70df30c7d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactory.java @@ -15,25 +15,27 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind; +import org.ehcache.core.spi.service.ExecutionService; +import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration; -import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; import org.ehcache.spi.loaderwriter.WriteBehindProvider; -import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceProvider; +import org.osgi.service.component.annotations.Component; /** * @author Abhilash * */ +@Component public class WriteBehindProviderFactory implements ServiceFactory { @Override - public WriteBehindProvider create(ServiceCreationConfiguration configuration) { + public WriteBehindProvider create(ServiceCreationConfiguration configuration) { if (configuration == null) { return new Provider(); } else if (configuration instanceof WriteBehindProviderConfiguration) { @@ -69,7 +71,7 @@ public void start(ServiceProvider serviceProvider) { } @Override - public WriteBehind createWriteBehindLoaderWriter(CacheLoaderWriter cacheLoaderWriter, WriteBehindConfiguration configuration) { + public WriteBehind createWriteBehindLoaderWriter(CacheLoaderWriter cacheLoaderWriter, WriteBehindConfiguration configuration) { if (cacheLoaderWriter == null) { throw new NullPointerException("WriteBehind requires a non null CacheLoaderWriter."); } @@ -85,8 +87,8 @@ public void releaseWriteBehindLoaderWriter(CacheLoaderWriter cacheLoaderWr } @Override - public Class getServiceType() { - return WriteBehindProvider.class; + public Class getServiceType() { + return Provider.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/KeyBasedOperation.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/KeyBasedOperation.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/KeyBasedOperation.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/KeyBasedOperation.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java similarity index 84% rename from impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java index 46a6c24d4f..bc142ee477 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java @@ -16,20 +16,21 @@ package org.ehcache.impl.internal.persistence; -import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.impl.persistence.DefaultDiskResourceService; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; +@Component public class DefaultDiskResourceServiceFactory implements ServiceFactory { @Override - public DefaultDiskResourceService create(final ServiceCreationConfiguration serviceConfiguration) { + public DefaultDiskResourceService create(final ServiceCreationConfiguration serviceConfiguration) { return new DefaultDiskResourceService(); } @Override - public Class getServiceType() { + public Class getServiceType() { return DefaultDiskResourceService.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java similarity index 83% rename from impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java index 55ae580bc2..dfc91f30b3 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java @@ -16,25 +16,27 @@ package org.ehcache.impl.internal.persistence; -import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.impl.persistence.DefaultLocalPersistenceService; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.spi.service.ServiceFactory; +import org.osgi.service.component.annotations.Component; /** * @author Alex Snaps */ +@Component @ServiceFactory.RequiresConfiguration public class DefaultLocalPersistenceServiceFactory implements ServiceFactory { @Override - public DefaultLocalPersistenceService create(final ServiceCreationConfiguration serviceConfiguration) { + public DefaultLocalPersistenceService create(final ServiceCreationConfiguration serviceConfiguration) { return new DefaultLocalPersistenceService((DefaultPersistenceConfiguration) serviceConfiguration); } @Override - public Class getServiceType() { - return LocalPersistenceService.class; + public Class getServiceType() { + return DefaultLocalPersistenceService.class; } } diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/AbstractResilienceStrategy.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/AbstractResilienceStrategy.java new file mode 100644 index 0000000000..614e98fe71 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/AbstractResilienceStrategy.java @@ -0,0 +1,193 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.resilience; + +import org.ehcache.Cache; +import org.ehcache.CacheIterationException; +import org.ehcache.impl.internal.util.Pacer; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.StoreAccessException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Base class that can be used for resilience strategies. It provides helper methods for the resilience strategy to use + * for cleanup in case or error. It then notifies if the cache was able to recover from the error or is now in an + * inconsistent state. By default, the notification is to log the error. + */ +public abstract class AbstractResilienceStrategy implements ResilienceStrategy { + + private final Logger LOGGER = LoggerFactory.getLogger(getClass()); + + private final RecoveryStore store; + + /** + * Used to prevent logging the same error continuously. Instead, we will log in error every 30 seconds. + */ + private final Pacer pacer = new Pacer(SystemTimeSource.INSTANCE, 30_000); + + /** + * Unique constructor. It takes a {@link RecoveryStore} that will be used for cleanup operations. + * + * @param store store to clean + */ + protected AbstractResilienceStrategy(RecoveryStore store) { + this.store = store; + } + + /** + * {@inheritDoc} + */ + @Override + public Cache.Entry iteratorFailure(StoreAccessException e) { + LOGGER.error("Ehcache iterator terminated early due to exception", e); + throw new CacheIterationException(e); + } + + /** + * Clear all entries from the store. + * + * @param from original failure causing the cleanup + */ + protected void cleanup(StoreAccessException from) { + try { + store.obliterate(); + } catch (StoreAccessException e) { + inconsistent(from, e); + return; + } + recovered(from); + } + + /** + * Clean all keys from the store. + * + * @param keys keys to clean + * @param from original failure causing the cleanup + */ + protected void cleanup(Iterable keys, StoreAccessException from) { + try { + store.obliterate(keys); + } catch (StoreAccessException e) { + inconsistent(keys, from, e); + return; + } + recovered(keys, from); + } + + /** + * Clean the key from the store. + * + * @param key key to clean + * @param from original failure causing the cleanup + */ + protected void cleanup(K key, StoreAccessException from) { + try { + store.obliterate(key); + } catch (StoreAccessException e) { + inconsistent(key, from, e); + return; + } + recovered(key, from); + } + + /** + * Called when the cache recovered from a failing store operation on the key. + * + * @param key key that failed + * @param from exception thrown by the failing operation + */ + protected void recovered(K key, StoreAccessException from) { + LOGGER.info("Ehcache key {} recovered from", key, from); + } + + /** + * Called when the cache recovered from a failing store operation on a list of keys. + * + * @param keys keys that failed + * @param from exception thrown by the failing operation + */ + protected void recovered(Iterable keys, StoreAccessException from) { + LOGGER.info("Ehcache keys {} recovered from", keys, from); + } + + /** + * Called when the cache recovered from a failing store global operation (no specific key involved e.g. + * {@code clear()}. + * + * @param from exception thrown by the failing operation + */ + protected void recovered(StoreAccessException from) { + LOGGER.info("Ehcache recovered from", from); + } + + /** + * Called when the cache failed to recover from a failing store operation on a key. + * + * @param key key now inconsistent + * @param because exception thrown by the failing operation + * @param cleanup all the exceptions that occurred during cleanup + */ + protected void inconsistent(K key, StoreAccessException because, StoreAccessException... cleanup) { + pacedErrorLog("Ehcache key {} in possible inconsistent state", key, because); + } + + /** + * Called when the cache failed to recover from a failing store operation on a list of keys. + * + * @param keys + * @param because exception thrown by the failing operation + * @param cleanup all the exceptions that occurred during cleanup + */ + protected void inconsistent(Iterable keys, StoreAccessException because, StoreAccessException... cleanup) { + pacedErrorLog("Ehcache keys {} in possible inconsistent state", keys, because); + } + + /** + * Called when the cache failed to recover from a failing store global operation (no specific key involved e.g. + * {@code clear()}. + * + * @param because exception thrown by the failing operation + * @param cleanup all the exceptions that occurred during cleanup + */ + protected void inconsistent(StoreAccessException because, StoreAccessException... cleanup) { + pacedErrorLog("Ehcache in possible inconsistent state", because); + } + + /** + * Log messages in error at worst every 30 seconds. Log everything at debug level. + * + * @param message message to log + * @param e exception to log + */ + protected void pacedErrorLog(String message, StoreAccessException e) { + pacer.pacedCall(() -> LOGGER.error(message + " - Similar messages will be suppressed for 30 seconds", e), () -> LOGGER.debug(message, e)); + } + + /** + * Log messages in error at worst every 30 seconds. Log everything at debug level. + * + * @param message message to log + * @param arg1 first log param + * @param arg2 second log param + */ + protected void pacedErrorLog(String message, Object arg1, Object arg2) { + pacer.pacedCall(() -> LOGGER.error(message + " - Similar messages will be suppressed for 30 seconds", arg1, arg2), () -> LOGGER.debug(message, arg1, arg2)); + } + +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/RobustLoaderWriterResilienceStrategy.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/RobustLoaderWriterResilienceStrategy.java new file mode 100644 index 0000000000..7f018f7d82 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/RobustLoaderWriterResilienceStrategy.java @@ -0,0 +1,322 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.resilience; + +import org.ehcache.core.exceptions.ExceptionFactory; +import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; +import org.ehcache.spi.loaderwriter.BulkCacheWritingException; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.StoreAccessException; + +import java.util.Map; +import java.util.Objects; + +/** + * Default resilience strategy used by a {@link org.ehcache.Cache} with a {@link CacheLoaderWriter} specified. It will + * behaves in two ways: + *
                                    + *
                                  • Keep the loader-writer in sync. E.g. a put will write to it
                                  • + *
                                  • Answer by retrieving the value from the loader-writer
                                  • + *
                                      + * Note: This behavior is the most accurate one but will add load to the loader-writer backend. + *

                                      + * It also tries to cleanup any corrupted key. + */ +public class RobustLoaderWriterResilienceStrategy extends AbstractResilienceStrategy { + + private final CacheLoaderWriter loaderWriter; + + public RobustLoaderWriterResilienceStrategy(RecoveryStore store, CacheLoaderWriter loaderWriter) { + super(store); + this.loaderWriter = Objects.requireNonNull(loaderWriter); + } + + /** + * Get the value from the loader-writer. + * + * @param key the key being retrieved + * @param e the triggered failure + * @return value as loaded from the loader-writer + */ + @Override + public V getFailure(K key, StoreAccessException e) { + try { + return loaderWriter.load(key); + } catch (Exception e1) { + throw ExceptionFactory.newCacheLoadingException(e1, e); + } finally { + cleanup(key, e); + } + } + + /** + * Return false. It doesn't matter if the key is present in the backend, we consider it's not in the cache. + * + * @param key the key being queried + * @param e the triggered failure + * @return false + */ + @Override + public boolean containsKeyFailure(K key, StoreAccessException e) { + cleanup(key, e); + return false; + } + + /** + * Write the value to the loader-write. + * + * @param key the key being put + * @param value the value being put + * @param e the triggered failure + */ + @Override + public void putFailure(K key, V value, StoreAccessException e) { + try { + loaderWriter.write(key, value); + } catch (Exception e1) { + throw ExceptionFactory.newCacheWritingException(e1, e); + } finally { + cleanup(key, e); + } + } + + /** + * Delete the key from the loader-writer. + * + * @param key the key being removed + * @param e the triggered failure + */ + @Override + public void removeFailure(K key, StoreAccessException e) { + try { + loaderWriter.delete(key); + } catch(Exception e1) { + throw ExceptionFactory.newCacheWritingException(e1, e); + } finally { + cleanup(key, e); + } + } + + /** + * Do nothing. + * + * @param e the triggered failure + */ + @Override + public void clearFailure(StoreAccessException e) { + cleanup(e); + } + + /** + * Write the value to the loader-writer if it doesn't already exist in it. Note that the load and write pair + * is not atomic. This atomicity, if needed, should be handled by the something else. + * + * @param key the key being put + * @param value the value being put + * @param e the triggered failure + * @return the existing value or null if the new was set + */ + @Override + public V putIfAbsentFailure(K key, V value, StoreAccessException e) { + // FIXME: Should I care about useLoaderInAtomics? + try { + try { + V loaded = loaderWriter.load(key); + if (loaded != null) { + return loaded; + } + } catch (Exception e1) { + throw ExceptionFactory.newCacheLoadingException(e1, e); + } + try { + loaderWriter.write(key, value); + } catch (Exception e1) { + throw ExceptionFactory.newCacheWritingException(e1, e); + } + } finally { + cleanup(key, e); + } + return null; + } + + /** + * Delete the key from the loader-writer if it is found with a matching value. Note that the load and write pair + * is not atomic. This atomicity, if needed, should be handled by the something else. + * + * @param key the key being removed + * @param value the value being removed + * @param e the triggered failure + * @return if the value was removed + */ + @Override + public boolean removeFailure(K key, V value, StoreAccessException e) { + try { + V loadedValue; + + try { + loadedValue = loaderWriter.load(key); + } catch (Exception e1) { + throw ExceptionFactory.newCacheLoadingException(e1, e); + } + + if (loadedValue == null) { + return false; + } + if (!loadedValue.equals(value)) { + return false; + } + + try { + loaderWriter.delete(key); + } catch (Exception e1) { + throw ExceptionFactory.newCacheWritingException(e1, e); + } + return true; + } finally { + cleanup(key, e); + } + } + + /** + * Write the value to the loader-writer if the key already exists. Note that the load and write pair + * is not atomic. This atomicity, if needed, should be handled by the something else. + * + * @param key the key being replaced + * @param value the value being replaced + * @param e the triggered failure + * @return the old value or null if not found + */ + @Override + public V replaceFailure(K key, V value, StoreAccessException e) { + try { + V oldValue; + try { + oldValue = loaderWriter.load(key); + } catch (Exception e1) { + throw ExceptionFactory.newCacheLoadingException(e1, e); + } + + if (oldValue != null) { + try { + loaderWriter.write(key, value); + } catch (Exception e1) { + throw ExceptionFactory.newCacheWritingException(e1, e); + } + } + return oldValue; + } finally { + cleanup(key, e); + } + + } + + /** + * Write the value to the loader-writer if the entry already exists with a matching value. Note that the load and write pair + * is not atomic. This atomicity, if needed, should be handled by the something else. + * + * @param key the key being replaced + * @param value the expected value + * @param newValue the replacement value + * @param e the triggered failure + * @return if the value was replaced + */ + @Override + public boolean replaceFailure(K key, V value, V newValue, StoreAccessException e) { + try { + V oldValue; + try { + oldValue = loaderWriter.load(key); + } catch (Exception e1) { + throw ExceptionFactory.newCacheLoadingException(e1, e); + } + + if (oldValue != null && oldValue.equals(value)) { + try { + loaderWriter.write(key, newValue); + return true; + } catch (Exception e1) { + throw ExceptionFactory.newCacheWritingException(e1, e); + } + } + + return false; + } finally { + cleanup(key, e); + } + } + + /** + * Get all entries for the provided keys. Entries not found by the loader-writer are expected to be an entry + * with the key and a null value. + * + * @param keys the keys being retrieved + * @param e the triggered failure + * @return a map of key-value pairs as loaded by the loader-writer + */ + @SuppressWarnings("unchecked") + @Override + public Map getAllFailure(Iterable keys, StoreAccessException e) { + try { + return loaderWriter.loadAll((Iterable) keys); // FIXME: bad typing that we should fix + } catch(BulkCacheLoadingException e1) { + throw e1; + } catch (Exception e1) { + throw ExceptionFactory.newCacheLoadingException(e1, e); + } finally { + cleanup(keys, e); + } + } + + /** + * Write all entries to the loader-writer. + * + * @param entries the entries being put + * @param e the triggered failure + */ + @Override + public void putAllFailure(Map entries, StoreAccessException e) { + try { + loaderWriter.writeAll(entries.entrySet()); // FIXME: bad typing that we should fix + } catch(BulkCacheWritingException e1) { + throw e1; + } catch (Exception e1) { + throw ExceptionFactory.newCacheWritingException(e1, e); + } finally { + cleanup(entries.keySet(), e); + } + } + + /** + * Delete all keys from the loader-writer. + * + * @param keys the keys being removed + * @param e the triggered failure + */ + @Override + public void removeAllFailure(Iterable keys, StoreAccessException e) { + try { + loaderWriter.deleteAll(keys); + } catch(BulkCacheWritingException e1) { + throw e1; + } catch (Exception e1) { + throw ExceptionFactory.newCacheWritingException(e1, e); + } finally { + cleanup(keys, e); + } + } + +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/RobustResilienceStrategy.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/RobustResilienceStrategy.java new file mode 100644 index 0000000000..3ec5cb8bbb --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/resilience/RobustResilienceStrategy.java @@ -0,0 +1,203 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.resilience; + +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.StoreAccessException; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +/** + * Default resilience strategy used by a {@link org.ehcache.Cache} without {@link org.ehcache.spi.loaderwriter.CacheLoaderWriter}. + * It behaves in two specific ways: + * + *

                                        + *
                                      • An empty cache. It never founds anything
                                      • + *
                                      • Everything added to it gets evicted right away
                                      • + *
                                      + * + * It also tries to cleanup any corrupted key. + */ +public class RobustResilienceStrategy extends AbstractResilienceStrategy { + + /** + * Unique constructor for create this resilience strategy. + * + * @param store store used as a storage system for the cache using this resiliency strategy. + */ + public RobustResilienceStrategy(RecoveryStore store) { + super(store); + } + + /** + * Return null. + * + * @param key the key being retrieved + * @param e the triggered failure + * @return null + */ + @Override + public V getFailure(K key, StoreAccessException e) { + cleanup(key, e); + return null; + } + + /** + * Return false. + * + * @param key the key being queried + * @param e the triggered failure + * @return false + */ + @Override + public boolean containsKeyFailure(K key, StoreAccessException e) { + cleanup(key, e); + return false; + } + + /** + * Do nothing. + * + * @param key the key being put + * @param value the value being put + * @param e the triggered failure + */ + @Override + public void putFailure(K key, V value, StoreAccessException e) { + cleanup(key, e); + } + + /** + * Do nothing. + * + * @param key the key being removed + * @param e the triggered failure + */ + @Override + public void removeFailure(K key, StoreAccessException e) { + cleanup(key, e); + } + + /** + * Do nothing. + * + * @param e the triggered failure + */ + @Override + public void clearFailure(StoreAccessException e) { + cleanup(e); + } + + /** + * Do nothing and return null. + * + * @param key the key being put + * @param value the value being put + * @param e the triggered failure + * @return null + */ + @Override + public V putIfAbsentFailure(K key, V value, StoreAccessException e) { + cleanup(key, e); + return null; + } + + /** + * Do nothing and return false. + * + * @param key the key being removed + * @param value the value being removed + * @param e the triggered failure + * @return false + */ + @Override + public boolean removeFailure(K key, V value, StoreAccessException e) { + cleanup(key, e); + return false; + } + + /** + * Do nothing and return null. + * + * @param key the key being replaced + * @param value the value being replaced + * @param e the triggered failure + * @return null + */ + @Override + public V replaceFailure(K key, V value, StoreAccessException e) { + cleanup(key, e); + return null; + } + + /** + * Do nothing and return false. + * + * @param key the key being replaced + * @param value the expected value + * @param newValue the replacement value + * @param e the triggered failure + * @return false + */ + @Override + public boolean replaceFailure(K key, V value, V newValue, StoreAccessException e) { + cleanup(key, e); + return false; + } + + /** + * Do nothing and return a map of all the provided keys and null values. + * + * @param keys the keys being retrieved + * @param e the triggered failure + * @return map of all provided keys and null values + */ + @Override + public Map getAllFailure(Iterable keys, StoreAccessException e) { + cleanup(keys, e); + + HashMap result = keys instanceof Collection ? new HashMap<>(((Collection) keys).size()) : new HashMap<>(); + for (K key : keys) { + result.put(key, null); + } + return result; + } + + /** + * Do nothing. + * + * @param entries the entries being put + * @param e the triggered failure + */ + @Override + public void putAllFailure(Map entries, StoreAccessException e) { + cleanup(entries.keySet(), e); + } + + /** + * Do nothing. + * + * @param keys the keys being removed + * @param e the triggered failure + */ + @Override + public void removeAllFailure(Iterable keys, StoreAccessException e) { + cleanup(keys, e); + } + +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java similarity index 93% rename from impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java index 71760e5430..925bae9f2e 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java @@ -37,12 +37,11 @@ public class DefaultSizeOfEngine implements SizeOfEngine { private final SizeOf sizeOf; private final long chmTreeBinOffset; private final long onHeapKeyOffset; - private final SizeOfFilterSource filterSource = new SizeOfFilterSource(true); public DefaultSizeOfEngine(long maxObjectGraphSize, long maxObjectSize) { this.maxObjectGraphSize = maxObjectGraphSize; this.maxObjectSize = maxObjectSize; - this.sizeOf = SizeOf.newInstance(filterSource.getFilters()); + this.sizeOf = SizeOf.newInstance(new SizeOfFilterSource(true).getFilters()); this.onHeapKeyOffset = sizeOf.deepSizeOf(new CopiedOnHeapKey<>(new Object(), new IdentityCopier<>())); this.chmTreeBinOffset = sizeOf.deepSizeOf(ConcurrentHashMap.FAKE_TREE_BIN); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java similarity index 97% rename from impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java index f83d9e94ef..02384cf566 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java @@ -50,7 +50,7 @@ public void stop() { } @Override - public SizeOfEngine createSizeOfEngine(ResourceUnit resourceUnit, ServiceConfiguration... serviceConfigs) { + public SizeOfEngine createSizeOfEngine(ResourceUnit resourceUnit, ServiceConfiguration... serviceConfigs) { boolean isByteSized = resourceUnit instanceof MemoryUnit; if(!isByteSized) { return new NoopSizeOfEngine(); // Noop Size of Engine diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactory.java similarity index 88% rename from impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactory.java index 3cbd00c945..7f03033ba6 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactory.java @@ -16,21 +16,22 @@ package org.ehcache.impl.internal.sizeof; +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.spi.service.ServiceFactory; -import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; +import org.osgi.service.component.annotations.Component; /** * @author Abhilash * */ - +@Component public class DefaultSizeOfEngineProviderFactory implements ServiceFactory { @Override - public SizeOfEngineProvider create(ServiceCreationConfiguration configuration) { + public SizeOfEngineProvider create(ServiceCreationConfiguration configuration) { long maxTraversals = DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; long maxSize = DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; if(configuration != null) { @@ -42,8 +43,8 @@ public SizeOfEngineProvider create(ServiceCreationConfiguration getServiceType() { - return SizeOfEngineProvider.class; + public Class getServiceType() { + return DefaultSizeOfEngineProvider.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/NoopSizeOfEngine.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/NoopSizeOfEngine.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/sizeof/NoopSizeOfEngine.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/NoopSizeOfEngine.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/EhcacheVisitorListener.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/EhcacheVisitorListener.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/EhcacheVisitorListener.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/EhcacheVisitorListener.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/exceptions/VisitorListenerException.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/exceptions/VisitorListenerException.java similarity index 92% rename from impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/exceptions/VisitorListenerException.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/exceptions/VisitorListenerException.java index 5baa6d0ba2..d94677fc1d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/exceptions/VisitorListenerException.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/sizeof/listeners/exceptions/VisitorListenerException.java @@ -22,6 +22,8 @@ */ public class VisitorListenerException extends RuntimeException { + private static final long serialVersionUID = 524283391526103012L; + public VisitorListenerException(String message) { super(message); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java similarity index 83% rename from impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java index 0da757abaf..61530b2352 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java @@ -20,7 +20,6 @@ import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.copy.DefaultCopierConfiguration.Type; import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; -import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; import org.ehcache.impl.internal.classes.ClassInstanceProvider; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.impl.copy.SerializingCopier; @@ -36,7 +35,7 @@ /** * @author Albin Suresh */ -public class DefaultCopyProvider extends ClassInstanceProvider, Copier> implements CopyProvider { +public class DefaultCopyProvider extends ClassInstanceProvider, DefaultCopierConfiguration, Copier> implements CopyProvider { private static final Logger LOG = LoggerFactory.getLogger(DefaultCopyProvider.class); @@ -47,12 +46,12 @@ public DefaultCopyProvider(DefaultCopyProviderConfiguration configuration) { @Override - public Copier createKeyCopier(final Class clazz, Serializer serializer, ServiceConfiguration... configs) { + public Copier createKeyCopier(final Class clazz, Serializer serializer, ServiceConfiguration... configs) { return createCopier(Type.KEY, clazz, serializer, configs); } @Override - public Copier createValueCopier(final Class clazz, Serializer serializer, ServiceConfiguration... configs) { + public Copier createValueCopier(final Class clazz, Serializer serializer, ServiceConfiguration... configs) { return createCopier(Type.VALUE, clazz, serializer, configs); } @@ -64,10 +63,10 @@ public void releaseCopier(Copier copier) throws Exception { } private Copier createCopier(Type type, Class clazz, - Serializer serializer, ServiceConfiguration... configs) { + Serializer serializer, ServiceConfiguration... configs) { DefaultCopierConfiguration conf = find(type, configs); Copier copier; - final ClassInstanceConfiguration> preConfigured = preconfigured.get(clazz); + final DefaultCopierConfiguration preConfigured = preconfigured.get(clazz); if (conf != null && conf.getClazz().isAssignableFrom(SerializingCopier.class)) { if (serializer == null) { throw new IllegalStateException("No Serializer configured for type '" + clazz.getName() @@ -91,25 +90,26 @@ private Copier createCopier(Class clazz, DefaultCopierConfiguration @SuppressWarnings("unchecked") Copier copier = (Copier) newInstance(clazz, config); if (copier == null) { - @SuppressWarnings("unchecked") - Copier defaultInstance = (Copier) newInstance(clazz, new DefaultCopierConfiguration((Class) IdentityCopier.class, type)); + @SuppressWarnings({"unchecked", "rawtypes"}) + Copier defaultInstance = (Copier) newInstance(clazz, new DefaultCopierConfiguration<>((Class>) (Class) IdentityCopier.class, type)); copier = defaultInstance; } return copier; } @SuppressWarnings("unchecked") - private static DefaultCopierConfiguration find(Type type, ServiceConfiguration... serviceConfigurations) { + private static DefaultCopierConfiguration find(Type type, ServiceConfiguration... serviceConfigurations) { DefaultCopierConfiguration result = null; - Collection copierConfigurations = + @SuppressWarnings("rawtypes") + Collection> copierConfigurations = (Collection) ServiceUtils.findAmongst(DefaultCopierConfiguration.class, (Object[])serviceConfigurations); - for (DefaultCopierConfiguration copierConfiguration : copierConfigurations) { + for (DefaultCopierConfiguration copierConfiguration : copierConfigurations) { if (copierConfiguration.getType() == type) { if (result != null) { throw new IllegalArgumentException("Duplicate " + type + " copier : " + copierConfiguration); } - result = copierConfiguration; + result = (DefaultCopierConfiguration) copierConfiguration; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderFactory.java similarity index 88% rename from impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderFactory.java index e6c2b83ec6..3616eab932 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderFactory.java @@ -16,19 +16,20 @@ package org.ehcache.impl.internal.spi.copy; +import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; -import org.ehcache.impl.internal.spi.copy.DefaultCopyProvider; import org.ehcache.spi.copy.CopyProvider; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.spi.service.ServiceFactory; +import org.osgi.service.component.annotations.Component; /** * @author Albin Suresh */ +@Component public class DefaultCopyProviderFactory implements ServiceFactory { @Override - public CopyProvider create(final ServiceCreationConfiguration configuration) { + public CopyProvider create(final ServiceCreationConfiguration configuration) { if (configuration != null && !(configuration instanceof DefaultCopyProviderConfiguration)) { throw new IllegalArgumentException("Expected a configuration of type DefaultCopyProviderConfiguration but got " + configuration.getClass().getSimpleName()); @@ -38,7 +39,7 @@ public CopyProvider create(final ServiceCreationConfiguration conf } @Override - public Class getServiceType() { - return CopyProvider.class; + public Class getServiceType() { + return DefaultCopyProvider.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProvider.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProvider.java similarity index 87% rename from impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProvider.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProvider.java index 63c550731c..e9409a511e 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProvider.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProvider.java @@ -25,7 +25,7 @@ /** * @author rism */ -public class DefaultCacheEventListenerProvider extends ClassInstanceProvider> implements CacheEventListenerProvider { +public class DefaultCacheEventListenerProvider extends ClassInstanceProvider> implements CacheEventListenerProvider { public DefaultCacheEventListenerProvider() { super(null, DefaultCacheEventListenerConfiguration.class); @@ -33,7 +33,7 @@ public DefaultCacheEventListenerProvider() { @SuppressWarnings("unchecked") @Override - public CacheEventListener createEventListener(String alias, ServiceConfiguration serviceConfiguration) { + public CacheEventListener createEventListener(String alias, ServiceConfiguration serviceConfiguration) { return (CacheEventListener) newInstance(alias, serviceConfiguration); } @@ -41,4 +41,4 @@ public CacheEventListener createEventListener(String alias, Service public void releaseEventListener(CacheEventListener cacheEventListener) throws Exception { releaseInstance(cacheEventListener); } -} \ No newline at end of file +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderFactory.java similarity index 76% rename from impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderFactory.java index 5a419fe4c9..6545a34b7a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderFactory.java @@ -17,21 +17,23 @@ package org.ehcache.impl.internal.spi.event; import org.ehcache.core.events.CacheEventListenerProvider; -import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; /** * @author rism */ +@Component public class DefaultCacheEventListenerProviderFactory implements ServiceFactory { @Override - public DefaultCacheEventListenerProvider create(ServiceCreationConfiguration configuration) { + public CacheEventListenerProvider create(ServiceCreationConfiguration configuration) { return new DefaultCacheEventListenerProvider(); } @Override - public Class getServiceType() { - return CacheEventListenerProvider.class; + public Class getServiceType() { + return DefaultCacheEventListenerProvider.class; } -} \ No newline at end of file +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProvider.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProvider.java new file mode 100644 index 0000000000..1c1682b805 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProvider.java @@ -0,0 +1,70 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.spi.loaderwriter; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; +import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterProviderConfiguration; +import org.ehcache.impl.internal.classes.ClassInstanceProvider; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; + +import java.util.HashSet; +import java.util.Set; + +/** + * @author Alex Snaps + */ +public class DefaultCacheLoaderWriterProvider extends ClassInstanceProvider> implements CacheLoaderWriterProvider { + + private final Set cachesWithJsrRegisteredLoaders = new HashSet<>(); + + public DefaultCacheLoaderWriterProvider(DefaultCacheLoaderWriterProviderConfiguration configuration) { + super(configuration, DefaultCacheLoaderWriterConfiguration.class, true); + } + + @SuppressWarnings("unchecked") + @Override + public CacheLoaderWriter createCacheLoaderWriter(final String alias, final CacheConfiguration cacheConfiguration) { + return (CacheLoaderWriter) newInstance(alias, cacheConfiguration); + } + + @Override + public void releaseCacheLoaderWriter(String alias, CacheLoaderWriter cacheLoaderWriter) throws Exception { + releaseInstance(cacheLoaderWriter); + } + + @Override + public CacheLoaderWriterConfiguration getPreConfiguredCacheLoaderWriterConfig(String alias) { + return getPreconfigured(alias); + } + + @Override + public boolean isLoaderJsrProvided(String alias) { + return cachesWithJsrRegisteredLoaders.contains(alias); + } + + protected void registerJsrLoaderForCache(String alias) { + cachesWithJsrRegisteredLoaders.add(alias); + } + + protected void deregisterJsrLoaderForCache(String alias) { + cachesWithJsrRegisteredLoaders.remove(alias); + } + +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderFactory.java similarity index 82% rename from impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderFactory.java index e99a445507..475c7df4a3 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderFactory.java @@ -16,19 +16,20 @@ package org.ehcache.impl.internal.spi.loaderwriter; +import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterProviderConfiguration; -import org.ehcache.impl.internal.spi.loaderwriter.DefaultCacheLoaderWriterProvider; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.spi.service.ServiceFactory; +import org.osgi.service.component.annotations.Component; /** * @author Alex Snaps */ +@Component public class DefaultCacheLoaderWriterProviderFactory implements ServiceFactory { @Override - public DefaultCacheLoaderWriterProvider create(ServiceCreationConfiguration configuration) { + public CacheLoaderWriterProvider create(ServiceCreationConfiguration configuration) { if (configuration != null && !(configuration instanceof DefaultCacheLoaderWriterProviderConfiguration)) { throw new IllegalArgumentException("Expected a configuration of type DefaultCacheLoaderWriterProviderConfiguration but got " + configuration .getClass() @@ -38,7 +39,7 @@ public DefaultCacheLoaderWriterProvider create(ServiceCreationConfiguration getServiceType() { - return CacheLoaderWriterProvider.class; + public Class getServiceType() { + return DefaultCacheLoaderWriterProvider.class; } } diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProvider.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProvider.java new file mode 100644 index 0000000000..0807f32cad --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProvider.java @@ -0,0 +1,122 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.spi.resilience; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyProviderConfiguration; +import org.ehcache.impl.internal.classes.ClassInstanceProvider; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.ResilienceStrategyProvider; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; + +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class DefaultResilienceStrategyProvider implements ResilienceStrategyProvider { + + private final ComponentProvider regularStrategies; + private final ComponentProvider loaderWriterStrategies; + + protected DefaultResilienceStrategyProvider() { + this(new DefaultResilienceStrategyProviderConfiguration()); + } + + protected DefaultResilienceStrategyProvider(DefaultResilienceStrategyProviderConfiguration configuration) { + this.regularStrategies = new ComponentProvider(configuration.getDefaultConfiguration(), configuration); + this.loaderWriterStrategies = new ComponentProvider(configuration.getDefaultLoaderWriterConfiguration(), configuration); + } + + @Override + public ResilienceStrategy createResilienceStrategy(String alias, CacheConfiguration configuration, + RecoveryStore recoveryStore) { + DefaultResilienceStrategyConfiguration config = findSingletonAmongst(DefaultResilienceStrategyConfiguration.class, configuration.getServiceConfigurations()); + return regularStrategies.create(alias, config, recoveryStore); + } + + @Override + public ResilienceStrategy createResilienceStrategy(String alias, CacheConfiguration configuration, + RecoveryStore recoveryStore, CacheLoaderWriter loaderWriter) { + DefaultResilienceStrategyConfiguration config = findSingletonAmongst(DefaultResilienceStrategyConfiguration.class, configuration.getServiceConfigurations()); + return loaderWriterStrategies.create(alias, config, recoveryStore, loaderWriter); + } + + @Override + public void start(ServiceProvider serviceProvider) { + regularStrategies.start(serviceProvider); + try { + loaderWriterStrategies.start(serviceProvider); + } catch (Throwable t) { + try { + regularStrategies.stop(); + } catch (Throwable u) { + t.addSuppressed(u); + } + throw t; + } + } + + @Override + public void stop() { + try { + regularStrategies.stop(); + } finally { + loaderWriterStrategies.stop(); + } + } + + static class ComponentProvider extends ClassInstanceProvider> { + + private DefaultResilienceStrategyConfiguration defaultConfiguration; + + protected ComponentProvider(DefaultResilienceStrategyConfiguration dflt, DefaultResilienceStrategyProviderConfiguration factoryConfig) { + super(factoryConfig, DefaultResilienceStrategyConfiguration.class); + this.defaultConfiguration = dflt; + } + + @SuppressWarnings("unchecked") + public ResilienceStrategy create(String alias, DefaultResilienceStrategyConfiguration config, + RecoveryStore recoveryStore, CacheLoaderWriter loaderWriter) { + if (config == null) { + DefaultResilienceStrategyConfiguration preconfigured = getPreconfigured(alias); + if (preconfigured == null) { + return (ResilienceStrategy) newInstance(alias, defaultConfiguration.bind(recoveryStore, loaderWriter)); + } else { + return (ResilienceStrategy) newInstance(alias, preconfigured.bind(recoveryStore, loaderWriter)); + } + } else { + return (ResilienceStrategy) newInstance(alias, config.bind(recoveryStore, loaderWriter)); + } + } + + @SuppressWarnings("unchecked") + public ResilienceStrategy create(String alias, DefaultResilienceStrategyConfiguration config, RecoveryStore recoveryStore) { + if (config == null) { + DefaultResilienceStrategyConfiguration preconfigured = getPreconfigured(alias); + if (preconfigured == null) { + return (ResilienceStrategy) newInstance(alias, defaultConfiguration.bind(recoveryStore)); + } else { + return (ResilienceStrategy) newInstance(alias, preconfigured.bind(recoveryStore)); + } + } else { + return (ResilienceStrategy) newInstance(alias, config.bind(recoveryStore)); + } + } + } + +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderFactory.java new file mode 100644 index 0000000000..e0f74d0bdb --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderFactory.java @@ -0,0 +1,44 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.spi.resilience; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyProviderConfiguration; +import org.ehcache.spi.resilience.ResilienceStrategyProvider; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; + +@Component +public class DefaultResilienceStrategyProviderFactory implements ServiceFactory { + @Override + public ResilienceStrategyProvider create(ServiceCreationConfiguration configuration) { + if (configuration == null) { + return new DefaultResilienceStrategyProvider(); + } else if (configuration instanceof DefaultResilienceStrategyProviderConfiguration) { + return new DefaultResilienceStrategyProvider((DefaultResilienceStrategyProviderConfiguration) configuration); + } else { + throw new IllegalArgumentException("Expected a configuration of type DefaultResilienceStrategyProviderConfiguration (or none) but got " + configuration + .getClass() + .getName()); + } + } + + @Override + public Class getServiceType() { + return DefaultResilienceStrategyProvider.class; + } + +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java similarity index 91% rename from impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java index 940fb7220a..e5bfa39a3a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java @@ -71,7 +71,7 @@ public DefaultSerializationProvider(DefaultSerializationProviderConfiguration co } @Override - public Serializer createKeySerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { + public Serializer createKeySerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { DefaultSerializerConfiguration configuration = find(DefaultSerializerConfiguration.Type.KEY, configs); Serializer serializer = getUserProvidedSerializer(configuration); if (serializer == null) { @@ -83,7 +83,7 @@ public Serializer createKeySerializer(Class clazz, ClassLoader classLo } @Override - public Serializer createValueSerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { + public Serializer createValueSerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { DefaultSerializerConfiguration configuration = find(DefaultSerializerConfiguration.Type.VALUE, configs); Serializer serializer = getUserProvidedSerializer(configuration); if (serializer == null) { @@ -94,7 +94,7 @@ public Serializer createValueSerializer(Class clazz, ClassLoader class return serializer; } - private Serializer createSerializer(Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... configs) throws UnsupportedTypeException { + private Serializer createSerializer(Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... configs) throws UnsupportedTypeException { Class> klazz = getSerializerClassFor(clazz, config); try { @@ -208,16 +208,18 @@ private static Serializer getUserProvidedSerializer(DefaultSerializerConf } @SuppressWarnings("unchecked") - private static DefaultSerializerConfiguration find(DefaultSerializerConfiguration.Type type, ServiceConfiguration... serviceConfigurations) { + private static DefaultSerializerConfiguration find(DefaultSerializerConfiguration.Type type, ServiceConfiguration... serviceConfigurations) { DefaultSerializerConfiguration result = null; - Collection serializationProviderConfigurations = ServiceUtils.findAmongst(DefaultSerializerConfiguration.class, (Object[]) serviceConfigurations); - for (DefaultSerializerConfiguration serializationProviderConfiguration : serializationProviderConfigurations) { + @SuppressWarnings("rawtypes") + Collection> serializationProviderConfigurations = + (Collection) ServiceUtils.findAmongst(DefaultSerializerConfiguration.class, (Object[]) serviceConfigurations); + for (DefaultSerializerConfiguration serializationProviderConfiguration : serializationProviderConfigurations) { if (serializationProviderConfiguration.getType() == type) { if (result != null) { throw new IllegalArgumentException("Duplicate " + type + " serialization provider : " + serializationProviderConfiguration); } - result = serializationProviderConfiguration; + result = (DefaultSerializerConfiguration) serializationProviderConfiguration; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderFactory.java similarity index 86% rename from impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderFactory.java index a738fee32a..19f85eb43a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderFactory.java @@ -16,19 +16,20 @@ package org.ehcache.impl.internal.spi.serialization; +import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; -import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.spi.service.ServiceFactory; +import org.osgi.service.component.annotations.Component; /** * @author Ludovic Orban */ +@Component public class DefaultSerializationProviderFactory implements ServiceFactory { @Override - public DefaultSerializationProvider create(ServiceCreationConfiguration configuration) { + public DefaultSerializationProvider create(ServiceCreationConfiguration configuration) { if (configuration != null && !(configuration instanceof DefaultSerializationProviderConfiguration)) { throw new IllegalArgumentException("Expected a configuration of type DefaultSerializationProviderConfiguration but got " + configuration .getClass() @@ -38,7 +39,7 @@ public DefaultSerializationProvider create(ServiceCreationConfiguration getServiceType() { - return SerializationProvider.class; + public Class getServiceType() { + return DefaultSerializationProvider.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/BinaryValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/BinaryValueHolder.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/BinaryValueHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/BinaryValueHolder.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java similarity index 78% rename from impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java index e81c683276..8ff6b2af6c 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java @@ -17,7 +17,7 @@ import org.ehcache.Cache; import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; import org.ehcache.core.spi.store.events.StoreEventSource; @@ -27,8 +27,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.NoSuchElementException; import java.util.Set; import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; @@ -51,7 +53,7 @@ public ValueHolder getAndFault(K key) throws StoreAccessException { } @Override - public ValueHolder computeIfAbsentAndFault(K key, Function mappingFunction) throws StoreAccessException { + public ValueHolder computeIfAbsentAndFault(K key, Function mappingFunction) { return null; } @@ -66,12 +68,12 @@ public void setInvalidationValve(InvalidationValve valve) { } @Override - public ValueHolder get(K key) throws StoreAccessException { + public ValueHolder get(K key) { return null; } @Override - public boolean containsKey(K key) throws StoreAccessException { + public boolean containsKey(K key) { return false; } @@ -81,8 +83,8 @@ public PutStatus put(K key, V value) throws StoreAccessException { } @Override - public ValueHolder putIfAbsent(K key, V value) throws StoreAccessException { - return EmptyValueHolder.empty(); + public ValueHolder putIfAbsent(K key, V value, Consumer put) throws StoreAccessException { + return null; } @Override @@ -106,7 +108,7 @@ public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessEx } @Override - public void clear() throws StoreAccessException { + public void clear() { } @@ -133,6 +135,11 @@ public void setEventOrdering(boolean ordering) { } + @Override + public void setSynchronous(boolean synchronous) throws IllegalArgumentException { + + } + @Override public boolean isEventOrdering() { return false; @@ -149,24 +156,24 @@ public boolean hasNext() { } @Override - public Cache.Entry> next() throws StoreAccessException { - return null; + public Cache.Entry> next() { + throw new NoSuchElementException(); } }; } @Override - public ValueHolder compute(K key, BiFunction mappingFunction) throws StoreAccessException { - return EmptyValueHolder.empty(); + public ValueHolder getAndCompute(K key, BiFunction mappingFunction) { + return null; } @Override - public ValueHolder compute(K key, BiFunction mappingFunction, Supplier replaceEqual) throws StoreAccessException { + public ValueHolder computeAndGet(K key, BiFunction mappingFunction, Supplier replaceEqual, Supplier invokeWriter) { return null; } @Override - public ValueHolder computeIfAbsent(K key, Function mappingFunction) throws StoreAccessException { + public ValueHolder computeIfAbsent(K key, Function mappingFunction) { return null; } @@ -176,19 +183,19 @@ public Map> bulkCompute(Set keys, Function> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, Supplier replaceEqual) { Map> map = new HashMap<>(keys.size()); for(K key : keys) { - map.put(key, EmptyValueHolder.empty()); + map.put(key, null); } return map; } @Override - public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { + public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) { Map> map = new HashMap<>(keys.size()); for(K key : keys) { - map.put(key, EmptyValueHolder.empty()); + map.put(key, null); } return map; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/DiskWriteThreadPool.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/DiskWriteThreadPool.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/disk/DiskWriteThreadPool.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/DiskWriteThreadPool.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCache.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCache.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCache.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCache.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java similarity index 79% rename from impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index 1340fb1d04..d1f91c5681 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -22,8 +22,8 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; import org.ehcache.core.spi.service.DiskResourceService; -import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; -import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.statistics.OperationStatistic; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; @@ -34,13 +34,13 @@ import org.ehcache.impl.internal.store.offheap.EhcacheOffHeapBackingMap; import org.ehcache.impl.internal.store.offheap.SwitchableEvictionAdvisor; import org.ehcache.impl.internal.store.offheap.OffHeapValueHolder; -import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; import org.ehcache.impl.internal.store.offheap.portability.SerializerPortability; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.StatefulSerializer; +import org.ehcache.spi.service.OptionalServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; @@ -61,8 +61,6 @@ import org.terracotta.offheapstore.disk.storage.FileBackedStorageEngine; import org.terracotta.offheapstore.storage.portability.Portability; import org.terracotta.offheapstore.util.Factory; -import org.terracotta.statistics.MappedOperationStatistic; -import org.terracotta.statistics.StatisticsManager; import java.io.File; import java.io.FileInputStream; @@ -70,13 +68,10 @@ import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.Method; import java.lang.reflect.Proxy; -import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; @@ -87,6 +82,7 @@ import static java.lang.Math.max; import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; +import static java.util.Arrays.asList; import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; /** @@ -96,8 +92,6 @@ public class OffHeapDiskStore extends AbstractOffHeapStore implement private static final Logger LOGGER = LoggerFactory.getLogger(OffHeapDiskStore.class); - private static final String STATISTICS_TAG = "Disk"; - private static final String KEY_TYPE_PROPERTY_NAME = "keyType"; private static final String VALUE_TYPE_PROPERTY_NAME = "valueType"; @@ -120,8 +114,8 @@ public class OffHeapDiskStore extends AbstractOffHeapStore implement public OffHeapDiskStore(FileBasedPersistenceContext fileBasedPersistenceContext, ExecutionService executionService, String threadPoolAlias, int writerConcurrency, int diskSegments, - final Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, long sizeInBytes) { - super(STATISTICS_TAG, config, timeSource, eventDispatcher); + final Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, long sizeInBytes, StatisticsService statisticsService) { + super(config, timeSource, eventDispatcher, statisticsService); this.fileBasedPersistenceContext = fileBasedPersistenceContext; this.executionService = executionService; this.threadPoolAlias = threadPoolAlias; @@ -146,6 +140,11 @@ public OffHeapDiskStore(FileBasedPersistenceContext fileBasedPersistenceContext, } } + @Override + protected String getStatisticsTag() { + return "Disk"; + } + @Override public List getConfigurationChangeListeners() { return Collections.emptyList(); @@ -217,11 +216,11 @@ private EhcachePersistentConcurrentOffHeapClockCache> r MappedPageSource source = new MappedPageSource(dataFile, false, size); try { PersistentPortability keyPortability = persistent(new SerializerPortability<>(keySerializer)); - PersistentPortability> elementPortability = persistent(new OffHeapValueHolderPortability<>(valueSerializer)); + PersistentPortability> valuePortability = persistent(createValuePortability(valueSerializer)); DiskWriteThreadPool writeWorkers = new DiskWriteThreadPool(executionService, threadPoolAlias, writerConcurrency); Factory>> storageEngineFactory = FileBackedStorageEngine.createFactory(source, - max((size / diskSegments) / 10, 1024), BYTES, keyPortability, elementPortability, writeWorkers, false); + max((size / diskSegments) / 10, 1024), BYTES, keyPortability, valuePortability, writeWorkers, false); EhcachePersistentSegmentFactory> factory = new EhcachePersistentSegmentFactory<>( source, @@ -255,11 +254,11 @@ private EhcachePersistentConcurrentOffHeapClockCache> c MappedPageSource source = new MappedPageSource(getDataFile(), size); PersistentPortability keyPortability = persistent(new SerializerPortability<>(keySerializer)); - PersistentPortability> elementPortability = persistent(new OffHeapValueHolderPortability<>(valueSerializer)); + PersistentPortability> valuePortability = persistent(createValuePortability(valueSerializer)); DiskWriteThreadPool writeWorkers = new DiskWriteThreadPool(executionService, threadPoolAlias, writerConcurrency); Factory>> storageEngineFactory = FileBackedStorageEngine.createFactory(source, - max((size / diskSegments) / 10, 1024), BYTES, keyPortability, elementPortability, writeWorkers, true); + max((size / diskSegments) / 10, 1024), BYTES, keyPortability, valuePortability, writeWorkers, true); EhcachePersistentSegmentFactory> factory = new EhcachePersistentSegmentFactory<>( source, @@ -294,12 +293,11 @@ private File getMetadataFile() { } @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class, DiskResourceService.class}) - public static class Provider implements Store.Provider, AuthoritativeTier.Provider { + public static class Provider extends BaseStoreProvider implements AuthoritativeTier.Provider { - private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>(); - private final Map, PersistenceSpaceIdentifier> createdStores = new ConcurrentWeakIdentityHashMap<>(); + private final Map, OperationStatistic[]> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>(); + private final Map, PersistenceSpaceIdentifier> createdStores = new ConcurrentWeakIdentityHashMap<>(); private final String defaultThreadPool; - private volatile ServiceProvider serviceProvider; private volatile DiskResourceService diskPersistenceService; public Provider() { @@ -311,44 +309,40 @@ public Provider(String threadPoolAlias) { } @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { - return resourceTypes.equals(Collections.singleton(ResourceType.Core.DISK)) ? 1 : 0; + protected ResourceType getResourceType() { + return ResourceType.Core.DISK; } @Override - public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { - return authorityResource.equals(ResourceType.Core.DISK) ? 1 : 0; + public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { + return resourceTypes.equals(Collections.singleton(getResourceType())) ? 1 : 0; } @Override - public OffHeapDiskStore createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - OffHeapDiskStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList<>(); + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + return authorityResource.equals(getResourceType()) ? 1 : 0; + } - MappedOperationStatistic get = - new MappedOperationStatistic<>( - store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.DISK.getTierHeight(), "get", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(store); - tieredOps.add(get); + @Override + public OffHeapDiskStore createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + OffHeapDiskStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()), serviceConfigs); - MappedOperationStatistic evict = - new MappedOperationStatistic<>( - store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(store); - tieredOps.add(evict); + tierOperationStatistics.put(store, new OperationStatistic[] { + createTranslatedStatistic(store, "get", TierOperationOutcomes.GET_TRANSLATION, "get"), + createTranslatedStatistic(store, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); - tierOperationStatistics.put(store, tieredOps); return store; } - private OffHeapDiskStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, ServiceConfiguration... serviceConfigs) { - if (serviceProvider == null) { + private OffHeapDiskStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, ServiceConfiguration... serviceConfigs) { + if (getServiceProvider() == null) { throw new NullPointerException("ServiceProvider is null in OffHeapDiskStore.Provider."); } - TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource(); - ExecutionService executionService = serviceProvider.getService(ExecutionService.class); + TimeSource timeSource = getServiceProvider().getService(TimeSourceService.class).getTimeSource(); + ExecutionService executionService = getServiceProvider().getService(ExecutionService.class); - SizedResourcePool diskPool = storeConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK); + SizedResourcePool diskPool = storeConfig.getResourcePools().getPoolForResource(getResourceType()); if (!(diskPool.getUnit() instanceof MemoryUnit)) { throw new IllegalArgumentException("OffHeapDiskStore only supports resources configuration expressed in \"memory\" unit"); } @@ -376,7 +370,7 @@ private OffHeapDiskStore createStoreInternal(Configuration st OffHeapDiskStore offHeapStore = new OffHeapDiskStore<>(persistenceContext, executionService, threadPoolAlias, writerConcurrency, diskSegments, - storeConfig, timeSource, eventDispatcher, unit.toBytes(diskPool.getSize())); + storeConfig, timeSource, eventDispatcher, unit.toBytes(diskPool.getSize()), getServiceProvider().getService(StatisticsService.class)); createdStores.put(offHeapStore, space); return offHeapStore; } catch (CachePersistenceException cpex) { @@ -392,7 +386,7 @@ public void releaseStore(Store resource) { try { OffHeapDiskStore offHeapDiskStore = (OffHeapDiskStore)resource; close(offHeapDiskStore); - StatisticsManager.nodeFor(offHeapDiskStore).clean(); + getStatisticsService().ifPresent(s -> s.cleanForNode(offHeapDiskStore)); tierOperationStatistics.remove(offHeapDiskStore); } catch (IOException e) { throw new RuntimeException(e); @@ -414,15 +408,15 @@ static void close(final OffHeapDiskStore resource) throws IOExcepti @Override public void initStore(Store resource) { - PersistenceSpaceIdentifier identifier = createdStores.get(resource); + PersistenceSpaceIdentifier identifier = createdStores.get(resource); if (identifier == null) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } OffHeapDiskStore diskStore = (OffHeapDiskStore) resource; - Serializer keySerializer = diskStore.keySerializer; + Serializer keySerializer = diskStore.keySerializer; if (keySerializer instanceof StatefulSerializer) { - StateRepository stateRepository = null; + StateRepository stateRepository; try { stateRepository = diskPersistenceService.getStateRepositoryWithin(identifier, "key-serializer"); } catch (CachePersistenceException e) { @@ -430,9 +424,9 @@ public void initStore(Store resource) { } ((StatefulSerializer)keySerializer).init(stateRepository); } - Serializer valueSerializer = diskStore.valueSerializer; + Serializer valueSerializer = diskStore.valueSerializer; if (valueSerializer instanceof StatefulSerializer) { - StateRepository stateRepository = null; + StateRepository stateRepository; try { stateRepository = diskPersistenceService.getStateRepositoryWithin(identifier, "value-serializer"); } catch (CachePersistenceException e) { @@ -450,7 +444,7 @@ static void init(final OffHeapDiskStore resource) { @Override public void start(ServiceProvider serviceProvider) { - this.serviceProvider = serviceProvider; + super.start(serviceProvider); diskPersistenceService = serviceProvider.getService(DiskResourceService.class); if (diskPersistenceService == null) { throw new IllegalStateException("Unable to find file based persistence service"); @@ -459,30 +453,24 @@ public void start(ServiceProvider serviceProvider) { @Override public void stop() { - this.serviceProvider = null; - createdStores.clear(); - diskPersistenceService = null; + try { + createdStores.clear(); + diskPersistenceService = null; + } finally { + super.stop(); + } } @Override - public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { OffHeapDiskStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig .getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - MappedOperationStatistic get = - new MappedOperationStatistic<>( - authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", ResourceType.Core.DISK.getTierHeight(), "getAndFault", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(authoritativeTier); - tieredOps.add(get); + tierOperationStatistics.put(authoritativeTier, new OperationStatistic[] { + createTranslatedStatistic(authoritativeTier, "get", TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "getAndFault"), + createTranslatedStatistic(authoritativeTier, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); - MappedOperationStatistic evict = - new MappedOperationStatistic<>( - authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(authoritativeTier); - tieredOps.add(evict); - - tierOperationStatistics.put(authoritativeTier, tieredOps); return authoritativeTier; } @@ -503,18 +491,23 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { */ @SuppressWarnings("unchecked") public static PersistentPortability persistent(final Portability normal) { - final Class normalKlazz = normal.getClass(); - Class[] delegateInterfaces = normalKlazz.getInterfaces(); - Class[] proxyInterfaces = Arrays.copyOf(delegateInterfaces, delegateInterfaces.length + 1); - proxyInterfaces[delegateInterfaces.length] = PersistentPortability.class; - - return (PersistentPortability) Proxy.newProxyInstance(normal.getClass().getClassLoader(), proxyInterfaces, (o, method, os) -> { - if (method.getDeclaringClass().equals(Persistent.class)) { - return null; - } else { - return method.invoke(normal, os); + if (normal instanceof PersistentPortability) { + return (PersistentPortability) normal; + } else { + LinkedHashSet> proxyInterfaces = new LinkedHashSet<>(); + for (Class klazz = normal.getClass(); klazz != null; klazz = klazz.getSuperclass()) { + proxyInterfaces.addAll(asList(klazz.getInterfaces())); } - }); + proxyInterfaces.add(PersistentPortability.class); + + return (PersistentPortability) Proxy.newProxyInstance(normal.getClass().getClassLoader(), proxyInterfaces.toArray(new Class[0]), (o, method, os) -> { + if (method.getDeclaringClass().equals(Persistent.class)) { + return null; + } else { + return method.invoke(normal, os); + } + }); + } } String getThreadPoolAlias() { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderFactory.java similarity index 88% rename from impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderFactory.java index b259debf1a..b7d4d4b8ca 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderFactory.java @@ -16,17 +16,19 @@ package org.ehcache.impl.internal.store.disk; +import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.spi.service.ServiceFactory; +import org.osgi.service.component.annotations.Component; /** * @author Chris Dennis */ +@Component public class OffHeapDiskStoreProviderFactory implements ServiceFactory { @Override - public OffHeapDiskStore.Provider create(ServiceCreationConfiguration configuration) { + public OffHeapDiskStore.Provider create(ServiceCreationConfiguration configuration) { if (configuration == null) { return new OffHeapDiskStore.Provider(); } else if (configuration instanceof OffHeapDiskStoreProviderConfiguration) { @@ -37,7 +39,7 @@ public OffHeapDiskStore.Provider create(ServiceCreationConfiguration getServiceType() { + public Class getServiceType() { return OffHeapDiskStore.Provider.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentFactory.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentFactory.java index 11828ce2ae..d8bd571a3a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentFactory.java @@ -16,6 +16,7 @@ package org.ehcache.impl.internal.store.disk.factories; +import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.store.offheap.SwitchableEvictionAdvisor; import org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory.EhcacheSegment; import org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory.EhcacheSegment.EvictionListener; @@ -26,6 +27,10 @@ import org.terracotta.offheapstore.pinning.PinnableSegment; import org.terracotta.offheapstore.util.Factory; +import java.nio.IntBuffer; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; import java.util.concurrent.locks.Lock; import static org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory.EhcacheSegment.ADVISED_AGAINST_EVICTION; @@ -111,5 +116,29 @@ public boolean evict(int index, boolean shrink) { lock.unlock(); } } + + @Override + protected Set> createEntrySet() { + return new EntrySet(); + } + + private class EntrySet extends LockedEntrySet { + @Override + public Iterator> iterator() { + readLock().lock(); + try { + return new LockedEntryIterator() { + @Override + protected Map.Entry create(IntBuffer entry) { + Map.Entry entryObject = super.create(entry); + ((Store.ValueHolder) entryObject.getValue()).get(); + return entryObject; + } + }; + } finally { + readLock().unlock(); + } + } + } } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java similarity index 99% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java index b3d51bdd6e..82d46e99cf 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java @@ -38,7 +38,7 @@ interface Backend { OnHeapValueHolder compute(K key, BiFunction, OnHeapValueHolder> biFunction); - Backend clear(); + void clear(); Collection>> removeAllWithHash(int hash); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java similarity index 90% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java index 4a37035303..ed43573ff2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java @@ -19,6 +19,7 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.impl.internal.concurrent.EvictingConcurrentMap; import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapKey; import org.ehcache.impl.internal.store.heap.holders.LookupOnlyOnHeapKey; import org.ehcache.impl.internal.store.heap.holders.OnHeapKey; @@ -29,12 +30,12 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; -import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; +import java.util.function.Supplier; /** * Backend dealing with a key copier and storing keys as {@code OnHeapKey} @@ -44,15 +45,17 @@ */ class KeyCopyBackend implements Backend { - private final ConcurrentHashMap, OnHeapValueHolder> keyCopyMap; + private volatile EvictingConcurrentMap, OnHeapValueHolder> keyCopyMap; + private final Supplier, OnHeapValueHolder>> keyCopyMapSupplier; private final boolean byteSized; private final Copier keyCopier; private final AtomicLong byteSize = new AtomicLong(0L); - KeyCopyBackend(boolean byteSized, Copier keyCopier) { + KeyCopyBackend(boolean byteSized, Copier keyCopier, Supplier, OnHeapValueHolder>> keyCopyMapSupplier) { this.byteSized = byteSized; this.keyCopier = keyCopier; - keyCopyMap = new ConcurrentHashMap<>(); + this.keyCopyMap = keyCopyMapSupplier.get(); + this.keyCopyMapSupplier = keyCopyMapSupplier; } @Override @@ -101,8 +104,6 @@ public void updateUsageInBytesIfRequired(long delta) { } } - - @Override public Iterable keySet() { final Iterator> iter = keyCopyMap.keySet().iterator(); @@ -154,8 +155,9 @@ public OnHeapValueHolder compute(final K key, final BiFunction clear() { - return new KeyCopyBackend<>(byteSized, keyCopier); + public void clear() { + // This is faster than performing a clear on the underlying map + keyCopyMap = keyCopyMapSupplier.get(); } @Override diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java new file mode 100644 index 0000000000..a1a6423b09 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -0,0 +1,1776 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import org.ehcache.Cache; +import org.ehcache.config.SizedResourcePool; +import org.ehcache.core.CacheConfigurationChangeEvent; +import org.ehcache.core.CacheConfigurationChangeListener; +import org.ehcache.core.CacheConfigurationProperty; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.ResourceType; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.config.ExpiryUtils; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.events.StoreEventSink; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.statistics.StatisticType; +import org.ehcache.core.statistics.OperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.ehcache.impl.internal.concurrent.EvictingConcurrentMap; +import org.ehcache.impl.store.BaseStore; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.core.spi.store.heap.LimitExceededException; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.store.DefaultStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; +import org.ehcache.impl.internal.store.heap.holders.OnHeapValueHolder; +import org.ehcache.impl.internal.store.heap.holders.SerializedOnHeapValueHolder; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.impl.store.HashUtils; +import org.ehcache.impl.serialization.TransientStateRepository; +import org.ehcache.sizeof.annotations.IgnoreSizeOf; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.core.spi.store.tiering.HigherCachingTier; +import org.ehcache.impl.internal.store.BinaryValueHolder; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.copy.CopyProvider; +import org.ehcache.spi.service.OptionalServiceDependencies; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.core.spi.store.heap.SizeOfEngine; +import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; +import org.ehcache.core.statistics.CachingTierOperationOutcomes; +import org.ehcache.core.statistics.HigherCachingTierOperationOutcomes; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.statistics.TierOperationOutcomes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.ehcache.config.Eviction.noAdvice; +import static org.ehcache.core.config.ExpiryUtils.isExpiryDurationInfinite; +import static org.ehcache.core.exceptions.StorePassThroughException.handleException; + +/** + * {@link Store} and {@link HigherCachingTier} implementation for on heap. + * + *

                                      + * It currently carries the following responsibilities: + *

                                        + *
                                      • Expiry
                                      • + *
                                      • Eviction
                                      • + *
                                      • Events
                                      • + *
                                      • Statistics
                                      • + *
                                      + * + * The storage of mappings is handled by a {@link ConcurrentHashMap} accessed through {@link Backend}. + */ +public class OnHeapStore extends BaseStore implements HigherCachingTier { + + private static final Logger LOG = LoggerFactory.getLogger(OnHeapStore.class); + + private static final int ATTEMPT_RATIO = 4; + private static final int EVICTION_RATIO = 2; + + private static final EvictionAdvisor> EVICTION_ADVISOR = (key, value) -> value.evictionAdvice(); + + /** + * Comparator for eviction candidates: + * The highest priority is the ValueHolder having the smallest lastAccessTime. + */ + private static final Comparator> EVICTION_PRIORITIZER = (t, u) -> { + if (t instanceof Fault) { + return -1; + } else if (u instanceof Fault) { + return 1; + } else { + return Long.signum(u.lastAccessTime() - t.lastAccessTime()); + } + }; + + private static final InvalidationListener NULL_INVALIDATION_LISTENER = (InvalidationListener) (key, valueHolder) -> { + // Do nothing + }; + + static final int SAMPLE_SIZE = 8; + private final Backend map; + + private final Copier valueCopier; + + private final SizeOfEngine sizeOfEngine; + private final OnHeapStrategy strategy; + + private volatile long capacity; + private final EvictionAdvisor evictionAdvisor; + private final ExpiryPolicy expiry; + private final TimeSource timeSource; + private final StoreEventDispatcher storeEventDispatcher; + @SuppressWarnings("unchecked") + private volatile InvalidationListener invalidationListener = (InvalidationListener) NULL_INVALIDATION_LISTENER; + + private final CacheConfigurationChangeListener cacheConfigurationChangeListener = new CacheConfigurationChangeListener() { + @Override + public void cacheConfigurationChange(CacheConfigurationChangeEvent event) { + if(event.getProperty().equals(CacheConfigurationProperty.UPDATE_SIZE)) { + ResourcePools updatedPools = (ResourcePools)event.getNewValue(); + ResourcePools configuredPools = (ResourcePools)event.getOldValue(); + if(updatedPools.getPoolForResource(ResourceType.Core.HEAP).getSize() != + configuredPools.getPoolForResource(ResourceType.Core.HEAP).getSize()) { + LOG.info("Updating size to: {}", updatedPools.getPoolForResource(ResourceType.Core.HEAP).getSize()); + SizedResourcePool pool = updatedPools.getPoolForResource(ResourceType.Core.HEAP); + if (pool.getUnit() instanceof MemoryUnit) { + capacity = ((MemoryUnit)pool.getUnit()).toBytes(pool.getSize()); + } else { + capacity = pool.getSize(); + } + } + } + } + }; + + private final OperationObserver getObserver; + private final OperationObserver putObserver; + private final OperationObserver removeObserver; + private final OperationObserver putIfAbsentObserver; + private final OperationObserver conditionalRemoveObserver; + private final OperationObserver replaceObserver; + private final OperationObserver conditionalReplaceObserver; + private final OperationObserver computeObserver; + private final OperationObserver computeIfAbsentObserver; + private final OperationObserver evictionObserver; + private final OperationObserver expirationObserver; + + private final OperationObserver getOrComputeIfAbsentObserver; + private final OperationObserver invalidateObserver; + private final OperationObserver invalidateAllObserver; + private final OperationObserver invalidateAllWithHashObserver; + private final OperationObserver silentInvalidateObserver; + private final OperationObserver silentInvalidateAllObserver; + private final OperationObserver silentInvalidateAllWithHashObserver; + + private static final Supplier REPLACE_EQUALS_TRUE = () -> Boolean.TRUE; + + public OnHeapStore(Configuration config, TimeSource timeSource, Copier keyCopier, Copier valueCopier, SizeOfEngine sizeOfEngine, StoreEventDispatcher eventDispatcher, StatisticsService statisticsService) { + this(config, timeSource, keyCopier, valueCopier, sizeOfEngine, eventDispatcher, ConcurrentHashMap::new, statisticsService); + } + + public OnHeapStore(Configuration config, TimeSource timeSource, Copier keyCopier, Copier valueCopier, + SizeOfEngine sizeOfEngine, StoreEventDispatcher eventDispatcher, Supplier> backingMapSupplier, StatisticsService statisticsService) { + super(config, statisticsService); + + Objects.requireNonNull(keyCopier, "keyCopier must not be null"); + + this.valueCopier = Objects.requireNonNull(valueCopier, "valueCopier must not be null"); + this.timeSource = Objects.requireNonNull(timeSource, "timeSource must not be null"); + this.sizeOfEngine = Objects.requireNonNull(sizeOfEngine, "sizeOfEngine must not be null"); + + SizedResourcePool heapPool = config.getResourcePools().getPoolForResource(ResourceType.Core.HEAP); + if (heapPool == null) { + throw new IllegalArgumentException("OnHeap store must be configured with a resource of type 'heap'"); + } + + boolean byteSized = !(this.sizeOfEngine instanceof NoopSizeOfEngine); + this.capacity = byteSized ? ((MemoryUnit) heapPool.getUnit()).toBytes(heapPool.getSize()) : heapPool.getSize(); + + if (config.getEvictionAdvisor() == null) { + this.evictionAdvisor = noAdvice(); + } else { + this.evictionAdvisor = config.getEvictionAdvisor(); + } + this.expiry = config.getExpiry(); + this.storeEventDispatcher = eventDispatcher; + + if (keyCopier instanceof IdentityCopier) { + this.map = new SimpleBackend<>(byteSized, castBackend(backingMapSupplier)); + } else { + this.map = new KeyCopyBackend<>(byteSized, keyCopier, castBackend(backingMapSupplier)); + } + + strategy = OnHeapStrategy.strategy(this, expiry, timeSource); + + getObserver = createObserver("get", StoreOperationOutcomes.GetOutcome.class, true); + putObserver = createObserver("put", StoreOperationOutcomes.PutOutcome.class, true); + removeObserver = createObserver("remove", StoreOperationOutcomes.RemoveOutcome.class, true); + putIfAbsentObserver = createObserver("putIfAbsent", StoreOperationOutcomes.PutIfAbsentOutcome.class, true); + conditionalRemoveObserver = createObserver("conditionalRemove", StoreOperationOutcomes.ConditionalRemoveOutcome.class, true); + replaceObserver = createObserver("replace", StoreOperationOutcomes.ReplaceOutcome.class, true); + conditionalReplaceObserver = createObserver("conditionalReplace", StoreOperationOutcomes.ConditionalReplaceOutcome.class, true); + computeObserver = createObserver("compute", StoreOperationOutcomes.ComputeOutcome.class, true); + computeIfAbsentObserver = createObserver("computeIfAbsent", StoreOperationOutcomes.ComputeIfAbsentOutcome.class, true); + evictionObserver = createObserver("eviction", StoreOperationOutcomes.EvictionOutcome.class, false); + expirationObserver = createObserver("expiration", StoreOperationOutcomes.ExpirationOutcome.class, false); + + getOrComputeIfAbsentObserver = createObserver("getOrComputeIfAbsent", CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class, true); + invalidateObserver = createObserver("invalidate", CachingTierOperationOutcomes.InvalidateOutcome.class, true); + invalidateAllObserver = createObserver("invalidateAll", CachingTierOperationOutcomes.InvalidateAllOutcome.class, true); + invalidateAllWithHashObserver = createObserver("invalidateAllWithHash", CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.class, true); + + silentInvalidateObserver = createObserver("silentInvalidate", HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.class, true); + silentInvalidateAllObserver = createObserver("silentInvalidateAll", HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.class, true); + silentInvalidateAllWithHashObserver = createObserver("silentInvalidateAllWithHash", HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.class, true); + + Set tags = new HashSet<>(Arrays.asList(getStatisticsTag(), "tier")); + registerStatistic("mappings", StatisticType.COUNTER, tags, () -> map.mappingCount()); + if (byteSized) { + registerStatistic("occupiedMemory", StatisticType.GAUGE, tags, () -> map.byteSize()); + } + } + + @Override + protected String getStatisticsTag() { + return "OnHeap"; + } + + @SuppressWarnings({"unchecked", "rawtype"}) + private Supplier> castBackend(Supplier> backingMap) { + return (Supplier) backingMap; + } + + @Override + public ValueHolder get(K key) throws StoreAccessException { + checkKey(key); + + getObserver.begin(); + try { + OnHeapValueHolder mapping = getQuiet(key); + + if (mapping == null) { + getObserver.end(StoreOperationOutcomes.GetOutcome.MISS); + return null; + } + + strategy.setAccessAndExpiryTimeWhenCallerOutsideLock(key, mapping, timeSource.getTimeMillis()); + + getObserver.end(StoreOperationOutcomes.GetOutcome.HIT); + return mapping; + } catch (RuntimeException re) { + throw handleException(re); + } + } + + private OnHeapValueHolder getQuiet(K key) throws StoreAccessException { + try { + OnHeapValueHolder mapping = map.get(key); + if (mapping == null) { + return null; + } + + if (strategy.isExpired(mapping)) { + expireMappingUnderLock(key, mapping); + return null; + } + return mapping; + } catch (RuntimeException re) { + throw handleException(re); + } + } + + @Override + public boolean containsKey(K key) throws StoreAccessException { + checkKey(key); + return getQuiet(key) != null; + } + + @Override + public PutStatus put(K key, V value) throws StoreAccessException { + checkKey(key); + checkValue(value); + + putObserver.begin(); + + long now = timeSource.getTimeMillis(); + AtomicReference statOutcome = new AtomicReference<>(StoreOperationOutcomes.PutOutcome.NOOP); + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + + try { + map.compute(key, (mappedKey, mappedValue) -> { + + long delta = 0; + + if (mappedValue != null && mappedValue.isExpired(now)) { + delta -= mappedValue.size(); + mappedValue = null; + } + + OnHeapValueHolder newValue; + + if (mappedValue == null) { + newValue = newCreateValueHolder(key, value, now, eventSink); + if (newValue != null) { + delta += newValue.size(); + statOutcome.set(StoreOperationOutcomes.PutOutcome.PUT); + } + } else { + newValue = newUpdateValueHolder(key, mappedValue, value, now, eventSink); + if (newValue != null) { + delta += newValue.size() - mappedValue.size(); + } else { + delta -= mappedValue.size(); + } + statOutcome.set(StoreOperationOutcomes.PutOutcome.PUT); + } + + updateUsageInBytesIfRequired(delta); + + return newValue; + }); + storeEventDispatcher.releaseEventSink(eventSink); + + enforceCapacity(); + + StoreOperationOutcomes.PutOutcome outcome = statOutcome.get(); + putObserver.end(outcome); + switch (outcome) { + case PUT: + return PutStatus.PUT; + case NOOP: + return PutStatus.NOOP; + default: + throw new AssertionError("Unknown enum value " + outcome); + } + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + putObserver.end(StoreOperationOutcomes.PutOutcome.FAILURE); + throw handleException(re); + } + } + + @Override + public boolean remove(K key) throws StoreAccessException { + checkKey(key); + + removeObserver.begin(); + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + long now = timeSource.getTimeMillis(); + + try { + AtomicReference statisticOutcome = new AtomicReference<>(StoreOperationOutcomes.RemoveOutcome.MISS); + + map.computeIfPresent(key, (mappedKey, mappedValue) -> { + updateUsageInBytesIfRequired(- mappedValue.size()); + if (mappedValue.isExpired(now)) { + fireOnExpirationEvent(mappedKey, mappedValue, eventSink); + return null; + } + + statisticOutcome.set(StoreOperationOutcomes.RemoveOutcome.REMOVED); + eventSink.removed(mappedKey, mappedValue); + return null; + }); + storeEventDispatcher.releaseEventSink(eventSink); + StoreOperationOutcomes.RemoveOutcome outcome = statisticOutcome.get(); + removeObserver.end(outcome); + switch (outcome) { + case REMOVED: + return true; + case MISS: + return false; + default: + throw new AssertionError("Unknown enum value " + outcome); + } + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw handleException(re); + } + } + + @Override + public ValueHolder putIfAbsent(K key, V value, Consumer put) throws StoreAccessException { + checkKey(key); + checkValue(value); + + putIfAbsentObserver.begin(); + + AtomicReference> returnValue = new AtomicReference<>(null); + AtomicBoolean entryActuallyAdded = new AtomicBoolean(); + long now = timeSource.getTimeMillis(); + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + + try { + map.compute(key, (mappedKey, mappedValue) -> { + long delta = 0; + + OnHeapValueHolder holder; + + if (mappedValue == null || mappedValue.isExpired(now)) { + if (mappedValue != null) { + delta -= mappedValue.size(); + fireOnExpirationEvent(mappedKey, mappedValue, eventSink); + } + + holder = newCreateValueHolder(key, value, now, eventSink); + if (holder != null) { + delta += holder.size(); + } + entryActuallyAdded.set(holder != null); + } else { + returnValue.set(mappedValue); + holder = strategy.setAccessAndExpiryWhenCallerlUnderLock(key, mappedValue, now, eventSink); + if (holder == null) { + delta -= mappedValue.size(); + } + } + + updateUsageInBytesIfRequired(delta); + + return holder; + }); + + storeEventDispatcher.releaseEventSink(eventSink); + + if (entryActuallyAdded.get()) { + enforceCapacity(); + putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.PUT); + } else { + putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.HIT); + } + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw handleException(re); + } + + return returnValue.get(); + } + + @Override + public RemoveStatus remove(K key, V value) throws StoreAccessException { + checkKey(key); + checkValue(value); + + conditionalRemoveObserver.begin(); + + AtomicReference outcome = new AtomicReference<>(RemoveStatus.KEY_MISSING); + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + + try { + map.computeIfPresent(key, (mappedKey, mappedValue) -> { + long now = timeSource.getTimeMillis(); + + if (mappedValue.isExpired(now)) { + updateUsageInBytesIfRequired(- mappedValue.size()); + fireOnExpirationEvent(mappedKey, mappedValue, eventSink); + return null; + } else if (value.equals(mappedValue.get())) { + updateUsageInBytesIfRequired(- mappedValue.size()); + eventSink.removed(mappedKey, mappedValue); + outcome.set(RemoveStatus.REMOVED); + return null; + } else { + outcome.set(RemoveStatus.KEY_PRESENT); + OnHeapValueHolder holder = strategy.setAccessAndExpiryWhenCallerlUnderLock(key, mappedValue, now, eventSink); + if (holder == null) { + updateUsageInBytesIfRequired(- mappedValue.size()); + } + return holder; + } + }); + storeEventDispatcher.releaseEventSink(eventSink); + RemoveStatus removeStatus = outcome.get(); + switch (removeStatus) { + case REMOVED: + conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED); + break; + case KEY_MISSING: + case KEY_PRESENT: + conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS); + break; + default: + + } + return removeStatus; + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw handleException(re); + } + + } + + @Override + public ValueHolder replace(K key, V value) throws StoreAccessException { + checkKey(key); + checkValue(value); + + replaceObserver.begin(); + + AtomicReference> returnValue = new AtomicReference<>(null); + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + + try { + map.computeIfPresent(key, (mappedKey, mappedValue) -> { + long now = timeSource.getTimeMillis(); + + if (mappedValue.isExpired(now)) { + updateUsageInBytesIfRequired(- mappedValue.size()); + fireOnExpirationEvent(mappedKey, mappedValue, eventSink); + return null; + } else { + returnValue.set(mappedValue); + OnHeapValueHolder holder = newUpdateValueHolder(key, mappedValue, value, now, eventSink); + if (holder != null) { + updateUsageInBytesIfRequired(holder.size() - mappedValue.size()); + } else { + updateUsageInBytesIfRequired(- mappedValue.size()); + } + return holder; + } + }); + OnHeapValueHolder valueHolder = returnValue.get(); + storeEventDispatcher.releaseEventSink(eventSink); + enforceCapacity(); + if (valueHolder != null) { + replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.REPLACED); + } else { + replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.MISS); + } + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw handleException(re); + } + + return returnValue.get(); + } + + @Override + public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { + checkKey(key); + checkValue(oldValue); + checkValue(newValue); + + conditionalReplaceObserver.begin(); + + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + AtomicReference outcome = new AtomicReference<>(ReplaceStatus.MISS_NOT_PRESENT); + + try { + map.computeIfPresent(key, (mappedKey, mappedValue) -> { + long now = timeSource.getTimeMillis(); + + V existingValue = mappedValue.get(); + if (mappedValue.isExpired(now)) { + fireOnExpirationEvent(mappedKey, mappedValue, eventSink); + updateUsageInBytesIfRequired(- mappedValue.size()); + return null; + } else if (oldValue.equals(existingValue)) { + outcome.set(ReplaceStatus.HIT); + OnHeapValueHolder holder = newUpdateValueHolder(key, mappedValue, newValue, now, eventSink); + if (holder != null) { + updateUsageInBytesIfRequired(holder.size() - mappedValue.size()); + } else { + updateUsageInBytesIfRequired(- mappedValue.size()); + } + return holder; + } else { + outcome.set(ReplaceStatus.MISS_PRESENT); + OnHeapValueHolder holder = strategy.setAccessAndExpiryWhenCallerlUnderLock(key, mappedValue, now, eventSink); + if (holder == null) { + updateUsageInBytesIfRequired(- mappedValue.size()); + } + return holder; + } + }); + storeEventDispatcher.releaseEventSink(eventSink); + enforceCapacity(); + ReplaceStatus replaceStatus = outcome.get(); + switch (replaceStatus) { + case HIT: + conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED); + break; + case MISS_PRESENT: + case MISS_NOT_PRESENT: + conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS); + break; + default: + throw new AssertionError("Unknown enum value " + replaceStatus); + } + return replaceStatus; + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw handleException(re); + } + } + + @Override + public void clear() { + map.clear(); + } + + @Override + public Iterator>> iterator() { + java.util.Iterator>> iterator = map.entrySetIterator(); + return new Iterator>>() { + private Cache.Entry> prefetched = advance(); + + @Override + public boolean hasNext() { + return prefetched != null; + } + + @Override + public Cache.Entry> next() throws StoreAccessException { + if (prefetched == null) { + throw new NoSuchElementException(); + } else { + Cache.Entry> next = prefetched; + prefetched = advance(); + return next; + } + } + + private Cache.Entry> advance() { + while (iterator.hasNext()) { + Entry> next = iterator.next(); + + if (strategy.isExpired(next.getValue())) { + expireMappingUnderLock(next.getKey(), next.getValue()); + } else { + return new Cache.Entry>() { + @Override + public K getKey() { + return next.getKey(); + } + + @Override + public ValueHolder getValue() { + return next.getValue(); + } + }; + } + } + return null; + } + }; + } + + @Override + public ValueHolder getOrComputeIfAbsent(K key, Function> source) throws StoreAccessException { + try { + getOrComputeIfAbsentObserver.begin(); + Backend backEnd = map; + + // First try to find the value from heap + OnHeapValueHolder cachedValue = backEnd.get(key); + + long now = timeSource.getTimeMillis(); + if (cachedValue == null) { + Fault fault = new Fault<>(() -> source.apply(key)); + cachedValue = backEnd.putIfAbsent(key, fault); + + if (cachedValue == null) { + return resolveFault(key, backEnd, now, fault); + } + } + + // If we have a real value (not a fault), we make sure it is not expired + // If yes, we remove it and ask the source just in case. If no, we return it (below) + if (!(cachedValue instanceof Fault)) { + if (cachedValue.isExpired(now)) { + expireMappingUnderLock(key, cachedValue); + + // On expiration, we might still be able to get a value from the fault. For instance, when a load-writer is used + Fault fault = new Fault<>(() -> source.apply(key)); + cachedValue = backEnd.putIfAbsent(key, fault); + + if (cachedValue == null) { + return resolveFault(key, backEnd, now, fault); + } + } + else { + strategy.setAccessAndExpiryTimeWhenCallerOutsideLock(key, cachedValue, now); + } + } + + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT); + + // Return the value that we found in the cache (by getting the fault or just returning the plain value depending on what we found) + return getValue(cachedValue); + } catch (RuntimeException re) { + throw handleException(re); + } + } + + @Override + public ValueHolder getOrDefault(K key, Function> source) throws StoreAccessException { + try { + Backend backEnd = map; + + // First try to find the value from heap + OnHeapValueHolder cachedValue = backEnd.get(key); + + if (cachedValue == null) { + return source.apply(key); + } else { + // If we have a real value (not a fault), we make sure it is not expired + if (!(cachedValue instanceof Fault)) { + if (cachedValue.isExpired(timeSource.getTimeMillis())) { + expireMappingUnderLock(key, cachedValue); + return null; + } + } + + // Return the value that we found in the cache (by getting the fault or just returning the plain value depending on what we found) + return getValue(cachedValue); + } + } catch (RuntimeException re) { + throw handleException(re); + } + } + + private ValueHolder resolveFault(K key, Backend backEnd, long now, Fault fault) throws StoreAccessException { + try { + ValueHolder value = fault.getValueHolder(); + OnHeapValueHolder newValue; + if(value != null) { + newValue = importValueFromLowerTier(key, value, now, backEnd, fault); + if (newValue == null) { + // Inline expiry or sizing failure + backEnd.remove(key, fault); + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return value; + } + } else { + backEnd.remove(key, fault); + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + return null; + } + + if (backEnd.replace(key, fault, newValue)) { + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED); + updateUsageInBytesIfRequired(newValue.size()); + enforceCapacity(); + return newValue; + } + + AtomicReference> invalidatedValue = new AtomicReference<>(); + backEnd.computeIfPresent(key, (mappedKey, mappedValue) -> { + notifyInvalidation(key, mappedValue); + invalidatedValue.set(mappedValue); + updateUsageInBytesIfRequired(mappedValue.size()); + return null; + }); + + ValueHolder p = getValue(invalidatedValue.get()); + if (p != null) { + if (p.isExpired(now)) { + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS); + return null; + } + + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return p; + } + + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return newValue; + + } catch (Throwable e) { + backEnd.remove(key, fault); + throw new StoreAccessException(e); + } + } + + private void invalidateInGetOrComputeIfAbsent(Backend map, K key, ValueHolder value, Fault fault, long now, Duration expiration) { + map.computeIfPresent(key, (mappedKey, mappedValue) -> { + if(mappedValue.equals(fault)) { + try { + invalidationListener.onInvalidation(key, cloneValueHolder(key, value, now, expiration, false)); + } catch (LimitExceededException ex) { + throw new AssertionError("Sizing is not expected to happen."); + } + return null; + } + return mappedValue; + }); + } + + @Override + public void invalidate(K key) throws StoreAccessException { + checkKey(key); + + invalidateObserver.begin(); + try { + AtomicReference outcome = new AtomicReference<>(CachingTierOperationOutcomes.InvalidateOutcome.MISS); + + map.computeIfPresent(key, (k, present) -> { + if (!(present instanceof Fault)) { + notifyInvalidation(key, present); + outcome.set(CachingTierOperationOutcomes.InvalidateOutcome.REMOVED); + } + updateUsageInBytesIfRequired(- present.size()); + return null; + }); + invalidateObserver.end(outcome.get()); + } catch (RuntimeException re) { + throw handleException(re); + } + } + + @Override + public void silentInvalidate(K key, Function, Void> function) throws StoreAccessException { + checkKey(key); + + silentInvalidateObserver.begin(); + try { + AtomicReference outcome = + new AtomicReference<>(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.MISS); + + map.compute(key, (mappedKey, mappedValue) -> { + long size = 0L; + OnHeapValueHolder holderToPass = null; + if (mappedValue != null) { + size = mappedValue.size(); + if (!(mappedValue instanceof Fault)) { + holderToPass = mappedValue; + outcome.set(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.REMOVED); + } + } + function.apply(holderToPass); + updateUsageInBytesIfRequired(- size); + return null; + }); + silentInvalidateObserver.end(outcome.get()); + } catch (RuntimeException re) { + throw handleException(re); + } + } + + @Override + public void invalidateAll() throws StoreAccessException { + invalidateAllObserver.begin(); + long errorCount = 0; + StoreAccessException firstException = null; + for(K key : map.keySet()) { + try { + invalidate(key); + } catch (StoreAccessException cae) { + errorCount++; + if (firstException == null) { + firstException = cae; + } + } + } + if (firstException != null) { + invalidateAllObserver.end(CachingTierOperationOutcomes.InvalidateAllOutcome.FAILURE); + throw new StoreAccessException("Error(s) during invalidation - count is " + errorCount, firstException); + } + clear(); + invalidateAllObserver.end(CachingTierOperationOutcomes.InvalidateAllOutcome.SUCCESS); + } + + @Override + public void silentInvalidateAll(BiFunction, Void> biFunction) throws StoreAccessException { + silentInvalidateAllObserver.begin(); + StoreAccessException exception = null; + long errorCount = 0; + + for (K k : map.keySet()) { + try { + silentInvalidate(k, mappedValue -> { + biFunction.apply(k, mappedValue); + return null; + }); + } catch (StoreAccessException e) { + errorCount++; + if (exception == null) { + exception = e; + } + } + } + + if (exception != null) { + silentInvalidateAllObserver.end(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.FAILURE); + throw new StoreAccessException("silentInvalidateAll failed - error count: " + errorCount, exception); + } + silentInvalidateAllObserver.end(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.SUCCESS); + } + + @Override + public void silentInvalidateAllWithHash(long hash, BiFunction, Void> biFunction) { + silentInvalidateAllWithHashObserver.begin(); + int intHash = HashUtils.longHashToInt(hash); + Collection>> removed = map.removeAllWithHash(intHash); + for (Entry> entry : removed) { + biFunction.apply(entry.getKey(), entry.getValue()); + } + silentInvalidateAllWithHashObserver.end(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.SUCCESS); + } + + private void notifyInvalidation(K key, ValueHolder p) { + InvalidationListener invalidationListener = this.invalidationListener; + if(invalidationListener != null) { + invalidationListener.onInvalidation(key, p); + } + } + + @Override + public void setInvalidationListener(InvalidationListener providedInvalidationListener) { + this.invalidationListener = (key, valueHolder) -> { + if (!(valueHolder instanceof Fault)) { + providedInvalidationListener.onInvalidation(key, valueHolder); + } + }; + } + + @Override + public void invalidateAllWithHash(long hash) { + invalidateAllWithHashObserver.begin(); + int intHash = HashUtils.longHashToInt(hash); + Collection>> removed = map.removeAllWithHash(intHash); + for (Entry> entry : removed) { + notifyInvalidation(entry.getKey(), entry.getValue()); + } + LOG.debug("CLIENT: onheap store removed all with hash {}", intHash); + invalidateAllWithHashObserver.end(CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.SUCCESS); + } + + private ValueHolder getValue(ValueHolder cachedValue) { + if (cachedValue instanceof Fault) { + return ((Fault)cachedValue).getValueHolder(); + } else { + return cachedValue; + } + } + + private long getSizeOfKeyValuePairs(K key, OnHeapValueHolder holder) throws LimitExceededException { + return sizeOfEngine.sizeof(key, holder); + } + + /** + * Place holder used when loading an entry from the authority into this caching tier + * + * @param the value type of the caching tier + */ + private static class Fault extends OnHeapValueHolder { + + private static final int FAULT_ID = -1; + + @IgnoreSizeOf + private final Supplier> source; + private ValueHolder value; + private Throwable throwable; + private boolean complete; + + public Fault(Supplier> source) { + super(FAULT_ID, 0, true); + this.source = source; + } + + private void complete(ValueHolder value) { + synchronized (this) { + this.value = value; + this.complete = true; + notifyAll(); + } + } + + private ValueHolder getValueHolder() { + synchronized (this) { + if (!complete) { + try { + complete(source.get()); + } catch (Throwable e) { + fail(e); + } + } + } + + return throwOrReturn(); + } + + @Override + public long getId() { + throw new UnsupportedOperationException("You should NOT call that?!"); + } + + private ValueHolder throwOrReturn() { + if (throwable != null) { + if (throwable instanceof RuntimeException) { + throw (RuntimeException) throwable; + } + throw new RuntimeException("Faulting from repository failed", throwable); + } + return value; + } + + private void fail(Throwable t) { + synchronized (this) { + this.throwable = t; + this.complete = true; + notifyAll(); + } + throwOrReturn(); + } + + @Override + public V get() { + throw new UnsupportedOperationException(); + } + + @Override + public long creationTime() { + throw new UnsupportedOperationException(); + } + + @Override + public void setExpirationTime(long expirationTime) { + throw new UnsupportedOperationException(); + } + + @Override + public long expirationTime() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean isExpired(long expirationTime) { + throw new UnsupportedOperationException(); + } + + @Override + public long lastAccessTime() { + return Long.MAX_VALUE; + } + + @Override + public void setLastAccessTime(long lastAccessTime) { + throw new UnsupportedOperationException(); + } + + @Override + public void setSize(long size) { + throw new UnsupportedOperationException("Faults should not be sized"); + } + + /** + * Faults always have a size of 0 + * + * @return {@code 0} + */ + @Override + public long size() { + return 0L; + } + + @Override + public String toString() { + return "[Fault : " + (complete ? (throwable == null ? String.valueOf(value) : throwable.getMessage()) : "???") + "]"; + } + + @Override + public boolean equals(Object obj) { + return obj == this; + } + } + + @Override + public ValueHolder getAndCompute(K key, BiFunction mappingFunction) throws StoreAccessException { + checkKey(key); + + computeObserver.begin(); + + long now = timeSource.getTimeMillis(); + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + try { + AtomicReference> oldValue = new AtomicReference<>(); + AtomicReference outcome = + new AtomicReference<>(StoreOperationOutcomes.ComputeOutcome.MISS); + + map.compute(key, (mappedKey, mappedValue) -> { + long delta = 0L; + + if (mappedValue != null && mappedValue.isExpired(now)) { + fireOnExpirationEvent(mappedKey, mappedValue, eventSink); + delta -= mappedValue.size(); + mappedValue = null; + } + + OnHeapValueHolder holder; + V existingValue = mappedValue == null ? null : mappedValue.get(); + if (mappedValue != null) { + oldValue.set(mappedValue); + } + V computedValue = mappingFunction.apply(mappedKey, existingValue); + if (computedValue == null) { + if (existingValue != null) { + eventSink.removed(mappedKey, mappedValue); + outcome.set(StoreOperationOutcomes.ComputeOutcome.REMOVED); + delta -= mappedValue.size(); + } + holder = null; + } else { + checkValue(computedValue); + if (mappedValue != null) { + outcome.set(StoreOperationOutcomes.ComputeOutcome.PUT); + holder = newUpdateValueHolder(key, mappedValue, computedValue, now, eventSink); + delta -= mappedValue.size(); + if (holder != null) { + delta += holder.size(); + } + } else { + holder = newCreateValueHolder(key, computedValue, now, eventSink); + if (holder != null) { + outcome.set(StoreOperationOutcomes.ComputeOutcome.PUT); + delta += holder.size(); + } + } + } + + updateUsageInBytesIfRequired(delta); + + return holder; + }); + + storeEventDispatcher.releaseEventSink(eventSink); + enforceCapacity(); + computeObserver.end(outcome.get()); + return oldValue.get(); + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw handleException(re); + } + } + + @Override + public ValueHolder computeAndGet(K key, BiFunction mappingFunction, Supplier replaceEqual, Supplier invokeWriter) throws StoreAccessException { + checkKey(key); + + computeObserver.begin(); + + long now = timeSource.getTimeMillis(); + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + try { + AtomicReference> valueHeld = new AtomicReference<>(); + AtomicReference outcome = + new AtomicReference<>(StoreOperationOutcomes.ComputeOutcome.MISS); + + OnHeapValueHolder computeResult = map.compute(key, (mappedKey, mappedValue) -> { + long delta = 0L; + + if (mappedValue != null && mappedValue.isExpired(now)) { + fireOnExpirationEvent(mappedKey, mappedValue, eventSink); + delta -= mappedValue.size(); + mappedValue = null; + } + + OnHeapValueHolder holder; + V existingValue = mappedValue == null ? null : mappedValue.get(); + V computedValue = mappingFunction.apply(mappedKey, existingValue); + if (computedValue == null) { + if (existingValue != null) { + eventSink.removed(mappedKey, mappedValue); + outcome.set(StoreOperationOutcomes.ComputeOutcome.REMOVED); + delta -= mappedValue.size(); + } + holder = null; + } else if (Objects.equals(existingValue, computedValue) && !replaceEqual.get() && mappedValue != null) { + holder = strategy.setAccessAndExpiryWhenCallerlUnderLock(key, mappedValue, now, eventSink); + outcome.set(StoreOperationOutcomes.ComputeOutcome.HIT); + if (holder == null) { + valueHeld.set(mappedValue); + delta -= mappedValue.size(); + } + } else { + checkValue(computedValue); + if (mappedValue != null) { + outcome.set(StoreOperationOutcomes.ComputeOutcome.PUT); + long expirationTime = mappedValue.expirationTime(); + holder = newUpdateValueHolder(key, mappedValue, computedValue, now, eventSink); + delta -= mappedValue.size(); + if (holder == null) { + try { + valueHeld.set(makeValue(key, computedValue, now, expirationTime, valueCopier, false)); + } catch (LimitExceededException e) { + // Not happening + } + } else { + delta += holder.size(); + } + } else { + holder = newCreateValueHolder(key, computedValue, now, eventSink); + if (holder != null) { + outcome.set(StoreOperationOutcomes.ComputeOutcome.PUT); + delta += holder.size(); + } + } + } + + updateUsageInBytesIfRequired(delta); + + return holder; + }); + + if (computeResult == null && valueHeld.get() != null) { + computeResult = valueHeld.get(); + } + storeEventDispatcher.releaseEventSink(eventSink); + enforceCapacity(); + computeObserver.end(outcome.get()); + return computeResult; + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw handleException(re); + } + } + + @Override + public ValueHolder computeIfAbsent(K key, Function mappingFunction) throws StoreAccessException { + checkKey(key); + + computeIfAbsentObserver.begin(); + + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + try { + long now = timeSource.getTimeMillis(); + + AtomicReference> previousValue = new AtomicReference<>(); + AtomicReference outcome = + new AtomicReference<>(StoreOperationOutcomes.ComputeIfAbsentOutcome.NOOP); + OnHeapValueHolder computeResult = map.compute(key, (mappedKey, mappedValue) -> { + long delta = 0; + OnHeapValueHolder holder; + + if (mappedValue == null || mappedValue.isExpired(now)) { + if (mappedValue != null) { + delta -= mappedValue.size(); + fireOnExpirationEvent(mappedKey, mappedValue, eventSink); + } + V computedValue = mappingFunction.apply(mappedKey); + if (computedValue == null) { + holder = null; + } else { + checkValue(computedValue); + holder = newCreateValueHolder(key, computedValue, now, eventSink); + if (holder != null) { + outcome.set(StoreOperationOutcomes.ComputeIfAbsentOutcome.PUT); + delta += holder.size(); + } + } + } else { + previousValue.set(mappedValue); + outcome.set(StoreOperationOutcomes.ComputeIfAbsentOutcome.HIT); + holder = strategy.setAccessAndExpiryWhenCallerlUnderLock(key, mappedValue, now, eventSink); + if (holder == null) { + delta -= mappedValue.size(); + } + } + + updateUsageInBytesIfRequired(delta); + + return holder; + }); + OnHeapValueHolder previousValueHolder = previousValue.get(); + + storeEventDispatcher.releaseEventSink(eventSink); + if (computeResult != null) { + enforceCapacity(); + } + computeIfAbsentObserver.end(outcome.get()); + if (computeResult == null && previousValueHolder != null) { + // There was a value - it expired on access + return previousValueHolder; + } + return computeResult; + } catch (RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw handleException(re); + } + } + + @Override + public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { + Map> result = new HashMap<>(keys.size()); + + for (K key : keys) { + ValueHolder newValue = computeIfAbsent(key, k -> { + Iterable keySet = Collections.singleton(k); + Iterable> entries = mappingFunction.apply(keySet); + java.util.Iterator> iterator = entries.iterator(); + Entry next = iterator.next(); + + K computedKey = next.getKey(); + checkKey(computedKey); + + V computedValue = next.getValue(); + if (computedValue == null) { + return null; + } + + checkValue(computedValue); + return computedValue; + }); + result.put(key, newValue); + } + return result; + } + + @Override + public List getConfigurationChangeListeners() { + List configurationChangeListenerList + = new ArrayList<>(); + configurationChangeListenerList.add(this.cacheConfigurationChangeListener); + return configurationChangeListenerList; + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { + return bulkCompute(keys, remappingFunction, REPLACE_EQUALS_TRUE); + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { + + // The Store here is free to slice & dice the keys as it sees fit + // As this OnHeapStore doesn't operate in segments, the best it can do is do a "bulk" write in batches of... one! + + Map> result = new HashMap<>(); + for (K key : keys) { + checkKey(key); + + ValueHolder newValue = computeAndGet(key, (k, oldValue) -> { + Set> entrySet = Collections.singletonMap(k, oldValue).entrySet(); + Iterable> entries = remappingFunction.apply(entrySet); + java.util.Iterator> iterator = entries.iterator(); + Entry next = iterator.next(); + + K key1 = next.getKey(); + V value = next.getValue(); + checkKey(key1); + if (value != null) { + checkValue(value); + } + return value; + }, replaceEqual, () -> false); + result.put(key, newValue); + } + return result; + } + + @Override + public StoreEventSource getStoreEventSource() { + return storeEventDispatcher; + } + + void expireMappingUnderLock(K key, ValueHolder value) { + + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + try { + map.computeIfPresent(key, (mappedKey, mappedValue) -> { + if(mappedValue.equals(value)) { + fireOnExpirationEvent(key, value, eventSink); + updateUsageInBytesIfRequired(- mappedValue.size()); + return null; + } + return mappedValue; + }); + storeEventDispatcher.releaseEventSink(eventSink); + } catch(RuntimeException re) { + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw re; + } + } + + private OnHeapValueHolder newUpdateValueHolder(K key, OnHeapValueHolder oldValue, V newValue, long now, StoreEventSink eventSink) { + Objects.requireNonNull(oldValue); + Objects.requireNonNull(newValue); + + Duration duration = strategy.getUpdateDuration(key, oldValue, newValue); + + if (Duration.ZERO.equals(duration)) { + eventSink.updated(key, oldValue, newValue); + eventSink.expired(key, () -> newValue); + return null; + } + + long expirationTime; + if (duration == null) { + expirationTime = oldValue.expirationTime(); + } else { + if (isExpiryDurationInfinite(duration)) { + expirationTime = ValueHolder.NO_EXPIRE; + } else { + expirationTime = ExpiryUtils.getExpirationMillis(now, duration); + } + } + + OnHeapValueHolder holder = null; + try { + holder = makeValue(key, newValue, now, expirationTime, this.valueCopier); + eventSink.updated(key, oldValue, newValue); + } catch (LimitExceededException e) { + LOG.warn(e.getMessage()); + eventSink.removed(key, oldValue); + } + return holder; + } + + private OnHeapValueHolder newCreateValueHolder(K key, V value, long now, StoreEventSink eventSink) { + Objects.requireNonNull(value); + + Duration duration = ExpiryUtils.getExpiryForCreation(key, value, expiry); + if(duration.isZero()) { + return null; + } + + long expirationTime = isExpiryDurationInfinite(duration) ? ValueHolder.NO_EXPIRE : ExpiryUtils.getExpirationMillis(now, duration); + + OnHeapValueHolder holder = null; + try { + holder = makeValue(key, value, now, expirationTime, this.valueCopier); + eventSink.created(key, value); + } catch (LimitExceededException e) { + LOG.warn(e.getMessage()); + } + return holder; + } + + private OnHeapValueHolder importValueFromLowerTier(K key, ValueHolder valueHolder, long now, Backend backEnd, Fault fault) { + Duration expiration = strategy.getAccessDuration(key, valueHolder); + + if (Duration.ZERO.equals(expiration)) { + invalidateInGetOrComputeIfAbsent(backEnd, key, valueHolder, fault, now, Duration.ZERO); + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return null; + } + + try{ + return cloneValueHolder(key, valueHolder, now, expiration, true); + } catch (LimitExceededException e) { + LOG.warn(e.getMessage()); + invalidateInGetOrComputeIfAbsent(backEnd, key, valueHolder, fault, now, expiration); + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return null; + } + } + + private OnHeapValueHolder cloneValueHolder(K key, ValueHolder valueHolder, long now, Duration expiration, boolean sizingEnabled) throws LimitExceededException { + V realValue = valueHolder.get(); + boolean evictionAdvice = checkEvictionAdvice(key, realValue); + OnHeapValueHolder clonedValueHolder; + if(valueCopier instanceof SerializingCopier) { + if (valueHolder instanceof BinaryValueHolder && ((BinaryValueHolder) valueHolder).isBinaryValueAvailable()) { + clonedValueHolder = new SerializedOnHeapValueHolder<>(valueHolder, ((BinaryValueHolder) valueHolder).getBinaryValue(), + evictionAdvice, ((SerializingCopier) valueCopier).getSerializer(), now, expiration); + } else { + clonedValueHolder = new SerializedOnHeapValueHolder<>(valueHolder, realValue, evictionAdvice, + ((SerializingCopier) valueCopier).getSerializer(), now, expiration); + } + } else { + clonedValueHolder = new CopiedOnHeapValueHolder<>(valueHolder, realValue, evictionAdvice, valueCopier, now, expiration); + } + if (sizingEnabled) { + clonedValueHolder.setSize(getSizeOfKeyValuePairs(key, clonedValueHolder)); + } + return clonedValueHolder; + } + + private OnHeapValueHolder makeValue(K key, V value, long creationTime, long expirationTime, Copier valueCopier) throws LimitExceededException { + return makeValue(key, value, creationTime, expirationTime, valueCopier, true); + } + + private OnHeapValueHolder makeValue(K key, V value, long creationTime, long expirationTime, Copier valueCopier, boolean size) throws LimitExceededException { + boolean evictionAdvice = checkEvictionAdvice(key, value); + OnHeapValueHolder valueHolder; + if (valueCopier instanceof SerializingCopier) { + valueHolder = new SerializedOnHeapValueHolder<>(value, creationTime, expirationTime, evictionAdvice, ((SerializingCopier) valueCopier) + .getSerializer()); + } else { + valueHolder = new CopiedOnHeapValueHolder<>(value, creationTime, expirationTime, evictionAdvice, valueCopier); + } + if (size) { + valueHolder.setSize(getSizeOfKeyValuePairs(key, valueHolder)); + } + return valueHolder; + } + + private boolean checkEvictionAdvice(K key, V value) { + try { + return evictionAdvisor.adviseAgainstEviction(key, value); + } catch (Exception e) { + LOG.error("Exception raised while running eviction advisor " + + "- Eviction will assume entry is NOT advised against eviction", e); + return false; + } + } + + private void updateUsageInBytesIfRequired(long delta) { + map.updateUsageInBytesIfRequired(delta); + } + + protected long byteSized() { + return map.byteSize(); + } + + @SuppressFBWarnings("QF_QUESTIONABLE_FOR_LOOP") + protected void enforceCapacity() { + StoreEventSink eventSink = storeEventDispatcher.eventSink(); + try { + for (int attempts = 0, evicted = 0; attempts < ATTEMPT_RATIO && evicted < EVICTION_RATIO + && capacity < map.naturalSize(); attempts++) { + if (evict(eventSink)) { + evicted++; + } + } + storeEventDispatcher.releaseEventSink(eventSink); + } catch (RuntimeException re){ + storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); + throw re; + } + } + + /** + * Try to evict a mapping. + * @return true if a mapping was evicted, false otherwise. + * @param eventSink target of eviction event + */ + boolean evict(StoreEventSink eventSink) { + evictionObserver.begin(); + Random random = new Random(); + + @SuppressWarnings("unchecked") + Map.Entry> candidate = map.getEvictionCandidate(random, SAMPLE_SIZE, EVICTION_PRIORITIZER, EVICTION_ADVISOR); + + if (candidate == null) { + // 2nd attempt without any advisor + candidate = map.getEvictionCandidate(random, SAMPLE_SIZE, EVICTION_PRIORITIZER, noAdvice()); + } + + if (candidate == null) { + return false; + } else { + Map.Entry> evictionCandidate = candidate; + AtomicBoolean removed = new AtomicBoolean(false); + map.computeIfPresent(evictionCandidate.getKey(), (mappedKey, mappedValue) -> { + if (mappedValue.equals(evictionCandidate.getValue())) { + removed.set(true); + if (!(evictionCandidate.getValue() instanceof Fault)) { + eventSink.evicted(evictionCandidate.getKey(), evictionCandidate.getValue()); + invalidationListener.onInvalidation(mappedKey, evictionCandidate.getValue()); + } + updateUsageInBytesIfRequired(-mappedValue.size()); + return null; + } + return mappedValue; + }); + if (removed.get()) { + evictionObserver.end(StoreOperationOutcomes.EvictionOutcome.SUCCESS); + return true; + } else { + evictionObserver.end(StoreOperationOutcomes.EvictionOutcome.FAILURE); + return false; + } + } + } + + void fireOnExpirationEvent(K mappedKey, ValueHolder mappedValue, StoreEventSink eventSink) { + expirationObserver.begin(); + expirationObserver.end(StoreOperationOutcomes.ExpirationOutcome.SUCCESS); + eventSink.expired(mappedKey, mappedValue); + invalidationListener.onInvalidation(mappedKey, mappedValue); + } + + @ServiceDependencies({TimeSourceService.class, CopyProvider.class, SizeOfEngineProvider.class}) + @OptionalServiceDependencies("org.ehcache.core.spi.service.Statis" + + "ticsService") + public static class Provider extends BaseStoreProvider implements CachingTier.Provider, HigherCachingTier.Provider { + + private final Map, List>> createdStores = new ConcurrentWeakIdentityHashMap<>(); + private final Map, OperationStatistic[]> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>(); + + @Override + protected ResourceType getResourceType() { + return ResourceType.Core.HEAP; + } + + @Override + public int rank(Set> resourceTypes, Collection> serviceConfigs) { + return resourceTypes.equals(Collections.singleton(getResourceType())) ? 1 : 0; + } + + @Override + public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { + return rank(resourceTypes, serviceConfigs); + } + + @Override + public OnHeapStore createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + OnHeapStore store = createStoreInternal(storeConfig, new DefaultStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()), serviceConfigs); + + tierOperationStatistics.put(store, new OperationStatistic[] { + createTranslatedStatistic(store, "get", TierOperationOutcomes.GET_TRANSLATION, "get"), + createTranslatedStatistic(store, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") }); + + return store; + } + + public OnHeapStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, + ServiceConfiguration... serviceConfigs) { + TimeSource timeSource = getServiceProvider().getService(TimeSourceService.class).getTimeSource(); + CopyProvider copyProvider = getServiceProvider().getService(CopyProvider.class); + Copier keyCopier = copyProvider.createKeyCopier(storeConfig.getKeyType(), storeConfig.getKeySerializer(), serviceConfigs); + Copier valueCopier = copyProvider.createValueCopier(storeConfig.getValueType(), storeConfig.getValueSerializer(), serviceConfigs); + + List> copiers = Arrays.asList(keyCopier, valueCopier); + + SizeOfEngineProvider sizeOfEngineProvider = getServiceProvider().getService(SizeOfEngineProvider.class); + SizeOfEngine sizeOfEngine = sizeOfEngineProvider.createSizeOfEngine( + storeConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getUnit(), serviceConfigs); + OnHeapStore onHeapStore = new OnHeapStore<>(storeConfig, timeSource, keyCopier, valueCopier, sizeOfEngine, eventDispatcher, ConcurrentHashMap::new, getServiceProvider().getService(StatisticsService.class)); + createdStores.put(onHeapStore, copiers); + return onHeapStore; + } + + @Override + public void releaseStore(Store resource) { + List> copiers = createdStores.remove(resource); + if (copiers == null) { + throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); + } + OnHeapStore onHeapStore = (OnHeapStore)resource; + close(onHeapStore); + getStatisticsService().ifPresent(s -> s.cleanForNode(onHeapStore)); + tierOperationStatistics.remove(onHeapStore); + + CopyProvider copyProvider = getServiceProvider().getService(CopyProvider.class); + for (Copier copier: copiers) { + try { + copyProvider.releaseCopier(copier); + } catch (Exception e) { + throw new IllegalStateException("Exception while releasing Copier instance.", e); + } + } + } + + static void close(OnHeapStore onHeapStore) { + onHeapStore.clear(); + } + + @Override + public void initStore(Store resource) { + checkResource(resource); + + List> copiers = createdStores.get(resource); + for (Copier copier : copiers) { + if(copier instanceof SerializingCopier) { + Serializer serializer = ((SerializingCopier)copier).getSerializer(); + if(serializer instanceof StatefulSerializer) { + ((StatefulSerializer)serializer).init(new TransientStateRepository()); + } + } + } + } + + private void checkResource(Object resource) { + if (!createdStores.containsKey(resource)) { + throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); + } + } + + @Override + public void stop() { + try { + createdStores.clear(); + } finally { + super.stop(); + } + } + + @Override + public CachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + OnHeapStore cachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + + this.tierOperationStatistics.put(cachingTier, new OperationStatistic[] { + createTranslatedStatistic(cachingTier, "get", TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "getOrComputeIfAbsent"), + createTranslatedStatistic(cachingTier, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); + + return cachingTier; + } + + @Override + public void releaseCachingTier(CachingTier resource) { + checkResource(resource); + try { + resource.invalidateAll(); + } catch (StoreAccessException e) { + LOG.warn("Invalidation failure while releasing caching tier", e); + } + releaseStore((Store) resource); + } + + @Override + public void initCachingTier(CachingTier resource) { + checkResource(resource); + } + + @Override + public HigherCachingTier createHigherCachingTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + OnHeapStore higherCachingTier = createStoreInternal(storeConfig, new DefaultStoreEventDispatcher<>(storeConfig + .getDispatcherConcurrency()), serviceConfigs); + + this.tierOperationStatistics.put(higherCachingTier, new OperationStatistic[] { + createTranslatedStatistic(higherCachingTier, "get", TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "getOrComputeIfAbsent"), + createTranslatedStatistic(higherCachingTier, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); + + return higherCachingTier; + } + + @Override + public void releaseHigherCachingTier(HigherCachingTier resource) { + releaseCachingTier(resource); + } + + @Override + public void initHigherCachingTier(HigherCachingTier resource) { + checkResource(resource); + } + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderFactory.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderFactory.java index 13f97b8df6..bb1fe01943 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderFactory.java @@ -16,21 +16,23 @@ package org.ehcache.impl.internal.store.heap; -import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; /** * @author Alex Snaps */ +@Component public class OnHeapStoreProviderFactory implements ServiceFactory { @Override - public OnHeapStore.Provider create(ServiceCreationConfiguration configuration) { + public OnHeapStore.Provider create(ServiceCreationConfiguration configuration) { return new OnHeapStore.Provider(); } @Override - public Class getServiceType() { + public Class getServiceType() { return OnHeapStore.Provider.class; } } diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStrategy.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStrategy.java new file mode 100644 index 0000000000..7910481e39 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStrategy.java @@ -0,0 +1,262 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.core.events.StoreEventSink; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.internal.store.heap.holders.OnHeapValueHolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +/** + * Specialized behavior for an OnHeapStore allowing optimization depending on the expiry policy used. + * + * @param type of the keys stored + * @param type of the values stored + */ +interface OnHeapStrategy { + + Logger LOG = LoggerFactory.getLogger(OnHeapStore.class); + + static OnHeapStrategy strategy(OnHeapStore store, ExpiryPolicy expiry, TimeSource timeSource) { + if(expiry == ExpiryPolicy.NO_EXPIRY) { + LOG.debug("No expiration strategy detected"); + return new NoExpirationStrategy<>(); + } + if(expiry.getClass().getName().equals("org.ehcache.config.builders.ExpiryPolicyBuilder$TimeToLiveExpiryPolicy")) { + LOG.debug("TTL expiration strategy detected"); + return new TTLStrategy<>(expiry, timeSource); + } + LOG.debug("TTI or custom expiration strategy detected"); + return new AllStrategy<>(store, expiry, timeSource); + } + + /** + * Tells if a given mapping is expired. + * + * @param mapping mapping to test for expiration + * @return if the mapping is expired + */ + boolean isExpired(OnHeapValueHolder mapping); + + /** + * Set the access time on the mapping and its expiry time if it is access sensitive (TTI). We expect this action to + * be called when the caller isn't holding any lock. + * + * @param key key of the mapping. Used to remove it form the map if needed + * @param valueHolder the mapping + * @param now the current time + */ + void setAccessAndExpiryTimeWhenCallerOutsideLock(K key, OnHeapValueHolder valueHolder, long now); + + /** + * Set the access time on the mapping and its expiry time if it is access sensitive (TTI). We expect this action to + * be called when the caller is currently holding a lock. + * + * @param key key of the mapping. Used to remove it form the map if needed + * @param valueHolder the mapping + * @param now the current time + * @param eventSink sink where the expiration request will be sent + * @return the mapping or null if it was removed + */ + OnHeapValueHolder setAccessAndExpiryWhenCallerlUnderLock(K key, OnHeapValueHolder valueHolder, long now, StoreEventSink eventSink); + + /** + * Get the new expiry duration as per {@link ExpiryPolicy#getExpiryForAccess(Object, Supplier)}. + * + * @param key key of the mapping + * @param valueHolder the mapping + * @return new access expiry duration + */ + Duration getAccessDuration(K key, Store.ValueHolder valueHolder); + + /** + * Get the new expiry duration as per {@link ExpiryPolicy#getExpiryForUpdate(Object, Supplier, Object)}. + * + * @param key key of the mapping + * @param oldValue the old mapping to be updated + * @param newValue the new value for the mapping + * @return new access expiry duration + */ + Duration getUpdateDuration(K key, OnHeapValueHolder oldValue, V newValue); + + /** + * All purpose strategy. Covers any case that can't be optimized due to the uncertainty of the expiry policy used. + * + * @param type of the keys stored + * @param type of the values stored + */ + class AllStrategy implements OnHeapStrategy { + private final OnHeapStore store; + private final ExpiryPolicy expiry; + private final TimeSource timeSource; + + public AllStrategy(OnHeapStore store, ExpiryPolicy expiry, TimeSource timeSource) { + this.store = store; + this.expiry = expiry; + this.timeSource = timeSource; + } + + @Override + public boolean isExpired(OnHeapValueHolder mapping) { + return mapping.isExpired(timeSource.getTimeMillis()); + } + + @Override + public void setAccessAndExpiryTimeWhenCallerOutsideLock(K key, OnHeapValueHolder valueHolder, long now) { + Duration duration = getAccessDuration(key, valueHolder); + if (Duration.ZERO.equals(duration)) { + // Expires mapping through computeIfPresent + store.expireMappingUnderLock(key, valueHolder); + } else { + valueHolder.accessed(now, duration); + } + } + + public Duration getAccessDuration(K key, Store.ValueHolder valueHolder) { + Duration duration; + try { + duration = expiry.getExpiryForAccess(key, valueHolder); + if (duration != null && duration.isNegative()) { + duration = Duration.ZERO; + } + } catch (RuntimeException re) { + LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); + duration = Duration.ZERO; + } + return duration; + } + + public Duration getUpdateDuration(K key, OnHeapValueHolder oldValue, V newValue) { + Duration duration; + try { + duration = expiry.getExpiryForUpdate(key, oldValue, newValue); + if (duration != null && duration.isNegative()) { + duration = Duration.ZERO; + } + } catch (RuntimeException re) { + LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); + duration = Duration.ZERO; + } + return duration; + } + + public OnHeapValueHolder setAccessAndExpiryWhenCallerlUnderLock(K key, OnHeapValueHolder valueHolder, long now, + StoreEventSink eventSink) { + Duration duration = getAccessDuration(key, valueHolder); + if (Duration.ZERO.equals(duration)) { + // Fires event, must happen under lock + store.fireOnExpirationEvent(key, valueHolder, eventSink); + return null; + } else { + valueHolder.accessed(now, duration); + } + return valueHolder; + } + + } + + /** + * Strategy used when entries are never expiring. + * + * @param type of the keys stored + * @param type of the values stored + */ + class NoExpirationStrategy implements OnHeapStrategy { + + @Override + public boolean isExpired(OnHeapValueHolder mapping) { + return false; + } + + @Override + public void setAccessAndExpiryTimeWhenCallerOutsideLock(K key, OnHeapValueHolder valueHolder, long now) { + valueHolder.accessed(now, null); + } + + public OnHeapValueHolder setAccessAndExpiryWhenCallerlUnderLock(K key, OnHeapValueHolder valueHolder, long now, + StoreEventSink eventSink) { + valueHolder.accessed(now, null); + return valueHolder; + } + + public Duration getAccessDuration(K key, Store.ValueHolder valueHolder) { + return null; + } + + public Duration getUpdateDuration(K key, OnHeapValueHolder oldValue, V newValue) { + return null; + } + } + + /** + * Strategy used when entries are expiring due to TTL only. + * + * @param type of the keys stored + * @param type of the values stored + */ + class TTLStrategy implements OnHeapStrategy { + private final TimeSource timeSource; + private final ExpiryPolicy expiry; + + public TTLStrategy(ExpiryPolicy expiry, TimeSource timeSource) { + this.timeSource = timeSource; + this.expiry = expiry; + } + + @Override + public boolean isExpired(OnHeapValueHolder mapping) { + return mapping.isExpired(timeSource.getTimeMillis()); + } + + @Override + public void setAccessAndExpiryTimeWhenCallerOutsideLock(K key, OnHeapValueHolder valueHolder, long now) { + valueHolder.accessed(now, null); + } + + public OnHeapValueHolder setAccessAndExpiryWhenCallerlUnderLock(K key, OnHeapValueHolder valueHolder, long now, + StoreEventSink eventSink) { + valueHolder.accessed(now, null); + return valueHolder; + } + + public Duration getAccessDuration(K key, Store.ValueHolder valueHolder) { + return null; + } + + public Duration getUpdateDuration(K key, OnHeapValueHolder oldValue, V newValue) { + Duration duration; + try { + duration = expiry.getExpiryForUpdate(key, oldValue, newValue); + if (duration != null && duration.isNegative()) { + duration = Duration.ZERO; + } + } catch (RuntimeException re) { + LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); + duration = Duration.ZERO; + } + return duration; + } + } + +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java similarity index 86% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java index ad4474a81a..8b5e4e5f7c 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java @@ -19,6 +19,7 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.impl.internal.concurrent.EvictingConcurrentMap; import org.ehcache.impl.internal.store.heap.holders.OnHeapValueHolder; import java.util.Collection; @@ -27,19 +28,22 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; +import java.util.function.Supplier; /** * Simple passthrough backend, no key translation */ class SimpleBackend implements Backend { - private final ConcurrentHashMap> realMap; + private volatile EvictingConcurrentMap> realMap; + private final Supplier>> realMapSupplier; private final boolean byteSized; private final AtomicLong byteSize = new AtomicLong(0L); - SimpleBackend(boolean byteSized) { + SimpleBackend(boolean byteSized, Supplier>> realMapSupplier) { this.byteSized = byteSized; - realMap = new ConcurrentHashMap<>(); + this.realMap = realMapSupplier.get(); + this.realMapSupplier = realMapSupplier; } @Override @@ -98,8 +102,9 @@ public OnHeapValueHolder compute(final K key, final BiFunction clear() { - return new SimpleBackend<>(byteSized); + public void clear() { + // This is faster than performing a clear on the underlying map + realMap = realMapSupplier.get(); } @Override diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/BaseOnHeapKey.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/BaseOnHeapKey.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/BaseOnHeapKey.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/BaseOnHeapKey.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKey.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKey.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKey.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKey.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolder.java similarity index 91% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolder.java index d7b7a9e87a..554b2f6b8f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolder.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolder.java @@ -16,11 +16,12 @@ package org.ehcache.impl.internal.store.heap.holders; -import org.ehcache.expiry.Duration; import org.ehcache.sizeof.annotations.IgnoreSizeOf; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.copy.Copier; +import java.util.concurrent.TimeUnit; + /** * @author Albin Suresh */ @@ -51,8 +52,8 @@ protected CopiedOnHeapValueHolder(long id, V value, long creationTime, long expi * @param now timestamp in millis * @param expiration computed expiration duration */ - public CopiedOnHeapValueHolder(Store.ValueHolder valueHolder, V value, boolean evictionAdvice, Copier valueCopier, long now, Duration expiration) { - super(valueHolder.getId(), valueHolder.creationTime(TIME_UNIT), valueHolder.expirationTime(TIME_UNIT), evictionAdvice); + public CopiedOnHeapValueHolder(Store.ValueHolder valueHolder, V value, boolean evictionAdvice, Copier valueCopier, long now, java.time.Duration expiration) { + super(valueHolder.getId(), valueHolder.creationTime(), valueHolder.expirationTime(), evictionAdvice); if (value == null) { throw new NullPointerException("null value"); } @@ -61,7 +62,6 @@ public CopiedOnHeapValueHolder(Store.ValueHolder valueHolder, V value, boolea } this.valueCopier = valueCopier; this.copiedValue = value; - this.setHits(valueHolder.hits()); this.accessed(now, expiration); } @@ -74,7 +74,7 @@ public CopiedOnHeapValueHolder(V value, long creationTime, long expirationTime, } @Override - public V value() { + public V get() { return valueCopier.copyForRead(copiedValue); } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/LookupOnlyOnHeapKey.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/LookupOnlyOnHeapKey.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/LookupOnlyOnHeapKey.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/LookupOnlyOnHeapKey.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapKey.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapKey.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapKey.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapKey.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapValueHolder.java similarity index 92% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapValueHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapValueHolder.java index 5fc391b95c..947d7092db 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapValueHolder.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/OnHeapValueHolder.java @@ -25,8 +25,6 @@ */ public abstract class OnHeapValueHolder extends AbstractValueHolder { - public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; - private final boolean evictionAdvice; private long size; @@ -55,11 +53,6 @@ public void setSize(long size) { this.size = size; } - @Override - final protected TimeUnit nativeTimeUnit() { - return TIME_UNIT; - } - @Override public boolean equals(Object obj) { if (obj != null && this.getClass().equals(obj.getClass())) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java index 2a47a0bce8..a5dcaf8d9d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.store.heap.holders; import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.expiry.Duration; import org.ehcache.sizeof.annotations.IgnoreSizeOf; import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.store.BinaryValueHolder; @@ -49,22 +48,20 @@ public SerializedOnHeapValueHolder(V value, long creationTime, long expirationTi this(-1, value, creationTime, expirationTime, evictionAdvice, serializer); } - public SerializedOnHeapValueHolder(Store.ValueHolder valueHolder, V value, boolean evictionAdvice, Serializer serializer, long now, Duration expiration) { - this(valueHolder.getId(), value, valueHolder.creationTime(TIME_UNIT), valueHolder.expirationTime(TIME_UNIT), evictionAdvice, serializer); - this.setHits(valueHolder.hits()); + public SerializedOnHeapValueHolder(Store.ValueHolder valueHolder, V value, boolean evictionAdvice, Serializer serializer, long now, java.time.Duration expiration) { + this(valueHolder.getId(), value, valueHolder.creationTime(), valueHolder.expirationTime(), evictionAdvice, serializer); this.accessed(now, expiration); } - public SerializedOnHeapValueHolder(Store.ValueHolder valueHolder, ByteBuffer binaryValue, boolean evictionAdvice, Serializer serializer, long now, Duration expiration) { - super(valueHolder.getId(), valueHolder.creationTime(TIME_UNIT), valueHolder.expirationTime(TIME_UNIT), evictionAdvice); + public SerializedOnHeapValueHolder(Store.ValueHolder valueHolder, ByteBuffer binaryValue, boolean evictionAdvice, Serializer serializer, long now, java.time.Duration expiration) { + super(valueHolder.getId(), valueHolder.creationTime(), valueHolder.expirationTime(), evictionAdvice); this.buffer = binaryValue; this.serializer = serializer; - this.setHits(valueHolder.hits()); this.accessed(now, expiration); } @Override - public final V value() { + public final V get() { try { return serializer.read(buffer.duplicate()); } catch (ClassNotFoundException cnfe) { @@ -92,7 +89,7 @@ public boolean equals(Object other) { if (!super.equals(that)) return false; try { - if (!serializer.equals(that.value(), buffer)) return false; + if (!serializer.equals(that.get(), buffer)) return false; } catch (ClassNotFoundException cnfe) { throw new SerializerException(cnfe); } diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterStoreProvider.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterStoreProvider.java new file mode 100644 index 0000000000..4ba520285f --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterStoreProvider.java @@ -0,0 +1,90 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.loaderwriter; + +import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.store.AbstractWrapperStoreProvider; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; +import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; +import org.ehcache.spi.loaderwriter.WriteBehindProvider; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; + +import java.util.Collection; +import java.util.Set; + +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +@ServiceDependencies({CacheLoaderWriterProvider.class, WriteBehindProvider.class}) +public class LoaderWriterStoreProvider extends AbstractWrapperStoreProvider { + + private volatile WriteBehindProvider writeBehindProvider; + + @Override + protected Store wrap(Store store, Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + WriteBehindConfiguration writeBehindConfiguration = findSingletonAmongst(WriteBehindConfiguration.class, (Object[]) serviceConfigs); + LocalLoaderWriterStore loaderWriterStore; + if(writeBehindConfiguration == null) { + loaderWriterStore = new LocalLoaderWriterStore<>(store, storeConfig.getCacheLoaderWriter(), storeConfig.useLoaderInAtomics(), storeConfig.getExpiry()); + } else { + CacheLoaderWriter writeBehindLoaderWriter = writeBehindProvider.createWriteBehindLoaderWriter(storeConfig.getCacheLoaderWriter(), writeBehindConfiguration); + loaderWriterStore = new LocalWriteBehindLoaderWriterStore<>(store, writeBehindLoaderWriter, storeConfig.useLoaderInAtomics(), storeConfig.getExpiry()); + } + return loaderWriterStore; + } + + @Override + public void releaseStore(Store resource) { + try { + if (resource instanceof LocalWriteBehindLoaderWriterStore) { + writeBehindProvider.releaseWriteBehindLoaderWriter(((LocalWriteBehindLoaderWriterStore) resource).getCacheLoaderWriter()); + } + } finally { + super.releaseStore(resource); + } + } + + @Override + public void start(ServiceProvider serviceProvider) { + super.start(serviceProvider); + this.writeBehindProvider = serviceProvider.getService(WriteBehindProvider.class); + } + + @Override + public void stop() { + this.writeBehindProvider = null; + super.stop(); + } + + @Override + public int rank(Set> resourceTypes, Collection> serviceConfigs) { + throw new UnsupportedOperationException("Its a Wrapper store provider, does not support regular ranking"); + } + + @Override + public int wrapperStoreRank(Collection> serviceConfigs) { + CacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(CacheLoaderWriterConfiguration.class, serviceConfigs); + if (loaderWriterConfiguration == null) { + return 0; + } + return 2; + } +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterStoreProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterStoreProviderFactory.java new file mode 100644 index 0000000000..2a5137b9b1 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterStoreProviderFactory.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.loaderwriter; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; + +@Component +public class LoaderWriterStoreProviderFactory implements ServiceFactory { + @Override + public LoaderWriterStoreProvider create(ServiceCreationConfiguration configuration) { + return new LoaderWriterStoreProvider(); + } + + @Override + public Class getServiceType() { + return LoaderWriterStoreProvider.class; + } +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterValueHolder.java new file mode 100644 index 0000000000..70a80aa162 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LoaderWriterValueHolder.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.loaderwriter; + +import org.ehcache.core.spi.store.AbstractValueHolder; + +public class LoaderWriterValueHolder extends AbstractValueHolder { + + private final V value; + + public LoaderWriterValueHolder(V value) { + super(0, 0, NO_EXPIRE); + if(value == null) { + throw new NullPointerException("Value can not be null"); + } + this.value = value; + } + + @Override + public V get() { + return value; + } +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LocalLoaderWriterStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LocalLoaderWriterStore.java new file mode 100644 index 0000000000..066bf01033 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LocalLoaderWriterStore.java @@ -0,0 +1,595 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.loaderwriter; + +import org.ehcache.Cache; +import org.ehcache.core.CacheConfigurationChangeListener; +import org.ehcache.core.Ehcache; +import org.ehcache.core.exceptions.StorePassThroughException; +import org.ehcache.core.util.CollectionUtil; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.WrapperStore; +import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; +import org.ehcache.spi.loaderwriter.BulkCacheWritingException; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.StoreAccessException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.ehcache.core.exceptions.ExceptionFactory.newCacheLoadingException; +import static org.ehcache.core.exceptions.ExceptionFactory.newCacheWritingException; + +public class LocalLoaderWriterStore implements WrapperStore { + + private static final Logger LOG = LoggerFactory.getLogger(LocalLoaderWriterStore.class); + private static final Supplier SUPPLY_FALSE = () -> Boolean.FALSE; + + private final Store delegate; + private final CacheLoaderWriter cacheLoaderWriter; + private final boolean useLoaderInAtomics; + private final ExpiryPolicy expiry; + + public LocalLoaderWriterStore(Store delegate, CacheLoaderWriter cacheLoaderWriter, boolean useLoaderInAtomics, + ExpiryPolicy expiry) { + this.delegate = delegate; + this.cacheLoaderWriter = cacheLoaderWriter; + this.useLoaderInAtomics = useLoaderInAtomics; + this.expiry = expiry; + } + + @Override + public ValueHolder get(K key) throws StoreAccessException { + Function mappingFunction = k -> { + try { + return cacheLoaderWriter.load(k); + } catch (Exception e) { + throw new StorePassThroughException(newCacheLoadingException(e)); + } + }; + return delegate.computeIfAbsent(key, mappingFunction); + } + + @Override + public boolean containsKey(K key) throws StoreAccessException { + return delegate.containsKey(key); + } + + @Override + public PutStatus put(K key, V value) throws StoreAccessException { + BiFunction remappingFunction = (key1, previousValue) -> { + try { + cacheLoaderWriter.write(key1, value); + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + return value; + }; + + delegate.getAndCompute(key, remappingFunction); + return Store.PutStatus.PUT; + } + + @Override + public ValueHolder putIfAbsent(K key, V value, Consumer put) throws StoreAccessException { + Function mappingFunction = k -> { + if (useLoaderInAtomics) { + try { + V loaded = cacheLoaderWriter.load(k); + if (loaded != null) { + return loaded; // populate the cache + } + } catch (Exception e) { + throw new StorePassThroughException(newCacheLoadingException(e)); + } + } + + try { + cacheLoaderWriter.write(k, value); + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + + // Here were a returning an actual value instead of null because the mappingFunction is called by a map.compute(). So we + // want the compute to actually set the value to the backend. However, the putIfAbsent should return null since there + // was no previous value. This is why we use put.accept(true). This will tell EhcacheBase: "Hey! A put was done, you should return null" + put.accept(true); + return value; + }; + + return delegate.computeIfAbsent(key, mappingFunction); + } + + @Override + public boolean remove(K key) throws StoreAccessException { + boolean[] modified = { false }; + + BiFunction remappingFunction = (key1, previousValue) -> { + modified[0] = (previousValue != null); + + try { + cacheLoaderWriter.delete(key1); + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + return null; + }; + + delegate.getAndCompute(key, remappingFunction); + return modified[0]; + } + + @Override + public RemoveStatus remove(K key, V value) throws StoreAccessException { + boolean[] hitRemoved = { false, false }; // index 0 = hit, 1 = removed + BiFunction remappingFunction = (k, inCache) -> { + inCache = loadFromLoaderWriter(key, inCache); + if(inCache == null) { + return null; + } + + hitRemoved[0] = true; + if (value.equals(inCache)) { + try { + cacheLoaderWriter.delete(k); + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + hitRemoved[1] = true; + return null; + } + return inCache; + }; + + delegate.computeAndGet(key, remappingFunction, SUPPLY_FALSE, SUPPLY_FALSE); + if (hitRemoved[1]) { + return Store.RemoveStatus.REMOVED; + } + + if (hitRemoved[0]) { + return Store.RemoveStatus.KEY_PRESENT; + } else { + return Store.RemoveStatus.KEY_MISSING; + } + } + + @Override + public ValueHolder replace(K key, V value) throws StoreAccessException { + @SuppressWarnings("unchecked") + V[] old = (V[]) new Object[1]; + + BiFunction remappingFunction = (k, inCache) -> { + inCache = loadFromLoaderWriter(key, inCache); + if(inCache == null) { + return null; + } + + try { + cacheLoaderWriter.write(key, value); + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + + old[0] = inCache; + + if (newValueAlreadyExpired(LOG, expiry, key, inCache, value)) { + return null; + } + return value; + }; + + delegate.getAndCompute(key, remappingFunction); + if (old[0] == null) { + return null; + } + return new LoaderWriterValueHolder<>(old[0]); + } + + @Override + public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { + boolean[] successHit = { false, false }; // index 0 = success, 1 = hit + + BiFunction remappingFunction = (k, inCache) -> { + inCache = loadFromLoaderWriter(key, inCache); + if(inCache == null) { + return null; + } + + successHit[1] = true; + if (oldValue.equals(inCache)) { + try { + cacheLoaderWriter.write(key, newValue); + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + + successHit[0] = true; + + if (newValueAlreadyExpired(LOG, expiry, key, oldValue, newValue)) { + return null; + } + return newValue; + } + return inCache; + }; + + delegate.computeAndGet(key, remappingFunction, SUPPLY_FALSE, SUPPLY_FALSE); + if (successHit[0]) { + return Store.ReplaceStatus.HIT; + } else { + if (successHit[1]) { + return Store.ReplaceStatus.MISS_PRESENT; + } else { + return Store.ReplaceStatus.MISS_NOT_PRESENT; + } + } + } + + @Override + public void clear() throws StoreAccessException { + delegate.clear(); + } + + @Override + public StoreEventSource getStoreEventSource() { + return delegate.getStoreEventSource(); + } + + @Override + public Iterator>> iterator() { + return delegate.iterator(); + } + + @Override + public ValueHolder getAndCompute(K key, BiFunction mappingFunction) throws StoreAccessException { + return delegate.getAndCompute(key, (mappedKey, mappedValue) -> { + V newValue = mappingFunction.apply(mappedKey, mappedValue); + if (newValue == null) { + try { + cacheLoaderWriter.delete(mappedKey); + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + return null; + } else { + try { + cacheLoaderWriter.write(mappedKey, newValue); + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + if (newValueAlreadyExpired(LOG, expiry, mappedKey, mappedValue, newValue)) { + return null; + } + return newValue; + } + }); + } + + @Override + public ValueHolder computeAndGet(K key, BiFunction mappingFunction, Supplier replaceEqual, Supplier invokeWriter) throws StoreAccessException { + + BiFunction remappingFunction = (mappedKey, mappedValue) -> { + V newValue = mappingFunction.apply(mappedKey, mappedValue); + if (invokeWriter.get()) { + try { + if (newValue != null) { + cacheLoaderWriter.write(mappedKey, newValue); + } else { + cacheLoaderWriter.delete(mappedKey); + } + } catch (Exception e) { + throw new StorePassThroughException(newCacheWritingException(e)); + } + } + return newValue; + }; + + return delegate.computeAndGet(key, remappingFunction, replaceEqual, SUPPLY_FALSE); + } + + @Override + public ValueHolder computeIfAbsent(K key, Function mappingFunction) throws StoreAccessException { + throw new UnsupportedOperationException("Not supported"); + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { + // we are not expecting failures and these two maps are only used in case of failures. So keep them small + Set successes = new HashSet<>(1); + Map failures = new HashMap<>(1); + + if(remappingFunction instanceof Ehcache.PutAllFunction) { + return getkValueHolderMap((Ehcache.PutAllFunction) remappingFunction, successes, failures); + } else if (remappingFunction instanceof Ehcache.RemoveAllFunction) { + return getkValueHolderMap(keys); + } else { + return delegate.bulkCompute(keys, remappingFunction); + } + } + + private Map> getkValueHolderMap(Set keys) throws StoreAccessException { + // we are not expecting failures and these two maps are only used in case of failures. So keep them small + Set deleteSuccesses = new HashSet<>(1); + Map deleteFailures = new HashMap<>(1); + + Map entriesToRemove = new HashMap<>(keys.size()); + for (K key: keys) { + entriesToRemove.put(key, null); + } + + int[] actualRemoveCount = { 0 }; + + Function>, Iterable>> removalFunction = + entries -> { + Set unknowns = cacheLoaderWriterDeleteAllCall(entries, entriesToRemove, deleteSuccesses, deleteFailures); + + int size = CollectionUtil.findBestCollectionSize(entries, 1); + Map results = new LinkedHashMap<>(size); + + for (Map.Entry entry : entries) { + K key = entry.getKey(); + V existingValue = entry.getValue(); + + if (deleteSuccesses.contains(key)) { + if (existingValue != null) { + ++actualRemoveCount[0]; + } + results.put(key, null); + entriesToRemove.remove(key); + } else { + if (unknowns.contains(key)) { + results.put(key, null); + } else { + results.put(key, existingValue); + } + } + } + + return results.entrySet(); + }; + + Map> map = delegate.bulkCompute(keys, removalFunction); + if (!deleteFailures.isEmpty()) { + throw new BulkCacheWritingException(deleteFailures, deleteSuccesses); + } else { + return map; + } + } + + private Map> getkValueHolderMap(Ehcache.PutAllFunction remappingFunction, Set successes, Map failures) throws StoreAccessException { + // Copy all entries to write into a Map + Ehcache.PutAllFunction putAllFunction = remappingFunction; + Map entriesToRemap = CollectionUtil.copyMapButFailOnNull(putAllFunction.getEntriesToRemap()); + + int[] actualPutCount = {0}; + + // The compute function that will return the keys to their NEW values, taking the keys to their old values as input; + // but this could happen in batches, i.e. not necessary containing all of the entries of the Iterable passed to this method + Function>, Iterable>> computeFunction = + entries1 -> { + // If we have a writer, first write this batch + cacheLoaderWriterWriteAllCall(entries1, entriesToRemap, successes, failures); + + int size = CollectionUtil.findBestCollectionSize(entries1, 1); + Map mutations = new LinkedHashMap<>(size); + + // then record we handled these mappings + for (Map.Entry entry : entries1) { + K key = entry.getKey(); + V existingValue = entry.getValue(); + V newValue = entriesToRemap.remove(key); + + if (newValueAlreadyExpired(LOG, expiry, key, existingValue, newValue)) { + mutations.put(key, null); + } else if (successes.contains(key)) { + ++actualPutCount[0]; + mutations.put(key, newValue); + + } else { + mutations.put(key, existingValue); + } + } + + // Finally return the values to be installed in the Cache's Store + return mutations.entrySet(); + }; + + Map> computedMap = delegate.bulkCompute(putAllFunction.getEntriesToRemap().keySet(), computeFunction); + if (!failures.isEmpty()) { + throw new BulkCacheWritingException(failures, successes); + } + return computedMap; + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { + return null; + } + + @Override + public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { + // we are not expecting failures and these two maps are only used in case of failures. So keep them small + Map successes = new HashMap<>(1); + Map failures = new HashMap<>(1); + + if (!(mappingFunction instanceof Ehcache.GetAllFunction)) { + return delegate.bulkComputeIfAbsent(keys, mappingFunction); + } + + Function, Iterable>> computeFunction = + keys1 -> { + Map loaded; + try { + loaded = cacheLoaderWriter.loadAll(keys1); + } catch(BulkCacheLoadingException bcle) { + loaded = Collections.emptyMap(); + collectSuccessesAndFailures(bcle, successes, failures); + } catch (Exception e) { + loaded = Collections.emptyMap(); + for (K key : keys1) { + failures.put(key, e); + } + } + + int size = CollectionUtil.findBestCollectionSize(keys1, 1); // this function is actually called with one key at the time + Map computeResult = new LinkedHashMap<>(size); + + // put all the entries to get ordering correct + for (K key : keys1) { + computeResult.put(key, null); + } + + if (!loaded.isEmpty()) { + for (K key : keys1) { + V value = loaded.get(key); + successes.put(key, value); + computeResult.put(key, value); + } + } + + return computeResult.entrySet(); + }; + + Map result = new HashMap<>(); + Map> computedMap = delegate.bulkComputeIfAbsent(keys, computeFunction); + + if (failures.isEmpty()) { + return computedMap; + } else { + successes.putAll(result); + throw new BulkCacheLoadingException(failures, successes); + } + } + + @Override + public List getConfigurationChangeListeners() { + return delegate.getConfigurationChangeListeners(); + } + + private V loadFromLoaderWriter(K key, V inCache) { + if (inCache == null) { + if (useLoaderInAtomics) { + try { + inCache = cacheLoaderWriter.load(key); + if (inCache == null) { + return null; + } + } catch (Exception e) { + throw new StorePassThroughException(newCacheLoadingException(e)); + } + } else { + return null; + } + } + return inCache; + } + + private void cacheLoaderWriterWriteAllCall(Iterable> entries, Map entriesToRemap, Set successes, Map failures) throws IllegalStateException { + Map toWrite = new HashMap<>(); + for (Map.Entry entry: entries) { + V value = entriesToRemap.get(entry.getKey()); + if (value == null) { + continue; + } + + toWrite.put(entry.getKey(), value); + } + try { + if (! toWrite.isEmpty()) { + // write all entries of this batch + cacheLoaderWriter.writeAll(toWrite.entrySet()); + successes.addAll(toWrite.keySet()); + } + } catch (BulkCacheWritingException bcwe) { + collectSuccessesAndFailures(bcwe, successes, failures); + } catch (Exception e) { + for (K key: toWrite.keySet()) { + failures.put(key, e); + } + } + } + + private Set cacheLoaderWriterDeleteAllCall(Iterable> entries, Map entriesToRemove, Set successes, Map failures) { + Set unknowns = new HashSet<>(); + Set toDelete = new HashSet<>(); + for (Map.Entry entry : entries) { + K key = entry.getKey(); + if (entriesToRemove.containsKey(key)) { + toDelete.add(key); + } + } + + try { + cacheLoaderWriter.deleteAll(toDelete); + successes.addAll(toDelete); + } catch (BulkCacheWritingException bcwe) { + collectSuccessesAndFailures(bcwe, successes, failures); + } catch (Exception e) { + for (K key : toDelete) { + failures.put(key, e); + unknowns.add(key); + } + } + return unknowns; + } + + @SuppressWarnings({ "unchecked" }) + private static void collectSuccessesAndFailures(BulkCacheWritingException bcwe, Set successes, Map failures) { + successes.addAll((Collection)bcwe.getSuccesses()); + failures.putAll((Map)bcwe.getFailures()); + } + + @SuppressWarnings({ "unchecked" }) + private void collectSuccessesAndFailures(BulkCacheLoadingException bcle, Map successes, Map failures) { + successes.putAll((Map)bcle.getSuccesses()); + failures.putAll((Map)bcle.getFailures()); + } + + private static boolean newValueAlreadyExpired(Logger logger, ExpiryPolicy expiry, K key, V oldValue, V newValue) { + if (newValue == null) { + return false; + } + + Duration duration; + try { + if (oldValue == null) { + duration = expiry.getExpiryForCreation(key, newValue); + } else { + duration = expiry.getExpiryForUpdate(key, () -> oldValue, newValue); + } + } catch (RuntimeException re) { + logger.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); + return true; + } + + return Duration.ZERO.equals(duration); + } +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LocalWriteBehindLoaderWriterStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LocalWriteBehindLoaderWriterStore.java new file mode 100644 index 0000000000..bfbcf6e936 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/loaderwriter/LocalWriteBehindLoaderWriterStore.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.loaderwriter; + +import org.ehcache.core.spi.store.Store; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +public class LocalWriteBehindLoaderWriterStore extends LocalLoaderWriterStore { + + private final CacheLoaderWriter cacheLoaderWriter; + + public LocalWriteBehindLoaderWriterStore(Store delegate, CacheLoaderWriter cacheLoaderWriter, boolean useLoaderInAtomics, ExpiryPolicy expiry) { + super(delegate, cacheLoaderWriter, useLoaderInAtomics, expiry); + this.cacheLoaderWriter = cacheLoaderWriter; + } + + public CacheLoaderWriter getCacheLoaderWriter() { + return cacheLoaderWriter; + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java similarity index 77% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java index a7b88f28f0..fda2c6cb8a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java @@ -16,28 +16,35 @@ package org.ehcache.impl.internal.store.offheap; +import java.io.Serializable; +import java.time.Duration; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.Objects; import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import org.ehcache.Cache; import org.ehcache.config.EvictionAdvisor; +import org.ehcache.core.config.ExpiryUtils; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.core.events.StoreEventSink; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.statistics.StatisticType; +import org.ehcache.core.statistics.OperationObserver; +import org.ehcache.impl.store.BaseStore; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.events.StoreEventSource; @@ -49,17 +56,16 @@ import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.impl.internal.store.BinaryValueHolder; import org.ehcache.impl.store.HashUtils; +import org.ehcache.spi.serialization.Serializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.exceptions.OversizeMappingException; -import org.terracotta.statistics.StatisticsManager; -import org.terracotta.statistics.observer.OperationObserver; -import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import static org.terracotta.statistics.StatisticBuilder.operation; +import static org.ehcache.core.config.ExpiryUtils.isExpiryDurationInfinite; +import static org.ehcache.core.exceptions.StorePassThroughException.handleException; +import static org.ehcache.core.statistics.StatisticType.GAUGE; -public abstract class AbstractOffHeapStore implements AuthoritativeTier, LowerCachingTier { +public abstract class AbstractOffHeapStore extends BaseStore implements AuthoritativeTier, LowerCachingTier { private static final Logger LOG = LoggerFactory.getLogger(AbstractOffHeapStore.class); @@ -67,12 +73,10 @@ public abstract class AbstractOffHeapStore implements AuthoritativeTier keyType; - private final Class valueType; private final TimeSource timeSource; private final StoreEventDispatcher eventDispatcher; - private final Expiry expiry; + private final ExpiryPolicy expiry; private final OperationObserver getObserver; private final OperationObserver putObserver; @@ -98,93 +102,70 @@ public abstract class AbstractOffHeapStore implements AuthoritativeTier mapEvictionListener; + protected final BackingMapEvictionListener mapEvictionListener; @SuppressWarnings("unchecked") private volatile CachingTier.InvalidationListener invalidationListener = (CachingTier.InvalidationListener) NULL_INVALIDATION_LISTENER; - public AbstractOffHeapStore(String statisticsTag, Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher) { - keyType = config.getKeyType(); - valueType = config.getValueType(); + public AbstractOffHeapStore(Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, StatisticsService statisticsService) { + super(config, statisticsService); + expiry = config.getExpiry(); this.timeSource = timeSource; this.eventDispatcher = eventDispatcher; - this.getObserver = operation(StoreOperationOutcomes.GetOutcome.class).of(this).named("get").tag(statisticsTag).build(); - this.putObserver = operation(StoreOperationOutcomes.PutOutcome.class).of(this).named("put").tag(statisticsTag).build(); - this.putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).of(this).named("putIfAbsent").tag(statisticsTag).build(); - this.removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).of(this).named("remove").tag(statisticsTag).build(); - this.conditionalRemoveObserver = operation(StoreOperationOutcomes.ConditionalRemoveOutcome.class).of(this).named("conditionalRemove").tag(statisticsTag).build(); - this.replaceObserver = operation(StoreOperationOutcomes.ReplaceOutcome.class).of(this).named("replace").tag(statisticsTag).build(); - this.conditionalReplaceObserver = operation(StoreOperationOutcomes.ConditionalReplaceOutcome.class).of(this).named("conditionalReplace").tag(statisticsTag).build(); - this.computeObserver = operation(StoreOperationOutcomes.ComputeOutcome.class).of(this).named("compute").tag(statisticsTag).build(); - this.computeIfAbsentObserver = operation(StoreOperationOutcomes.ComputeIfAbsentOutcome.class).of(this).named("computeIfAbsent").tag(statisticsTag).build(); - this.evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).of(this).named("eviction").tag(statisticsTag).build(); - this.expirationObserver = operation(StoreOperationOutcomes.ExpirationOutcome.class).of(this).named("expiration").tag(statisticsTag).build(); - - this.getAndFaultObserver = operation(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class).of(this).named("getAndFault").tag(statisticsTag).build(); - this.computeIfAbsentAndFaultObserver = operation(AuthoritativeTierOperationOutcomes.ComputeIfAbsentAndFaultOutcome.class).of(this).named("computeIfAbsentAndFault").tag(statisticsTag).build(); - this.flushObserver = operation(AuthoritativeTierOperationOutcomes.FlushOutcome.class).of(this).named("flush").tag(statisticsTag).build(); - - this.invalidateObserver = operation(LowerCachingTierOperationsOutcome.InvalidateOutcome.class).of(this).named("invalidate").tag(statisticsTag).build(); - this.invalidateAllObserver = operation(LowerCachingTierOperationsOutcome.InvalidateAllOutcome.class).of(this).named("invalidateAll").tag(statisticsTag).build(); - this.invalidateAllWithHashObserver = operation(LowerCachingTierOperationsOutcome.InvalidateAllWithHashOutcome.class).of(this).named("invalidateAllWithHash").tag(statisticsTag).build(); - this.getAndRemoveObserver= operation(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.class).of(this).named("getAndRemove").tag(statisticsTag).build(); - this.installMappingObserver= operation(LowerCachingTierOperationsOutcome.InstallMappingOutcome.class).of(this).named("installMapping").tag(statisticsTag).build(); - - Set tags = new HashSet<>(Arrays.asList(statisticsTag, "tier")); - StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.allocatedMemory(); - }); - StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.occupiedMemory(); - }); - StatisticsManager.createPassThroughStatistic(this, "dataAllocatedMemory", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.dataAllocatedMemory(); - }); - StatisticsManager.createPassThroughStatistic(this, "dataOccupiedMemory", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.dataOccupiedMemory(); - }); - StatisticsManager.createPassThroughStatistic(this, "dataSize", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.dataSize(); - }); - StatisticsManager.createPassThroughStatistic(this, "dataVitalMemory", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.dataVitalMemory(); - }); - StatisticsManager.createPassThroughStatistic(this, "mappings", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.longSize(); - }); - StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, () -> -1L); - StatisticsManager.createPassThroughStatistic(this, "vitalMemory", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.vitalMemory(); - }); - StatisticsManager.createPassThroughStatistic(this, "removedSlotCount", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.removedSlotCount(); - }); - StatisticsManager.createPassThroughStatistic(this, "usedSlotCount", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.usedSlotCount(); - }); - StatisticsManager.createPassThroughStatistic(this, "tableCapacity", tags, () -> { - EhcacheOffHeapBackingMap> map = backingMap(); - return map == null ? -1L : map.tableCapacity(); - }); + this.getObserver = createObserver("get", StoreOperationOutcomes.GetOutcome.class, true); + this.putObserver = createObserver("put", StoreOperationOutcomes.PutOutcome.class, true); + this.putIfAbsentObserver = createObserver("putIfAbsent", StoreOperationOutcomes.PutIfAbsentOutcome.class, true); + this.removeObserver = createObserver("remove", StoreOperationOutcomes.RemoveOutcome.class, true); + this.conditionalRemoveObserver = createObserver("conditionalRemove", StoreOperationOutcomes.ConditionalRemoveOutcome.class, true); + this.replaceObserver = createObserver("replace", StoreOperationOutcomes.ReplaceOutcome.class, true); + this.conditionalReplaceObserver = createObserver("conditionalReplace", StoreOperationOutcomes.ConditionalReplaceOutcome.class, true); + this.computeObserver = createObserver("compute", StoreOperationOutcomes.ComputeOutcome.class, true); + this.computeIfAbsentObserver = createObserver("computeIfAbsent", StoreOperationOutcomes.ComputeIfAbsentOutcome.class, true); + this.evictionObserver = createObserver("eviction", StoreOperationOutcomes.EvictionOutcome.class, false); + this.expirationObserver = createObserver("expiration", StoreOperationOutcomes.ExpirationOutcome.class, false); + + this.getAndFaultObserver = createObserver("getAndFault", AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, true); + this.computeIfAbsentAndFaultObserver = createObserver("computeIfAbsentAndFault", AuthoritativeTierOperationOutcomes.ComputeIfAbsentAndFaultOutcome.class, true); + this.flushObserver = createObserver("flush", AuthoritativeTierOperationOutcomes.FlushOutcome.class, true); + + this.invalidateObserver = createObserver("invalidate", LowerCachingTierOperationsOutcome.InvalidateOutcome.class, true); + this.invalidateAllObserver = createObserver("invalidateAll", LowerCachingTierOperationsOutcome.InvalidateAllOutcome.class, true); + this.invalidateAllWithHashObserver = createObserver("invalidateAllWithHash", LowerCachingTierOperationsOutcome.InvalidateAllWithHashOutcome.class, true); + this.getAndRemoveObserver= createObserver("getAndRemove", LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.class, true); + this.installMappingObserver= createObserver("installMapping", LowerCachingTierOperationsOutcome.InstallMappingOutcome.class, true); + + Set tags = new HashSet<>(Arrays.asList(getStatisticsTag(), "tier")); + registerStatistic("allocatedMemory", GAUGE, tags, EhcacheOffHeapBackingMap::allocatedMemory); + registerStatistic("occupiedMemory", GAUGE, tags, EhcacheOffHeapBackingMap::occupiedMemory); + registerStatistic("dataAllocatedMemory", GAUGE, tags, EhcacheOffHeapBackingMap::dataAllocatedMemory); + registerStatistic("dataOccupiedMemory", GAUGE, tags, EhcacheOffHeapBackingMap::dataOccupiedMemory); + registerStatistic("dataSize", GAUGE, tags, EhcacheOffHeapBackingMap::dataSize); + registerStatistic("dataVitalMemory", GAUGE, tags, EhcacheOffHeapBackingMap::dataVitalMemory); + registerStatistic("mappings", GAUGE, tags, EhcacheOffHeapBackingMap::longSize); + registerStatistic("vitalMemory", GAUGE, tags, EhcacheOffHeapBackingMap::vitalMemory); + registerStatistic("removedSlotCount", GAUGE, tags, EhcacheOffHeapBackingMap::removedSlotCount); + registerStatistic("usedSlotCount", GAUGE, tags, EhcacheOffHeapBackingMap::usedSlotCount); + registerStatistic("tableCapacity", GAUGE, tags, EhcacheOffHeapBackingMap::tableCapacity); this.mapEvictionListener = new BackingMapEvictionListener<>(eventDispatcher, evictionObserver); } + private void registerStatistic(String name, StatisticType type, Set tags, Function>, T> fn) { + registerStatistic(name, type, tags, () -> { + EhcacheOffHeapBackingMap> map = backingMap(); + // Returning null means not available. + // Do not return -1 because a stat can be negative and it's hard to tell the difference + // between -1 meaning unavailable for a stat and for the other one -1 being a right value; + return map == null ? null : fn.apply(map); + }); + } + @Override public Store.ValueHolder get(K key) throws StoreAccessException { checkKey(key); + getObserver.begin(); ValueHolder result = internalGet(key, true, true); if (result == null) { @@ -203,7 +184,7 @@ private Store.ValueHolder internalGet(K key, final boolean updateAccess, fina OffHeapValueHolder result = backingMap().computeIfPresent(key, (mappedKey, mappedValue) -> { long now = timeSource.getTimeMillis(); - if (mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue.isExpired(now)) { onExpiration(mappedKey, mappedValue, eventSink); return null; } @@ -227,22 +208,24 @@ private Store.ValueHolder internalGet(K key, final boolean updateAccess, fina return result; } catch (RuntimeException re) { eventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); + throw handleException(re); } } @Override public boolean containsKey(K key) throws StoreAccessException { checkKey(key); + return internalGet(key, false, false) != null; } @Override public PutStatus put(final K key, final V value) throws StoreAccessException { - putObserver.begin(); checkKey(key); checkValue(value); + putObserver.begin(); + final AtomicBoolean put = new AtomicBoolean(); final StoreEventSink eventSink = eventDispatcher.eventSink(); @@ -250,7 +233,7 @@ public PutStatus put(final K key, final V value) throws StoreAccessException { try { BiFunction, OffHeapValueHolder> mappingFunction = (mappedKey, mappedValue) -> { - if (mappedValue != null && mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue != null && mappedValue.isExpired(now)) { mappedValue = null; } @@ -281,11 +264,12 @@ public PutStatus put(final K key, final V value) throws StoreAccessException { } @Override - public Store.ValueHolder putIfAbsent(final K key, final V value) throws NullPointerException, StoreAccessException { - putIfAbsentObserver.begin(); + public Store.ValueHolder putIfAbsent(final K key, final V value, Consumer put) throws NullPointerException, StoreAccessException { checkKey(key); checkValue(value); + putIfAbsentObserver.begin(); + final AtomicReference> returnValue = new AtomicReference<>(); final StoreEventSink eventSink = eventDispatcher.eventSink(); @@ -293,7 +277,7 @@ public Store.ValueHolder putIfAbsent(final K key, final V value) throws NullP BiFunction, OffHeapValueHolder> mappingFunction = (mappedKey, mappedValue) -> { long now = timeSource.getTimeMillis(); - if (mappedValue == null || mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue == null || mappedValue.isExpired(now)) { if (mappedValue != null) { onExpiration(mappedKey, mappedValue, eventSink); } @@ -323,9 +307,10 @@ public Store.ValueHolder putIfAbsent(final K key, final V value) throws NullP @Override public boolean remove(final K key) throws StoreAccessException { - removeObserver.begin(); checkKey(key); + removeObserver.begin(); + final StoreEventSink eventSink = eventDispatcher.eventSink(); final long now = timeSource.getTimeMillis(); @@ -334,7 +319,7 @@ public boolean remove(final K key) throws StoreAccessException { backingMap().computeIfPresent(key, (mappedKey, mappedValue) -> { - if (mappedValue != null && mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue != null && mappedValue.isExpired(now)) { onExpiration(mappedKey, mappedValue, eventSink); return null; } @@ -356,16 +341,17 @@ public boolean remove(final K key) throws StoreAccessException { return removed.get(); } catch (RuntimeException re) { eventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); + throw handleException(re); } } @Override public RemoveStatus remove(final K key, final V value) throws StoreAccessException { - conditionalRemoveObserver.begin(); checkKey(key); checkValue(value); + conditionalRemoveObserver.begin(); + final AtomicBoolean removed = new AtomicBoolean(false); final StoreEventSink eventSink = eventDispatcher.eventSink(); final AtomicBoolean mappingExists = new AtomicBoolean(); @@ -374,10 +360,10 @@ public RemoveStatus remove(final K key, final V value) throws StoreAccessExcepti backingMap().computeIfPresent(key, (mappedKey, mappedValue) -> { long now = timeSource.getTimeMillis(); - if (mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue.isExpired(now)) { onExpiration(mappedKey, mappedValue, eventSink); return null; - } else if (mappedValue.value().equals(value)) { + } else if (mappedValue.get().equals(value)) { removed.set(true); eventSink.removed(mappedKey, mappedValue); return null; @@ -402,28 +388,30 @@ public RemoveStatus remove(final K key, final V value) throws StoreAccessExcepti } } catch (RuntimeException re) { eventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); + throw handleException(re); } } @Override public ValueHolder replace(final K key, final V value) throws NullPointerException, StoreAccessException { - replaceObserver.begin(); checkKey(key); checkValue(value); + replaceObserver.begin(); + final AtomicReference> returnValue = new AtomicReference<>(null); final StoreEventSink eventSink = eventDispatcher.eventSink(); BiFunction, OffHeapValueHolder> mappingFunction = (mappedKey, mappedValue) -> { long now = timeSource.getTimeMillis(); - if (mappedValue == null || mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue == null || mappedValue.isExpired(now)) { if (mappedValue != null) { onExpiration(mappedKey, mappedValue, eventSink); } return null; } else { + mappedValue.forceDeserialization(); returnValue.set(mappedValue); return newUpdatedValueHolder(mappedKey, value, mappedValue, now, eventSink); } @@ -446,11 +434,12 @@ public ValueHolder replace(final K key, final V value) throws NullPointerExce @Override public ReplaceStatus replace(final K key, final V oldValue, final V newValue) throws NullPointerException, IllegalArgumentException, StoreAccessException { - conditionalReplaceObserver.begin(); checkKey(key); checkValue(oldValue); checkValue(newValue); + conditionalReplaceObserver.begin(); + final AtomicBoolean replaced = new AtomicBoolean(false); final StoreEventSink eventSink = eventDispatcher.eventSink(); final AtomicBoolean mappingExists = new AtomicBoolean(); @@ -458,12 +447,12 @@ public ReplaceStatus replace(final K key, final V oldValue, final V newValue) th BiFunction, OffHeapValueHolder> mappingFunction = (mappedKey, mappedValue) -> { long now = timeSource.getTimeMillis(); - if (mappedValue == null || mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue == null || mappedValue.isExpired(now)) { if (mappedValue != null) { onExpiration(mappedKey, mappedValue, eventSink); } return null; - } else if (oldValue.equals(mappedValue.value())) { + } else if (oldValue.equals(mappedValue.get())) { replaced.set(true); return newUpdatedValueHolder(mappedKey, newValue, mappedValue, now, eventSink); } else { @@ -497,7 +486,7 @@ public void clear() throws StoreAccessException { try { backingMap().clear(); } catch (RuntimeException re) { - throw handleRuntimeException(re); + throw handleException(re); } } @@ -517,7 +506,7 @@ public boolean hasNext() { } @Override - public Cache.Entry> next() throws StoreAccessException { + public Cache.Entry> next() { Map.Entry> next = mapIterator.next(); final K key = next.getKey(); final OffHeapValueHolder value = next.getValue(); @@ -536,28 +525,93 @@ public ValueHolder getValue() { } @Override - public ValueHolder compute(K key, BiFunction mappingFunction) throws StoreAccessException { - return compute(key, mappingFunction, REPLACE_EQUALS_TRUE); + public ValueHolder getAndCompute(K key, BiFunction mappingFunction) throws StoreAccessException { + checkKey(key); + + computeObserver.begin(); + + AtomicBoolean write = new AtomicBoolean(false); + AtomicReference> valueHeld = new AtomicReference<>(); + AtomicReference> existingValueHolder = new AtomicReference<>(); + StoreEventSink eventSink = eventDispatcher.eventSink(); + BiFunction, OffHeapValueHolder> computeFunction = (mappedKey, mappedValue) -> { + long now = timeSource.getTimeMillis(); + V existingValue = null; + if (mappedValue == null || mappedValue.isExpired(now)) { + if (mappedValue != null) { + onExpiration(mappedKey, mappedValue, eventSink); + } + mappedValue = null; + } else { + existingValue = mappedValue.get(); + existingValueHolder.set(mappedValue); + } + V computedValue = mappingFunction.apply(mappedKey, existingValue); + if (computedValue == null) { + if (mappedValue != null) { + write.set(true); + eventSink.removed(mappedKey, mappedValue); + } + return null; + } + + checkValue(computedValue); + write.set(true); + if (mappedValue != null) { + OffHeapValueHolder valueHolder = newUpdatedValueHolder(key, computedValue, mappedValue, now, eventSink); + if (valueHolder == null) { + valueHeld.set(new BasicOffHeapValueHolder<>(mappedValue.getId(), computedValue, now, now)); + } + return valueHolder; + } else { + return newCreateValueHolder(key, computedValue, now, eventSink); + } + }; + + OffHeapValueHolder result; + try { + result = computeWithRetry(key, computeFunction, false); + if (result == null && valueHeld.get() != null) { + result = valueHeld.get(); + } + eventDispatcher.releaseEventSink(eventSink); + if (result == null) { + if (write.get()) { + computeObserver.end(StoreOperationOutcomes.ComputeOutcome.REMOVED); + } else { + computeObserver.end(StoreOperationOutcomes.ComputeOutcome.MISS); + } + } else if (write.get()) { + computeObserver.end(StoreOperationOutcomes.ComputeOutcome.PUT); + } else { + computeObserver.end(StoreOperationOutcomes.ComputeOutcome.HIT); + } + return existingValueHolder.get(); + } catch (StoreAccessException | RuntimeException caex) { + eventDispatcher.releaseEventSinkAfterFailure(eventSink, caex); + throw caex; + } } @Override - public ValueHolder compute(final K key, final BiFunction mappingFunction, final Supplier replaceEqual) throws StoreAccessException { - computeObserver.begin(); + public ValueHolder computeAndGet(final K key, final BiFunction mappingFunction, final Supplier replaceEqual, Supplier invokeWriter) throws StoreAccessException { checkKey(key); + computeObserver.begin(); + final AtomicBoolean write = new AtomicBoolean(false); final AtomicReference> valueHeld = new AtomicReference<>(); final StoreEventSink eventSink = eventDispatcher.eventSink(); BiFunction, OffHeapValueHolder> computeFunction = (mappedKey, mappedValue) -> { long now = timeSource.getTimeMillis(); V existingValue = null; - if (mappedValue == null || mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue == null || mappedValue.isExpired(now)) { if (mappedValue != null) { onExpiration(mappedKey, mappedValue, eventSink); } mappedValue = null; } else { - existingValue = mappedValue.value(); + existingValue = mappedValue.get(); } V computedValue = mappingFunction.apply(mappedKey, existingValue); if (computedValue == null) { @@ -622,19 +676,20 @@ public ValueHolder computeIfAbsent(final K key, final Function internalComputeIfAbsent(final K key, final Function mappingFunction, boolean fault, final boolean delayedDeserialization) throws StoreAccessException { + checkKey(key); + if (fault) { computeIfAbsentAndFaultObserver.begin(); } else { computeIfAbsentObserver.begin(); } - checkKey(key); final AtomicBoolean write = new AtomicBoolean(false); final AtomicReference> valueHeld = new AtomicReference<>(); final StoreEventSink eventSink = eventDispatcher.eventSink(); BiFunction, OffHeapValueHolder> computeFunction = (mappedKey, mappedValue) -> { long now = timeSource.getTimeMillis(); - if (mappedValue == null || mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue == null || mappedValue.isExpired(now)) { if (mappedValue != null) { onExpiration(mappedKey, mappedValue, eventSink); } @@ -703,7 +758,7 @@ public Map> bulkCompute(Set keys, Function> bulkCompute(Set keys, final Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { - Map> result = new HashMap<>(); + Map> result = new HashMap<>(keys.size()); for (K key : keys) { checkKey(key); BiFunction biFunction = (k, v) -> { @@ -733,7 +788,7 @@ public V setValue(V value) { return null; } }; - ValueHolder computed = compute(key, biFunction, replaceEqual); + ValueHolder computed = computeAndGet(key, biFunction, replaceEqual, () -> false); result.put(key, computed); } return result; @@ -741,7 +796,7 @@ public V setValue(V value) { @Override public Map> bulkComputeIfAbsent(Set keys, final Function, Iterable>> mappingFunction) throws StoreAccessException { - Map> result = new HashMap<>(); + Map> result = new HashMap<>(keys.size()); for (K key : keys) { checkKey(key); Function function = k -> { @@ -762,13 +817,14 @@ public Map> bulkComputeIfAbsent(Set keys, final F @Override public ValueHolder getAndFault(K key) throws StoreAccessException { - getAndFaultObserver.begin(); checkKey(key); - ValueHolder mappedValue = null; + + getAndFaultObserver.begin(); + ValueHolder mappedValue; final StoreEventSink eventSink = eventDispatcher.eventSink(); try { mappedValue = backingMap().computeIfPresentAndPin(key, (mappedKey, mappedValue1) -> { - if(mappedValue1.isExpired(timeSource.getTimeMillis(), TimeUnit.MILLISECONDS)) { + if(mappedValue1.isExpired(timeSource.getTimeMillis())) { onExpiration(mappedKey, mappedValue1, eventSink); return null; } @@ -785,7 +841,7 @@ public ValueHolder getAndFault(K key) throws StoreAccessException { } } catch (RuntimeException re) { eventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); + throw handleException(re); } return mappedValue; } @@ -797,14 +853,15 @@ public ValueHolder computeIfAbsentAndFault(K key, Function valueFlushed) { - flushObserver.begin(); checkKey(key); + + flushObserver.begin(); final StoreEventSink eventSink = eventDispatcher.eventSink(); try { boolean result = backingMap().computeIfPinned(key, (k, valuePresent) -> { if (valuePresent.getId() == valueFlushed.getId()) { - if (valueFlushed.isExpired(timeSource.getTimeMillis(), TimeUnit.MILLISECONDS)) { + if (valueFlushed.isExpired(timeSource.getTimeMillis())) { onExpiration(k, valuePresent, eventSink); return null; } @@ -854,7 +911,7 @@ public void invalidate(final K key) throws StoreAccessException { invalidateObserver.end(LowerCachingTierOperationsOutcome.InvalidateOutcome.MISS); } } catch (RuntimeException re) { - throw handleRuntimeException(re); + throw handleException(re); } } @@ -904,13 +961,14 @@ private void notifyInvalidation(final K key, final ValueHolder p) { */ @Override public ValueHolder getAndRemove(final K key) throws StoreAccessException { - getAndRemoveObserver.begin(); checkKey(key); + getAndRemoveObserver.begin(); + final AtomicReference> valueHolderAtomicReference = new AtomicReference<>(); BiFunction, OffHeapValueHolder> computeFunction = (mappedKey, mappedValue) -> { long now = timeSource.getTimeMillis(); - if (mappedValue == null || mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { + if (mappedValue == null || mappedValue.isExpired(now)) { if (mappedValue != null) { onExpirationInCachingTier(mappedValue, key); } @@ -931,7 +989,7 @@ public ValueHolder getAndRemove(final K key) throws StoreAccessException { } return result; } catch (RuntimeException re) { - throw handleRuntimeException(re); + throw handleException(re); } } @@ -944,7 +1002,7 @@ public ValueHolder installMapping(final K key, final Function valueHolder = source.apply(k); if (valueHolder != null) { - if (valueHolder.isExpired(timeSource.getTimeMillis(), TimeUnit.MILLISECONDS)) { + if (valueHolder.isExpired(timeSource.getTimeMillis())) { onExpirationInCachingTier(valueHolder, key); return null; } else { @@ -963,12 +1021,12 @@ public ValueHolder installMapping(final K key, final Function computeWithRetry(K key, BiFunction, OffHeapValueHolder> computeFunction, boolean fault) throws StoreAccessException { - OffHeapValueHolder computeResult = null; + OffHeapValueHolder computeResult; try { computeResult = backingMap().compute(key, computeFunction, fault); } catch (OversizeMappingException ex) { @@ -980,12 +1038,12 @@ private OffHeapValueHolder computeWithRetry(K key, BiFunction setAccessTimeAndExpiryThenReturnMapping(K key, Off Duration duration = Duration.ZERO; try { duration = expiry.getExpiryForAccess(key, valueHolder); + if (duration != null && duration.isNegative()) { + duration = Duration.ZERO; + } } catch (RuntimeException re) { LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); } @@ -1017,52 +1078,49 @@ private OffHeapValueHolder newUpdatedValueHolder(K key, V value, OffHeapValue Duration duration = Duration.ZERO; try { duration = expiry.getExpiryForUpdate(key, existing, value); + if (duration != null && duration.isNegative()) { + duration = Duration.ZERO; + } } catch (RuntimeException re) { LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); } if (Duration.ZERO.equals(duration)) { - eventSink.expired(key, supplierOf(value)); + eventSink.expired(key, () -> value); return null; } if (duration == null) { - return new BasicOffHeapValueHolder<>(backingMap().nextIdFor(key), value, now, existing.expirationTime(OffHeapValueHolder.TIME_UNIT)); - } else if (duration.isInfinite()) { + return new BasicOffHeapValueHolder<>(backingMap().nextIdFor(key), value, now, existing.expirationTime()); + } else if (isExpiryDurationInfinite(duration)) { return new BasicOffHeapValueHolder<>(backingMap().nextIdFor(key), value, now, OffHeapValueHolder.NO_EXPIRE); } else { - return new BasicOffHeapValueHolder<>(backingMap().nextIdFor(key), value, now, safeExpireTime(now, duration)); + return new BasicOffHeapValueHolder<>(backingMap().nextIdFor(key), value, now, ExpiryUtils.getExpirationMillis(now, duration)); } } private OffHeapValueHolder newCreateValueHolder(K key, V value, long now, StoreEventSink eventSink) { - Duration duration = Duration.ZERO; - try { - duration = expiry.getExpiryForCreation(key, value); - } catch (RuntimeException re) { - LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - } - if (Duration.ZERO.equals(duration)) { + Objects.requireNonNull(value); + + Duration duration = ExpiryUtils.getExpiryForCreation(key, value, expiry); + if(duration.isZero()) { return null; } eventSink.created(key, value); - if (duration.isInfinite()) { - return new BasicOffHeapValueHolder<>(backingMap().nextIdFor(key), value, now, OffHeapValueHolder.NO_EXPIRE); - } else { - return new BasicOffHeapValueHolder<>(backingMap().nextIdFor(key), value, now, safeExpireTime(now, duration)); - } + long expirationTime = isExpiryDurationInfinite(duration) ? ValueHolder.NO_EXPIRE : ExpiryUtils.getExpirationMillis(now, duration); + + return new BasicOffHeapValueHolder<>(backingMap().nextIdFor(key), value, now, expirationTime); } private OffHeapValueHolder newTransferValueHolder(ValueHolder valueHolder) { if (valueHolder instanceof BinaryValueHolder && ((BinaryValueHolder) valueHolder).isBinaryValueAvailable()) { - return new BinaryOffHeapValueHolder<>(valueHolder.getId(), valueHolder.value(), ((BinaryValueHolder) valueHolder).getBinaryValue(), - valueHolder.creationTime(OffHeapValueHolder.TIME_UNIT), valueHolder.expirationTime(OffHeapValueHolder.TIME_UNIT), - valueHolder.lastAccessTime(OffHeapValueHolder.TIME_UNIT), valueHolder.hits()); + return new BinaryOffHeapValueHolder<>(valueHolder.getId(), valueHolder.get(), ((BinaryValueHolder) valueHolder).getBinaryValue(), + valueHolder.creationTime(), valueHolder.expirationTime(), + valueHolder.lastAccessTime()); } else { - return new BasicOffHeapValueHolder<>(valueHolder.getId(), valueHolder.value(), valueHolder.creationTime(OffHeapValueHolder.TIME_UNIT), - valueHolder.expirationTime(OffHeapValueHolder.TIME_UNIT), valueHolder.lastAccessTime(OffHeapValueHolder.TIME_UNIT), valueHolder - .hits()); + return new BasicOffHeapValueHolder<>(valueHolder.getId(), valueHolder.get(), valueHolder.creationTime(), + valueHolder.expirationTime(), valueHolder.lastAccessTime()); } } @@ -1073,38 +1131,6 @@ private void invokeValve() throws StoreAccessException { } } - private static long safeExpireTime(long now, Duration duration) { - long millis = OffHeapValueHolder.TIME_UNIT.convert(duration.getLength(), duration.getTimeUnit()); - - if (millis == Long.MAX_VALUE) { - return Long.MAX_VALUE; - } - - long result = now + millis; - if (result < 0) { - return Long.MAX_VALUE; - } - return result; - } - - private void checkKey(K keyObject) { - if (keyObject == null) { - throw new NullPointerException(); - } - if (!keyType.isAssignableFrom(keyObject.getClass())) { - throw new ClassCastException("Invalid key type, expected : " + keyType.getName() + " but was : " + keyObject.getClass().getName()); - } - } - - private void checkValue(V valueObject) { - if (valueObject == null) { - throw new NullPointerException(); - } - if (!valueType.isAssignableFrom(valueObject.getClass())) { - throw new ClassCastException("Invalid value type, expected : " + valueType.getName() + " but was : " + valueObject.getClass().getName()); - } - } - private void onExpirationInCachingTier(ValueHolder mappedValue, K key) { expirationObserver.begin(); invalidationListener.onInvalidation(key, mappedValue); @@ -1126,6 +1152,10 @@ private void onExpiration(K mappedKey, ValueHolder mappedValue, StoreEventSin protected abstract SwitchableEvictionAdvisor> evictionAdvisor(); + protected OffHeapValueHolderPortability createValuePortability(Serializer serializer) { + return new OffHeapValueHolderPortability<>(serializer); + } + protected static SwitchableEvictionAdvisor> wrap(EvictionAdvisor delegate) { return new OffHeapEvictionAdvisorWrapper<>(delegate); } @@ -1142,7 +1172,7 @@ private OffHeapEvictionAdvisorWrapper(EvictionAdvisor dele @Override public boolean adviseAgainstEviction(K key, OffHeapValueHolder value) { try { - return delegate.adviseAgainstEviction(key, value.value()); + return delegate.adviseAgainstEviction(key, value.get()); } catch (Exception e) { LOG.error("Exception raised while running eviction advisor " + "- Eviction will assume entry is NOT advised against eviction", e); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolder.java similarity index 87% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolder.java index 8a518c5956..ce755034f0 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolder.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolder.java @@ -18,6 +18,8 @@ import org.ehcache.core.spi.store.Store; +import java.util.concurrent.TimeUnit; + /** * BasicOffHeapValueHolder */ @@ -26,14 +28,13 @@ public class BasicOffHeapValueHolder extends OffHeapValueHolder { private final V value; public BasicOffHeapValueHolder(long id, V value, long creationTime, long expireTime) { - this(id, value, creationTime, expireTime, 0, 0); + this(id, value, creationTime, expireTime, 0); } - public BasicOffHeapValueHolder(long id, V value, long creationTime, long expireTime, long lastAccessTime, long hits) { + public BasicOffHeapValueHolder(long id, V value, long creationTime, long expireTime, long lastAccessTime) { super(id, creationTime, expireTime); - setLastAccessTime(lastAccessTime, TIME_UNIT); + setLastAccessTime(lastAccessTime); this.value = value; - this.setHits(hits); } @Override @@ -57,7 +58,7 @@ void detach() { } @Override - public V value() { + public V get() { return value; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolder.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolder.java index 48b0b33f4b..18df2eb356 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolder.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolder.java @@ -18,10 +18,10 @@ import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.store.BinaryValueHolder; -import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.concurrent.TimeUnit; /** * BinaryOffHeapValueHolder @@ -31,12 +31,11 @@ final class BinaryOffHeapValueHolder extends OffHeapValueHolder implements private final ByteBuffer binaryValue; private final V value; - BinaryOffHeapValueHolder(long id, V value, ByteBuffer binaryValue, long creationTime, long expireTime, long lastAccessTime, long hits) { + BinaryOffHeapValueHolder(long id, V value, ByteBuffer binaryValue, long creationTime, long expireTime, long lastAccessTime) { super(id, creationTime, expireTime); this.value = value; - setLastAccessTime(lastAccessTime, TIME_UNIT); + setLastAccessTime(lastAccessTime); this.binaryValue = binaryValue; - this.setHits(hits); } @@ -71,11 +70,11 @@ void detach() { } @Override - public V value() { + public V get() { return value; } - private void writeObject(java.io.ObjectOutputStream out) throws IOException { + private void writeObject(java.io.ObjectOutputStream out) { throw new UnsupportedOperationException("This subclass of AbstractValueHolder is NOT serializable"); } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheOffHeapBackingMap.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheOffHeapBackingMap.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheOffHeapBackingMap.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheOffHeapBackingMap.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/HeuristicConfiguration.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/HeuristicConfiguration.java similarity index 86% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/HeuristicConfiguration.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/HeuristicConfiguration.java index 9952983993..e74e6e0466 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/HeuristicConfiguration.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/HeuristicConfiguration.java @@ -106,14 +106,13 @@ private long getInitialSegmentCapacity() { @Override public String toString() { - StringBuilder sb = new StringBuilder("Heuristic Configuration: \n"); - sb.append("Maximum Size (specified) : ").append(DebuggingUtils.toBase2SuffixedString(getMaximumSize())).append("B\n"); - sb.append("Minimum Chunk Size : ").append(DebuggingUtils.toBase2SuffixedString(getMinimumChunkSize())).append("B\n"); - sb.append("Maximum Chunk Size : ").append(DebuggingUtils.toBase2SuffixedString(getMaximumChunkSize())).append("B\n"); - sb.append("Concurrency : ").append(getConcurrency()).append("\n"); - sb.append("Initial Segment Table Size : ").append(DebuggingUtils.toBase2SuffixedString(getInitialSegmentTableSize())).append(" slots\n"); - sb.append("Segment Data Page Size : ").append(DebuggingUtils.toBase2SuffixedString(getSegmentDataPageSize())).append("B\n"); - return sb.toString(); + String sb = "Heuristic Configuration: \n" + "Maximum Size (specified) : " + DebuggingUtils.toBase2SuffixedString(getMaximumSize()) + "B\n" + + "Minimum Chunk Size : " + DebuggingUtils.toBase2SuffixedString(getMinimumChunkSize()) + "B\n" + + "Maximum Chunk Size : " + DebuggingUtils.toBase2SuffixedString(getMaximumChunkSize()) + "B\n" + + "Concurrency : " + getConcurrency() + "\n" + + "Initial Segment Table Size : " + DebuggingUtils.toBase2SuffixedString(getInitialSegmentTableSize()) + " slots\n" + + "Segment Data Page Size : " + DebuggingUtils.toBase2SuffixedString(getSegmentDataPageSize()) + "B\n"; + return sb; } } diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolder.java new file mode 100644 index 0000000000..c5557bd50f --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolder.java @@ -0,0 +1,130 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap; + +import org.ehcache.core.spi.store.Store; +import org.ehcache.impl.internal.store.BinaryValueHolder; +import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; +import org.ehcache.spi.serialization.Serializer; +import org.terracotta.offheapstore.storage.portability.WriteContext; + +import java.nio.ByteBuffer; + +/** +* OffHeapValueHolder variant that supports lazy deserialization and also serving the binary value if detached. +*/ +public class LazyOffHeapValueHolder extends OffHeapValueHolder implements BinaryValueHolder { + + private final Serializer valueSerializer; + private final WriteContext writeContext; + private Mode mode; + private ByteBuffer binaryValue; + private V value; + + public LazyOffHeapValueHolder(long id, ByteBuffer binaryValue, Serializer serializer, long creationTime, long expireTime, long lastAccessTime, WriteContext writeContext) { + super(id, creationTime, expireTime); + setLastAccessTime(lastAccessTime); + this.binaryValue = binaryValue; + this.valueSerializer = serializer; + this.writeContext = writeContext; + this.mode = Mode.ATTACHED; + } + + @Override + public V get() { + forceDeserialization(); + return value; + } + + @Override + public ByteBuffer getBinaryValue() throws IllegalStateException { + if (isBinaryValueAvailable()) { + return binaryValue.duplicate(); + } else { + throw new IllegalStateException("This OffHeapValueHolder has not been prepared to hand off its binary form"); + } + } + + @Override + public boolean isBinaryValueAvailable() { + return mode == Mode.DETACHED; + } + + @Override + void updateMetadata(final Store.ValueHolder valueFlushed) { + if(getId() != valueFlushed.getId()) { + throw new IllegalArgumentException("Wrong id passed in [this.id != id] : " + getId() + " != " + valueFlushed.getId()); + } + this.setLastAccessTime(valueFlushed.lastAccessTime()); + this.setExpirationTime(valueFlushed.expirationTime()); + } + + /** + * Must be called under offheap lock, may corrupt memory otherwise + */ + @Override + void writeBack() { + writeContext.setLong(OffHeapValueHolderPortability.ACCESS_TIME_OFFSET, lastAccessTime()); + writeContext.setLong(OffHeapValueHolderPortability.EXPIRE_TIME_OFFSET, expirationTime()); + writeContext.flush(); + } + + /** + * Must be called under offheap lock (when it actually does something) + */ + @Override + void forceDeserialization() { + if (value == null) { + value = deserialize(); + } + } + + V deserialize() { + try { + return valueSerializer.read(binaryValue.duplicate()); + } catch (ClassNotFoundException e) { + throw new SerializerException(e); + } catch (SerializerException e) { + throw new SerializerException("Seeing this exception and having no other " + + "serialization related issues is a red flag!", e); + } + } + + /** + * Must be called under offheap lock, may read invalid memory content otherwise + */ + @Override + void detach() { + if (mode == Mode.ATTACHED) { + byte[] bytes = new byte[binaryValue.remaining()]; + binaryValue.get(bytes); + binaryValue = ByteBuffer.wrap(bytes); + mode = Mode.DETACHED; + } else { + throw new IllegalStateException("OffHeapValueHolder in mode " + mode + " cannot be prepared for delayed deserialization"); + } + } + + private enum Mode { + ATTACHED, DETACHED + } + + private void writeObject(java.io.ObjectOutputStream out) { + throw new UnsupportedOperationException("This subclass of AbstractValueHolder is NOT serializable"); + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/MemorySizeParser.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/MemorySizeParser.java similarity index 97% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/MemorySizeParser.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/MemorySizeParser.java index 0b89ab895e..696a150676 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/MemorySizeParser.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/MemorySizeParser.java @@ -87,8 +87,8 @@ private static MemorySize toMemorySize(String configuredMemorySize, long unitMul * Memory size calculator. */ private static final class MemorySize { - private String configuredMemorySizeWithoutUnit; - private long multiplicationFactor; + private final String configuredMemorySizeWithoutUnit; + private final long multiplicationFactor; private MemorySize(String configuredMemorySizeWithoutUnit, long multiplicationFactor) { this.configuredMemorySizeWithoutUnit = configuredMemorySizeWithoutUnit; diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapMapStatistics.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapMapStatistics.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapMapStatistics.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapMapStatistics.java diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java new file mode 100644 index 0000000000..acaf26bc75 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -0,0 +1,305 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap; + +import org.ehcache.config.SizedResourcePool; +import org.ehcache.core.CacheConfigurationChangeListener; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourceType; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.statistics.OperationStatistic; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.events.ThreadLocalStoreEventDispatcher; +import org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory; +import org.ehcache.impl.internal.store.offheap.portability.SerializerPortability; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.impl.serialization.TransientStateRepository; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.spi.store.tiering.LowerCachingTier; +import org.ehcache.spi.serialization.SerializationProvider; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.statistics.TierOperationOutcomes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.offheapstore.paging.PageSource; +import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; +import org.terracotta.offheapstore.pinning.PinnableSegment; +import org.terracotta.offheapstore.storage.OffHeapBufferStorageEngine; +import org.terracotta.offheapstore.storage.PointerSize; +import org.terracotta.offheapstore.storage.portability.Portability; +import org.terracotta.offheapstore.util.Factory; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.ehcache.config.Eviction.noAdvice; +import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; + +/** + * OffHeapStore + */ +public class OffHeapStore extends AbstractOffHeapStore { + + private final SwitchableEvictionAdvisor> evictionAdvisor; + private final Serializer keySerializer; + private final Serializer valueSerializer; + private final long sizeInBytes; + + private volatile EhcacheConcurrentOffHeapClockCache> map; + + public OffHeapStore(final Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, long sizeInBytes, StatisticsService statisticsService) { + super(config, timeSource, eventDispatcher, statisticsService); + EvictionAdvisor evictionAdvisor = config.getEvictionAdvisor(); + if (evictionAdvisor != null) { + this.evictionAdvisor = wrap(evictionAdvisor); + } else { + this.evictionAdvisor = wrap(noAdvice()); + } + this.keySerializer = config.getKeySerializer(); + this.valueSerializer = config.getValueSerializer(); + this.sizeInBytes = sizeInBytes; + } + + @Override + protected String getStatisticsTag() { + return "OffHeap"; + } + + @Override + public List getConfigurationChangeListeners() { + return Collections.emptyList(); + } + + private EhcacheConcurrentOffHeapClockCache> createBackingMap(long size, Serializer keySerializer, Serializer valueSerializer, SwitchableEvictionAdvisor> evictionAdvisor) { + HeuristicConfiguration config = new HeuristicConfiguration(size); + PageSource source = new UpfrontAllocatingPageSource(getBufferSource(), config.getMaximumSize(), config.getMaximumChunkSize(), config.getMinimumChunkSize()); + Portability keyPortability = new SerializerPortability<>(keySerializer); + Portability> valuePortability = createValuePortability(valueSerializer); + Factory>> storageEngineFactory = OffHeapBufferStorageEngine.createFactory(PointerSize.INT, source, config + .getSegmentDataPageSize(), keyPortability, valuePortability, false, true); + + Factory>> segmentFactory = new EhcacheSegmentFactory<>( + source, + storageEngineFactory, + config.getInitialSegmentTableSize(), + evictionAdvisor, + mapEvictionListener); + return new EhcacheConcurrentOffHeapClockCache<>(evictionAdvisor, segmentFactory, config.getConcurrency()); + + } + + @Override + protected EhcacheOffHeapBackingMap> backingMap() { + return map; + } + + @Override + protected SwitchableEvictionAdvisor> evictionAdvisor() { + return evictionAdvisor; + } + + @ServiceDependencies({TimeSourceService.class, SerializationProvider.class}) + public static class Provider extends BaseStoreProvider implements AuthoritativeTier.Provider, LowerCachingTier.Provider { + + private static final Logger LOGGER = LoggerFactory.getLogger(Provider.class); + + private final Set> createdStores = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap<>()); + private final Map, OperationStatistic[]> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>(); + + @Override + protected ResourceType getResourceType() { + return ResourceType.Core.OFFHEAP; + } + + @Override + public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { + return resourceTypes.equals(Collections.singleton(ResourceType.Core.OFFHEAP)) ? 1 : 0; + } + + @Override + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + return authorityResource.equals(ResourceType.Core.OFFHEAP) ? 1 : 0; + } + + @Override + public OffHeapStore createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + OffHeapStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()), serviceConfigs); + + tierOperationStatistics.put(store, new OperationStatistic[] { + createTranslatedStatistic(store, "get", TierOperationOutcomes.GET_TRANSLATION, "get"), + createTranslatedStatistic(store, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); + + return store; + } + + private OffHeapStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, ServiceConfiguration... serviceConfigs) { + if (getServiceProvider() == null) { + throw new NullPointerException("ServiceProvider is null in OffHeapStore.Provider."); + } + TimeSource timeSource = getServiceProvider().getService(TimeSourceService.class).getTimeSource(); + + SizedResourcePool offHeapPool = storeConfig.getResourcePools().getPoolForResource(getResourceType()); + if (!(offHeapPool.getUnit() instanceof MemoryUnit)) { + throw new IllegalArgumentException("OffHeapStore only supports resources with memory unit"); + } + MemoryUnit unit = (MemoryUnit)offHeapPool.getUnit(); + + + OffHeapStore offHeapStore = new OffHeapStore<>(storeConfig, timeSource, eventDispatcher, unit.toBytes(offHeapPool + .getSize()), getServiceProvider().getService(StatisticsService.class)); + createdStores.add(offHeapStore); + return offHeapStore; + } + + @Override + public void releaseStore(Store resource) { + if (!createdStores.contains(resource)) { + throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); + } + OffHeapStore offHeapStore = (OffHeapStore) resource; + close(offHeapStore); + getStatisticsService().ifPresent(s -> s.cleanForNode(offHeapStore)); + tierOperationStatistics.remove(offHeapStore); + } + + static void close(final OffHeapStore resource) { + EhcacheConcurrentOffHeapClockCache localMap = resource.map; + if (localMap != null) { + resource.map = null; + localMap.destroy(); + } + } + + @Override + public void initStore(Store resource) { + if (!createdStores.contains(resource)) { + throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); + } + + OffHeapStore offHeapStore = (OffHeapStore) resource; + Serializer keySerializer = offHeapStore.keySerializer; + if (keySerializer instanceof StatefulSerializer) { + ((StatefulSerializer)keySerializer).init(new TransientStateRepository()); + } + Serializer valueSerializer = offHeapStore.valueSerializer; + if (valueSerializer instanceof StatefulSerializer) { + ((StatefulSerializer)valueSerializer).init(new TransientStateRepository()); + } + + init(offHeapStore); + } + + static void init(final OffHeapStore resource) { + resource.map = resource.createBackingMap(resource.sizeInBytes, resource.keySerializer, resource.valueSerializer, resource.evictionAdvisor); + } + + @Override + public void stop() { + try { + createdStores.clear(); + } finally { + super.stop(); + } + } + + @Override + public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + OffHeapStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig + .getDispatcherConcurrency()), serviceConfigs); + + tierOperationStatistics.put(authoritativeTier, new OperationStatistic[] { + createTranslatedStatistic(authoritativeTier, "get", TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "getAndFault"), + createTranslatedStatistic(authoritativeTier, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); + + return authoritativeTier; + } + + @Override + public void releaseAuthoritativeTier(AuthoritativeTier resource) { + releaseStore(resource); + } + + @Override + public void initAuthoritativeTier(AuthoritativeTier resource) { + initStore(resource); + } + + @Override + public LowerCachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + OffHeapStore lowerCachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + + tierOperationStatistics.put(lowerCachingTier, new OperationStatistic[] { + createTranslatedStatistic(lowerCachingTier, "get", TierOperationOutcomes.GET_AND_REMOVE_TRANSLATION, "getAndRemove"), + createTranslatedStatistic(lowerCachingTier, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") + }); + + return lowerCachingTier; + } + + @Override + @SuppressWarnings("unchecked") + public void releaseCachingTier(LowerCachingTier resource) { + if (!createdStores.contains(resource)) { + throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); + } + flushToLowerTier((OffHeapStore) resource); + releaseStore((Store) resource); + } + + private void flushToLowerTier(OffHeapStore offheapStore) { + StoreAccessException lastFailure = null; + int failureCount = 0; + for (Object key : offheapStore.backingMap().keySet()) { + try { + offheapStore.invalidate(key); + } catch (StoreAccessException cae) { + lastFailure = cae; + failureCount++; + LOGGER.warn("Error flushing '{}' to lower tier", key, cae); + } + } + if (lastFailure != null) { + throw new RuntimeException("Failed to flush some mappings to lower tier, " + + failureCount + " could not be flushed. This error represents the last failure.", lastFailure); + } + } + + @Override + public void initCachingTier(LowerCachingTier resource) { + if (!createdStores.contains(resource)) { + throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); + } + init((OffHeapStore) resource); + } + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderFactory.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderFactory.java index 4d3865d91e..8619fd8a13 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderFactory.java @@ -16,21 +16,23 @@ package org.ehcache.impl.internal.store.offheap; -import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; /** * OffHeapStoreProviderFactory */ +@Component public class OffHeapStoreProviderFactory implements ServiceFactory { @Override - public OffHeapStore.Provider create(ServiceCreationConfiguration configuration) { + public OffHeapStore.Provider create(ServiceCreationConfiguration configuration) { return new OffHeapStore.Provider(); } @Override - public Class getServiceType() { + public Class getServiceType() { return OffHeapStore.Provider.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreUtils.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreUtils.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreUtils.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreUtils.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolder.java similarity index 86% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolder.java index 1990aeba91..66130cf2c2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolder.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolder.java @@ -26,17 +26,10 @@ */ public abstract class OffHeapValueHolder extends AbstractValueHolder { - public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; - public OffHeapValueHolder(long id, long creationTime, long expireTime) { super(id, creationTime, expireTime); } - @Override - final protected TimeUnit nativeTimeUnit() { - return TIME_UNIT; - } - @Override public boolean equals(Object other) { if (this == other) return true; @@ -45,14 +38,14 @@ public boolean equals(Object other) { OffHeapValueHolder that = (OffHeapValueHolder)other; if (!super.equals(that)) return false; - return value().equals(that.value()); + return get().equals(that.get()); } @Override public int hashCode() { int result = 1; - result = 31 * result + value().hashCode(); + result = 31 * result + get().hashCode(); result = 31 * result + super.hashCode(); return result; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/SwitchableEvictionAdvisor.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/SwitchableEvictionAdvisor.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/SwitchableEvictionAdvisor.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/SwitchableEvictionAdvisor.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentFactory.java similarity index 84% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentFactory.java index d1e0cec986..a90d214c53 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentFactory.java @@ -16,6 +16,7 @@ package org.ehcache.impl.internal.store.offheap.factories; +import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.store.offheap.SwitchableEvictionAdvisor; import org.terracotta.offheapstore.Metadata; import org.terracotta.offheapstore.ReadWriteLockedOffHeapClockCache; @@ -24,6 +25,9 @@ import org.terracotta.offheapstore.storage.StorageEngine; import org.terracotta.offheapstore.util.Factory; +import java.nio.IntBuffer; +import java.util.Iterator; +import java.util.Set; import java.util.concurrent.locks.Lock; /** @@ -105,8 +109,33 @@ public boolean evict(int index, boolean shrink) { } } + @Override + protected Set> createEntrySet() { + return new EntrySet(); + } + public interface EvictionListener { void onEviction(K key, V value); } + + private class EntrySet extends LockedEntrySet { + @Override + public Iterator> iterator() { + readLock().lock(); + try { + return new LockedEntryIterator() { + + @Override + protected Entry create(IntBuffer entry) { + Entry entryObject = super.create(entry); + ((Store.ValueHolder) entryObject.getValue()).get(); + return entryObject; + } + }; + } finally { + readLock().unlock(); + } + } + } } } diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/OffHeapValueHolderPortability.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/OffHeapValueHolderPortability.java new file mode 100644 index 0000000000..22b97eacb2 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/OffHeapValueHolderPortability.java @@ -0,0 +1,88 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap.portability; + +import org.ehcache.impl.internal.store.BinaryValueHolder; +import org.ehcache.impl.internal.store.offheap.LazyOffHeapValueHolder; +import org.ehcache.impl.internal.store.offheap.OffHeapValueHolder; +import org.ehcache.spi.serialization.Serializer; +import org.terracotta.offheapstore.storage.portability.WriteBackPortability; +import org.terracotta.offheapstore.storage.portability.WriteContext; + +import java.nio.ByteBuffer; +import java.util.concurrent.TimeUnit; + +/** + * OffHeapValueHolderPortability + */ +public class OffHeapValueHolderPortability implements WriteBackPortability> { + + public static final int ACCESS_TIME_OFFSET = 16; + public static final int EXPIRE_TIME_OFFSET = 24; + + // 5 longs: id, access, expire, creation time, hits (which is kept for compatibility) + private static final int FIELDS_OVERHEAD = 40; + + private final Serializer serializer; + + public OffHeapValueHolderPortability(Serializer serializer) { + this.serializer = serializer; + } + + @Override + public ByteBuffer encode(OffHeapValueHolder valueHolder) { + ByteBuffer serialized; + if (valueHolder instanceof BinaryValueHolder && ((BinaryValueHolder)valueHolder).isBinaryValueAvailable()) { + serialized = ((BinaryValueHolder)valueHolder).getBinaryValue(); + } else { + serialized = serializer.serialize(valueHolder.get()); + } + ByteBuffer byteBuffer = ByteBuffer.allocate(serialized.remaining() + FIELDS_OVERHEAD); + byteBuffer.putLong(valueHolder.getId()); + byteBuffer.putLong(valueHolder.creationTime()); + byteBuffer.putLong(valueHolder.lastAccessTime()); + byteBuffer.putLong(valueHolder.expirationTime()); + byteBuffer.putLong(0L); // represent the hits on previous versions. It is kept for compatibility reasons with previously saved data + byteBuffer.put(serialized); + byteBuffer.flip(); + return byteBuffer; + } + + @Override + public OffHeapValueHolder decode(ByteBuffer byteBuffer) { + return decode(byteBuffer, null); + } + + @Override + public boolean equals(Object o, ByteBuffer byteBuffer) { + return o.equals(decode(byteBuffer)); + } + + @Override + public OffHeapValueHolder decode(ByteBuffer byteBuffer, WriteContext writeContext) { + long id = byteBuffer.getLong(); + long creationTime = byteBuffer.getLong(); + long lastAccessTime = byteBuffer.getLong(); + long expireTime = byteBuffer.getLong(); + byteBuffer.getLong(); // hits read from disk. It is kept for compatibility reasons with previously saved data + return createLazyOffHeapValueHolder(id, byteBuffer.slice(), serializer, creationTime, expireTime, lastAccessTime, writeContext); + } + + protected OffHeapValueHolder createLazyOffHeapValueHolder(long id, ByteBuffer byteBuffer, Serializer serializer, long creationTime, long expireTime, long lastAccessTime, WriteContext writeContext) { + return new LazyOffHeapValueHolder<>(id, byteBuffer, serializer, creationTime, expireTime, lastAccessTime, writeContext); + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java index 798c441397..1883026f03 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java @@ -18,18 +18,19 @@ import org.ehcache.config.ResourceType; import org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.core.spi.store.tiering.HigherCachingTier; import org.ehcache.core.spi.store.tiering.LowerCachingTier; +import org.ehcache.spi.service.OptionalServiceDependencies; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.statistics.StatisticsManager; import java.util.AbstractMap; import java.util.ArrayList; @@ -39,7 +40,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; -import java.util.function.BiFunction; import java.util.function.Function; import static java.util.Collections.unmodifiableSet; @@ -69,8 +69,6 @@ public CompoundCachingTier(HigherCachingTier higher, final LowerCachingTie } }); - StatisticsManager.associate(higher).withParent(this); - StatisticsManager.associate(lower).withParent(this); } private void notifyInvalidation(K key, Store.ValueHolder p) { @@ -81,6 +79,9 @@ private void notifyInvalidation(K key, Store.ValueHolder p) { } static class ComputationException extends RuntimeException { + + private static final long serialVersionUID = 6832417052348277644L; + public ComputationException(StoreAccessException cause) { super(cause); } @@ -116,6 +117,26 @@ public Store.ValueHolder getOrComputeIfAbsent(K key, final Function getOrDefault(K key, Function> source) throws StoreAccessException { + try { + return higher.getOrDefault(key, keyParam -> { + try { + Store.ValueHolder valueHolder = lower.get(keyParam); + if (valueHolder != null) { + return valueHolder; + } + + return source.apply(keyParam); + } catch (StoreAccessException cae) { + throw new ComputationException(cae); + } + }); + } catch (ComputationException ce) { + throw ce.getStoreAccessException(); + } + } + @Override public void invalidate(final K key) throws StoreAccessException { try { @@ -189,12 +210,13 @@ public List getConfigurationChangeListeners() @ServiceDependencies({HigherCachingTier.Provider.class, LowerCachingTier.Provider.class}) + @OptionalServiceDependencies("org.ehcache.core.spi.service.StatisticsService") public static class Provider implements CachingTier.Provider { private volatile ServiceProvider serviceProvider; private final ConcurrentMap, Map.Entry> providersMap = new ConcurrentWeakIdentityHashMap<>(); @Override - public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { if (serviceProvider == null) { throw new RuntimeException("ServiceProvider is null."); } @@ -214,6 +236,11 @@ public CachingTier createCachingTier(Store.Configuration stor LowerCachingTier lowerCachingTier = lowerProvider.createCachingTier(storeConfig, serviceConfigs); CompoundCachingTier compoundCachingTier = new CompoundCachingTier<>(higherCachingTier, lowerCachingTier); + StatisticsService statisticsService = serviceProvider.getService(StatisticsService.class); + if (statisticsService != null) { + statisticsService.registerWithParent(higherCachingTier, compoundCachingTier); + statisticsService.registerWithParent(lowerCachingTier, compoundCachingTier); + } providersMap.put(compoundCachingTier, new AbstractMap.SimpleEntry<>(higherProvider, lowerProvider)); return compoundCachingTier; } @@ -223,7 +250,7 @@ public void releaseCachingTier(CachingTier resource) { if (!providersMap.containsKey(resource)) { throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); } - CompoundCachingTier compoundCachingTier = (CompoundCachingTier) resource; + CompoundCachingTier compoundCachingTier = (CompoundCachingTier) resource; Map.Entry entry = providersMap.get(resource); entry.getKey().releaseHigherCachingTier(compoundCachingTier.higher); @@ -235,7 +262,7 @@ public void initCachingTier(CachingTier resource) { if (!providersMap.containsKey(resource)) { throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); } - CompoundCachingTier compoundCachingTier = (CompoundCachingTier) resource; + CompoundCachingTier compoundCachingTier = (CompoundCachingTier) resource; Map.Entry entry = providersMap.get(resource); entry.getValue().initCachingTier(compoundCachingTier.lower); @@ -243,7 +270,7 @@ public void initCachingTier(CachingTier resource) { } @Override - public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { + public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { return resourceTypes.equals(unmodifiableSet(EnumSet.of(HEAP, OFFHEAP))) ? 2 : 0; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierProviderFactory.java similarity index 83% rename from impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierProviderFactory.java index 4fd20e68f4..7fd64e0d3e 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierProviderFactory.java @@ -15,20 +15,22 @@ */ package org.ehcache.impl.internal.store.tiering; -import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; /** * @author Ludovic Orban */ +@Component public class CompoundCachingTierProviderFactory implements ServiceFactory { @Override - public CompoundCachingTier.Provider create(ServiceCreationConfiguration serviceConfiguration) { + public CompoundCachingTier.Provider create(ServiceCreationConfiguration serviceConfiguration) { return new CompoundCachingTier.Provider(); } @Override - public Class getServiceType() { + public Class getServiceType() { return CompoundCachingTier.Provider.class; } } diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java new file mode 100644 index 0000000000..c2b3c0bbe2 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java @@ -0,0 +1,614 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.tiering; + +import org.ehcache.Cache; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.ResourceType; +import org.ehcache.core.CacheConfigurationChangeListener; +import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.exceptions.StorePassThroughException; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.spi.service.OptionalServiceDependencies; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; + +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +/** + * A {@link Store} implementation supporting a tiered caching model. + */ +public class TieredStore implements Store { + + private final AtomicReference> cachingTierRef; + private final CachingTier noopCachingTier; + private final CachingTier realCachingTier; + private final AuthoritativeTier authoritativeTier; + + public TieredStore(CachingTier cachingTier, AuthoritativeTier authoritativeTier) { + this.cachingTierRef = new AtomicReference<>(cachingTier); + this.authoritativeTier = authoritativeTier; + this.realCachingTier = cachingTier; + this.noopCachingTier = new NoopCachingTier<>(authoritativeTier); + + + this.realCachingTier.setInvalidationListener(TieredStore.this.authoritativeTier::flush); + + this.authoritativeTier.setInvalidationValve(new AuthoritativeTier.InvalidationValve() { + @Override + public void invalidateAll() throws StoreAccessException { + invalidateAllInternal(); + } + + @Override + public void invalidateAllWithHash(long hash) throws StoreAccessException { + cachingTier().invalidateAllWithHash(hash); + } + }); + + } + + @Override + public ValueHolder get(final K key) throws StoreAccessException { + try { + return cachingTier().getOrComputeIfAbsent(key, keyParam -> { + try { + return authoritativeTier.getAndFault(keyParam); + } catch (StoreAccessException cae) { + throw new StorePassThroughException(cae); + } + }); + } catch (StoreAccessException ce) { + return handleStoreAccessException(ce); + } + } + + @Override + public boolean containsKey(K key) throws StoreAccessException { + return authoritativeTier.containsKey(key); + } + + @Override + public PutStatus put(final K key, final V value) throws StoreAccessException { + try { + return authoritativeTier.put(key, value); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public ValueHolder getAndPut(K key, V value) throws StoreAccessException { + try { + return authoritativeTier.getAndPut(key, value); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public ValueHolder putIfAbsent(K key, V value, Consumer put) throws StoreAccessException { + try { + return authoritativeTier.putIfAbsent(key, value, put); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public boolean remove(K key) throws StoreAccessException { + try { + return authoritativeTier.remove(key); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public ValueHolder getAndRemove(K key) throws StoreAccessException { + try { + return authoritativeTier.getAndRemove(key); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public RemoveStatus remove(K key, V value) throws StoreAccessException { + try { + return authoritativeTier.remove(key, value); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public ValueHolder replace(K key, V value) throws StoreAccessException { + try { + return authoritativeTier.replace(key, value); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { + try { + return authoritativeTier.replace(key, oldValue, newValue); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public void clear() throws StoreAccessException { + swapCachingTiers(); + try { + authoritativeTier.clear(); + } finally { + try { + realCachingTier.clear(); + } finally { + swapBackCachingTiers(); + } + } + } + + private void invalidateAllInternal() throws StoreAccessException { + swapCachingTiers(); + try { + realCachingTier.invalidateAll(); + } finally { + swapBackCachingTiers(); + } + } + + private void swapCachingTiers() { + boolean interrupted = false; + while(!cachingTierRef.compareAndSet(realCachingTier, noopCachingTier)) { + synchronized (noopCachingTier) { + if(cachingTierRef.get() == noopCachingTier) { + try { + noopCachingTier.wait(); + } catch (InterruptedException e) { + interrupted = true; + } + } + } + } + if(interrupted) { + Thread.currentThread().interrupt(); + } + } + + private void swapBackCachingTiers() { + if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { + throw new AssertionError("Something bad happened"); + } + synchronized (noopCachingTier) { + noopCachingTier.notify(); + } + } + + @Override + public StoreEventSource getStoreEventSource() { + return authoritativeTier.getStoreEventSource(); + } + + @Override + public Iterator>> iterator() { + Iterator>> authoritativeIterator = authoritativeTier.iterator(); + + return new Iterator>>() { + + private StoreAccessException prefetchFailure; + private Cache.Entry> prefetched; + + { + try { + prefetched = advance(); + } catch (StoreAccessException sae) { + prefetchFailure = sae; + } + } + + @Override + public boolean hasNext() { + return prefetched != null || prefetchFailure != null; + } + + @Override + public Cache.Entry> next() throws StoreAccessException { + StoreAccessException nextFailure = prefetchFailure; + Cache.Entry> next = prefetched; + + try { + prefetchFailure = null; + prefetched = advance(); + } catch (StoreAccessException sae) { + prefetchFailure = sae; + prefetched = null; + } + if (nextFailure == null) { + if (next == null) { + throw new NoSuchElementException(); + } else { + return next; + } + } else { + throw nextFailure; + } + } + + private Cache.Entry> advance() throws StoreAccessException { + while (authoritativeIterator.hasNext()) { + Cache.Entry> next = authoritativeIterator.next(); + K authKey = next.getKey(); + + ValueHolder checked = cachingTier().getOrDefault(authKey, key -> next.getValue()); + + if (checked != null) { + return new Cache.Entry>() { + @Override + public K getKey() { + return authKey; + } + + @Override + public ValueHolder getValue() { + return checked; + } + }; + } + } + + return null; + } + }; + } + + @Override + public ValueHolder getAndCompute(final K key, final BiFunction mappingFunction) throws StoreAccessException { + try { + return authoritativeTier.getAndCompute(key, mappingFunction); + } finally { + cachingTier().invalidate(key); + } + } + + @Override + public ValueHolder computeAndGet(final K key, final BiFunction mappingFunction, final Supplier replaceEqual, Supplier invokeWriter) throws StoreAccessException { + try { + return authoritativeTier.computeAndGet(key, mappingFunction, replaceEqual, () -> false); + } finally { + cachingTier().invalidate(key); + } + } + + public ValueHolder computeIfAbsent(final K key, final Function mappingFunction) throws StoreAccessException { + try { + return cachingTier().getOrComputeIfAbsent(key, keyParam -> { + try { + return authoritativeTier.computeIfAbsentAndFault(keyParam, mappingFunction); + } catch (StoreAccessException cae) { + throw new StorePassThroughException(cae); + } + }); + } catch (StoreAccessException ce) { + return handleStoreAccessException(ce); + } + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { + try { + return authoritativeTier.bulkCompute(keys, remappingFunction); + } finally { + for (K key : keys) { + cachingTier().invalidate(key); + } + } + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { + try { + return authoritativeTier.bulkCompute(keys, remappingFunction, replaceEqual); + } finally { + for (K key : keys) { + cachingTier().invalidate(key); + } + } + } + + @Override + public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { + try { + return authoritativeTier.bulkComputeIfAbsent(keys, mappingFunction); + } finally { + for (K key : keys) { + cachingTier().invalidate(key); + } + } + } + + @Override + public List getConfigurationChangeListeners() { + List configurationChangeListenerList + = new ArrayList<>(); + configurationChangeListenerList.addAll(realCachingTier.getConfigurationChangeListeners()); + configurationChangeListenerList.addAll(authoritativeTier.getConfigurationChangeListeners()); + return configurationChangeListenerList; + } + + private CachingTier cachingTier() { + return cachingTierRef.get(); + } + + private ValueHolder handleStoreAccessException(StoreAccessException ce) throws StoreAccessException { + Throwable cause = ce.getCause(); + if (cause instanceof StorePassThroughException) { + throw (StoreAccessException) cause.getCause(); + } + if (cause instanceof Error) { + throw (Error) cause; + } + if (cause instanceof RuntimeException) { + throw (RuntimeException) cause; + } + throw new RuntimeException("Unexpected checked exception wrapped in StoreAccessException", cause); + } + + @ServiceDependencies({CachingTier.Provider.class, AuthoritativeTier.Provider.class}) + @OptionalServiceDependencies("org.ehcache.core.spi.service.StatisticsService") + public static class Provider implements Store.Provider { + + private volatile ServiceProvider serviceProvider; + private final ConcurrentMap, Map.Entry> providersMap = new ConcurrentWeakIdentityHashMap<>(); + + @Override + public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { + if (resourceTypes.size() == 1) { + return 0; + } + ResourceType authorityResource = getAuthorityResource(resourceTypes); + int authorityRank = 0; + Collection authorityProviders = serviceProvider.getServicesOfType(AuthoritativeTier.Provider.class); + for (AuthoritativeTier.Provider authorityProvider : authorityProviders) { + int newRank = authorityProvider.rankAuthority(authorityResource, serviceConfigs); + if (newRank > authorityRank) { + authorityRank = newRank; + } + } + if (authorityRank == 0) { + return 0; + } + Set> cachingResources = new HashSet<>(resourceTypes); + cachingResources.remove(authorityResource); + int cachingTierRank = 0; + Collection cachingTierProviders = serviceProvider.getServicesOfType(CachingTier.Provider.class); + for (CachingTier.Provider cachingTierProvider : cachingTierProviders) { + int newRank = cachingTierProvider.rankCachingTier(cachingResources, serviceConfigs); + if (newRank > cachingTierRank) { + cachingTierRank = newRank; + } + } + if (cachingTierRank == 0) { + return 0; + } + return authorityRank + cachingTierRank; + } + + private ResourceType getAuthorityResource(Set> resourceTypes) { + ResourceType authorityResource = null; + for (ResourceType resourceType : resourceTypes) { + if (authorityResource == null || authorityResource.getTierHeight() > resourceType.getTierHeight()) { + authorityResource = resourceType; + } + } + return authorityResource; + } + + @Override + public Store createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + final List> enhancedServiceConfigs = new ArrayList<>(Arrays.asList(serviceConfigs)); + + final ResourcePools resourcePools = storeConfig.getResourcePools(); + if (rank(resourcePools.getResourceTypeSet(), enhancedServiceConfigs) == 0) { + throw new IllegalArgumentException("TieredStore.Provider does not support configured resource types " + + resourcePools.getResourceTypeSet()); + } + + ResourceType authorityResource = getAuthorityResource(resourcePools.getResourceTypeSet()); + AuthoritativeTier.Provider authoritativeTierProvider = getAuthoritativeTierProvider(authorityResource, enhancedServiceConfigs); + + Set> cachingResources = new HashSet<>(resourcePools.getResourceTypeSet()); + cachingResources.remove(authorityResource); + + CachingTier.Provider cachingTierProvider = getCachingTierProvider(cachingResources, enhancedServiceConfigs); + + final ServiceConfiguration[] configurations = + enhancedServiceConfigs.toArray(new ServiceConfiguration[enhancedServiceConfigs.size()]); + CachingTier cachingTier = cachingTierProvider.createCachingTier(storeConfig, configurations); + AuthoritativeTier authoritativeTier = authoritativeTierProvider.createAuthoritativeTier(storeConfig, configurations); + + TieredStore store = new TieredStore<>(cachingTier, authoritativeTier); + StatisticsService statisticsService = serviceProvider.getService(StatisticsService.class); + if (statisticsService != null) { + statisticsService.registerWithParent(cachingTier, store); + statisticsService.registerWithParent(authoritativeTier, store); + } + registerStore(store, cachingTierProvider, authoritativeTierProvider); + return store; + } + + private CachingTier.Provider getCachingTierProvider(Set> cachingResources, List> enhancedServiceConfigs) { + CachingTier.Provider cachingTierProvider = null; + Collection cachingTierProviders = serviceProvider.getServicesOfType(CachingTier.Provider.class); + for (CachingTier.Provider provider : cachingTierProviders) { + if (provider.rankCachingTier(cachingResources, enhancedServiceConfigs) != 0) { + cachingTierProvider = provider; + break; + } + } + if (cachingTierProvider == null) { + throw new AssertionError("No CachingTier.Provider found although ranking found one for " + cachingResources); + } + return cachingTierProvider; + } + + AuthoritativeTier.Provider getAuthoritativeTierProvider(ResourceType authorityResource, List> enhancedServiceConfigs) { + AuthoritativeTier.Provider authoritativeTierProvider = null; + Collection authorityProviders = serviceProvider.getServicesOfType(AuthoritativeTier.Provider.class); + int highestRank = 0; + for (AuthoritativeTier.Provider provider : authorityProviders) { + int rank = provider.rankAuthority(authorityResource, enhancedServiceConfigs); + if (rank != 0) { + if (highestRank < rank) { + authoritativeTierProvider = provider; + highestRank = rank; + } + } + } + if (authoritativeTierProvider == null) { + throw new AssertionError("No AuthoritativeTier.Provider found although ranking found one for " + authorityResource); + } + return authoritativeTierProvider; + } + + void registerStore(final TieredStore store, final CachingTier.Provider cachingTierProvider, final AuthoritativeTier.Provider authoritativeTierProvider) { + if(providersMap.putIfAbsent(store, new AbstractMap.SimpleEntry<>(cachingTierProvider, authoritativeTierProvider)) != null) { + throw new IllegalStateException("Instance of the Store already registered!"); + } + } + + @Override + public void releaseStore(Store resource) { + Map.Entry entry = providersMap.get(resource); + if (entry == null) { + throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); + } + TieredStore tieredStore = (TieredStore) resource; + // Stop propagating invalidation to higher tier since they will be released before the authoritative tier + // and thus not be in a state when they can invalidate anymore + tieredStore.authoritativeTier.setInvalidationValve(new AuthoritativeTier.InvalidationValve() { + @Override + public void invalidateAll() { + } + + @Override + public void invalidateAllWithHash(long hash) { + } + }); + entry.getKey().releaseCachingTier(tieredStore.realCachingTier); + entry.getValue().releaseAuthoritativeTier(tieredStore.authoritativeTier); + } + + @Override + public void initStore(Store resource) { + Map.Entry entry = providersMap.get(resource); + if (entry == null) { + throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); + } + TieredStore tieredStore = (TieredStore) resource; + entry.getKey().initCachingTier(tieredStore.realCachingTier); + entry.getValue().initAuthoritativeTier(tieredStore.authoritativeTier); + } + + @Override + public void start(ServiceProvider serviceProvider) { + this.serviceProvider = serviceProvider; + } + + @Override + public void stop() { + this.serviceProvider = null; + providersMap.clear(); + } + } + + private static class NoopCachingTier implements CachingTier { + + private final AuthoritativeTier authoritativeTier; + + public NoopCachingTier(final AuthoritativeTier authoritativeTier) { + this.authoritativeTier = authoritativeTier; + } + + @Override + public ValueHolder getOrComputeIfAbsent(final K key, final Function> source) { + final ValueHolder apply = source.apply(key); + authoritativeTier.flush(key, apply); + return apply; + } + + @Override + public ValueHolder getOrDefault(K key, Function> source) { + return source.apply(key); + } + + @Override + public void invalidate(final K key) { + // noop + } + + @Override + public void invalidateAll() { + // noop + } + + @Override + public void clear() { + // noop + } + + @Override + public void setInvalidationListener(final InvalidationListener invalidationListener) { + // noop + } + + @Override + public void invalidateAllWithHash(long hash) { + // noop + } + + @Override + public List getConfigurationChangeListeners() { + return null; + } + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStoreProviderFactory.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStoreProviderFactory.java similarity index 85% rename from impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStoreProviderFactory.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStoreProviderFactory.java index 0a215b71b1..17bb23a501 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStoreProviderFactory.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStoreProviderFactory.java @@ -16,21 +16,23 @@ package org.ehcache.impl.internal.store.tiering; -import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.osgi.service.component.annotations.Component; /** * @author Ludovic Orban */ +@Component public class TieredStoreProviderFactory implements ServiceFactory { @Override - public TieredStore.Provider create(ServiceCreationConfiguration configuration) { + public TieredStore.Provider create(ServiceCreationConfiguration configuration) { return new TieredStore.Provider(); } @Override - public Class getServiceType() { + public Class getServiceType() { return TieredStore.Provider.class; } } diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/internal/util/Pacer.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/util/Pacer.java new file mode 100644 index 0000000000..5f49bf05ae --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/internal/util/Pacer.java @@ -0,0 +1,65 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.util; + +import org.ehcache.core.spi.time.TimeSource; + +import java.util.concurrent.atomic.AtomicLong; + +/** + * Class used to pace a call to prevent it from happening too frequently. + */ +public class Pacer { + + private final AtomicLong nextLogTime; + private final TimeSource timeSource; + private final long delay; + + /** + * Unique constructor + * + * @param delay delay between each call paced + */ + public Pacer(TimeSource timeSource, long delay) { + this.timeSource = timeSource; + this.delay = delay; + this.nextLogTime = new AtomicLong(timeSource.getTimeMillis()); + } + + /** + * Execute the call at the request page or call the alternative the rest of the time. An example would be to log + * a repetitive error once every 30 seconds or always if in debug. + *

                                      + *

                                      {@code
                                      +   * Pacer pacer = new Pacer(30_000);
                                      +   * String errorMessage = "my error";
                                      +   * pacer.pacedCall(() -> log.error(errorMessage), () -> log.debug(errorMessage);
                                      +   * }
                                      +   * 
                                      + * + * @param call call to be paced + * @param orElse call to be done everytime + */ + public void pacedCall(Runnable call, Runnable orElse) { + long now = timeSource.getTimeMillis(); + long end = nextLogTime.get(); + if(now >= end && nextLogTime.compareAndSet(end, now + delay)) { + call.run(); + } else { + orElse.run(); + } + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/util/ServiceUtil.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/util/ServiceUtil.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/util/ServiceUtil.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/util/ServiceUtil.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/util/ThreadFactoryUtil.java b/ehcache-impl/src/main/java/org/ehcache/impl/internal/util/ThreadFactoryUtil.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/util/ThreadFactoryUtil.java rename to ehcache-impl/src/main/java/org/ehcache/impl/internal/util/ThreadFactoryUtil.java diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java similarity index 98% rename from impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java rename to ehcache-impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java index 9383c5a849..2d3e273a19 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java @@ -170,7 +170,7 @@ private void checkStarted() { * {@inheritDoc} */ @Override - public void destroy(String name) throws CachePersistenceException { + public void destroy(String name) { checkStarted(); if(persistenceService == null) { @@ -218,7 +218,7 @@ public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier id return stateRepository; } - private CachePersistenceException newCachePersistenceException(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { + private CachePersistenceException newCachePersistenceException(PersistenceSpaceIdentifier identifier) { return new CachePersistenceException("Unknown space: " + identifier); } diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java similarity index 95% rename from impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java rename to ehcache-impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java index f2f85ab871..81f05f4554 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java @@ -24,6 +24,7 @@ import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.terracotta.utilities.io.Files; import java.io.File; import java.io.FileNotFoundException; @@ -33,7 +34,6 @@ import java.nio.channels.OverlappingFileLockException; import static org.ehcache.impl.persistence.FileUtils.createLocationIfRequiredAndVerify; -import static org.ehcache.impl.persistence.FileUtils.recursiveDeleteDirectoryContent; import static org.ehcache.impl.persistence.FileUtils.safeIdentifier; import static org.ehcache.impl.persistence.FileUtils.tryRecursiveDelete; import static org.ehcache.impl.persistence.FileUtils.validateName; @@ -121,7 +121,9 @@ public synchronized void stop() { // org.ehcache.internal.persistence.DefaultLocalPersistenceServiceTest.testLocksDirectoryAndUnlocks() // passes on windows rw.close(); - if (!lockFile.delete()) { + try { + Files.delete(lockFile.toPath()); + } catch (IOException e) { LOGGER.debug("Lock file was not deleted {}.", lockFile.getPath()); } } catch (IOException e) { @@ -185,16 +187,14 @@ public void destroySafeSpace(SafeSpaceIdentifier safeSpaceId, boolean verbose) { */ public void destroyAll(String owner) { File ownerDirectory = new File(rootDirectory, owner); - boolean cleared = true; if (ownerDirectory.exists() && ownerDirectory.isDirectory()) { - cleared = false; - if (recursiveDeleteDirectoryContent(ownerDirectory)) { + if (tryRecursiveDelete(ownerDirectory)) { LOGGER.debug("Destroyed all file based persistence contexts owned by {}", owner); - cleared = ownerDirectory.delete(); + } else { + LOGGER.warn("Could not delete all file based persistence contexts owned by {}", owner); } - } - if (!cleared) { - LOGGER.warn("Could not delete all file based persistence contexts owned by {}", owner); + } else { + LOGGER.warn("Could not delete all file based persistence contexts owned by {} - is not a directory!", owner); } } diff --git a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java similarity index 97% rename from impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java rename to ehcache-impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java index 91f367f6c9..b8077a0945 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java @@ -64,6 +64,7 @@ class FileBasedStateRepository implements StateRepository, Closeable { @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE") private void loadMaps() throws CachePersistenceException { try { + //noinspection ConstantConditions for (File file : dataDirectory.listFiles((dir, name) -> name.endsWith(HOLDER_FILE_SUFFIX))) { try (FileInputStream fis = new FileInputStream(file); ObjectInputStream oin = new ObjectInputStream(fis)) { @@ -122,6 +123,9 @@ public void close() throws IOException { } static class Tuple implements Serializable { + + private static final long serialVersionUID = 664492058736170101L; + final int index; final StateHolder holder; diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java new file mode 100644 index 0000000000..4a1ff25b34 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java @@ -0,0 +1,159 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.persistence; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import org.ehcache.CachePersistenceException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.utilities.io.Files; + +import java.io.File; +import java.io.IOException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.time.Duration; +import java.util.HashSet; +import java.util.Locale; +import java.util.Set; + +import static java.lang.Integer.toHexString; +import static java.nio.charset.StandardCharsets.UTF_8; + +/** + * A bunch of utility functions, mainly used by {@link DefaultLocalPersistenceService} and + * {@link FileBasedStateRepository} within this class. + */ +final class FileUtils { + private static final Logger LOGGER = LoggerFactory.getLogger(FileUtils.class); + private static final int DEL = 0x7F; + private static final char ESCAPE = '%'; + + private static final Set ILLEGALS = new HashSet<>(); + static { + ILLEGALS.add('/'); + ILLEGALS.add('\\'); + ILLEGALS.add('<'); + ILLEGALS.add('>'); + ILLEGALS.add(':'); + ILLEGALS.add('"'); + ILLEGALS.add('|'); + ILLEGALS.add('?'); + ILLEGALS.add('*'); + ILLEGALS.add('.'); + } + + static void createLocationIfRequiredAndVerify(final File rootDirectory) { + if(!rootDirectory.exists()) { + if(!rootDirectory.mkdirs()) { + throw new IllegalArgumentException("Directory couldn't be created: " + rootDirectory.getAbsolutePath()); + } + } else if(!rootDirectory.isDirectory()) { + throw new IllegalArgumentException("Location is not a directory: " + rootDirectory.getAbsolutePath()); + } + + if(!rootDirectory.canWrite()) { + throw new IllegalArgumentException("Location isn't writable: " + rootDirectory.getAbsolutePath()); + } + } + + static File createSubDirectory(File mainDirectory, String name) throws CachePersistenceException { + validateName(name); + File subDirectory = new File(mainDirectory, name); + create(subDirectory); + return subDirectory; + } + + static void validateName(String name) { + if (!name.matches("[a-zA-Z0-9\\-_]+")) { + throw new IllegalArgumentException("Name is invalid for persistence context: " + name); + } + } + + static void create(File directory) throws CachePersistenceException { + if (directory.isDirectory()) { + LOGGER.debug("Reusing {}", directory.getAbsolutePath()); + } else if (directory.mkdir()) { + LOGGER.debug("Created {}", directory.getAbsolutePath()); + } else if (directory.isDirectory()) { + // if create directory fails, check once more if it is due to concurrent creation. + LOGGER.debug("Reusing {}", directory.getAbsolutePath()); + } else { + throw new CachePersistenceException("Unable to create or reuse directory: " + directory.getAbsolutePath()); + } + } + + static boolean tryRecursiveDelete(File file) { + try { + Files.deleteTree(file.toPath(), Duration.ofMillis(250), FileUtils::gc); + return true; + } catch (IOException ioe) { + return false; + } + } + + @SuppressFBWarnings("DM_GC") + private static void gc() { + System.gc(); + System.runFinalization(); + } + + /** + * sanitize a name for valid file or directory name + * + * @param name the name to sanitize + * @return sanitized version of name + */ + static String safeIdentifier(String name) { + return safeIdentifier(name, true); + } + + static String safeIdentifier(String name, boolean withSha1) { + int len = name.length(); + StringBuilder sb = new StringBuilder(len); + for (int i = 0; i < len; i++) { + char c = name.charAt(i); + if (c <= ' ' || c >= DEL || ILLEGALS.contains(c) || c == ESCAPE) { + sb.append(ESCAPE); + sb.append(String.format("%04x", (int) c)); + } else { + sb.append(c); + } + } + if (withSha1) { + sb.append("_").append(sha1(name)); + } + return sb.toString(); + } + + private static String sha1(String input) { + StringBuilder sb = new StringBuilder(); + for (byte b : getSha1Digest().digest(input.getBytes(UTF_8))) { + sb.append(toHexString((b & 0xf0) >>> 4)); + sb.append(toHexString((b & 0xf))); + } + return sb.toString(); + } + + private static MessageDigest getSha1Digest() { + try { + return MessageDigest.getInstance("SHA-1"); + } catch (NoSuchAlgorithmException e) { + throw new AssertionError("All JDKs must have SHA-1"); + } + } +} diff --git a/impl/src/main/java/org/ehcache/impl/persistence/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/persistence/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/persistence/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/persistence/package-info.java diff --git a/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java similarity index 90% rename from impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java index ab3ccc5bb5..ddb8390083 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java @@ -60,7 +60,7 @@ public ByteBuffer serialize(byte[] object) throws SerializerException { * {@inheritDoc} */ @Override - public byte[] read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { + public byte[] read(ByteBuffer binary) { byte[] bytes = new byte[binary.remaining()]; binary.get(bytes); return bytes; @@ -70,7 +70,7 @@ public byte[] read(ByteBuffer binary) throws ClassNotFoundException, SerializerE * {@inheritDoc} */ @Override - public boolean equals(byte[] object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { + public boolean equals(byte[] object, ByteBuffer binary) { boolean equals = binary.equals(serialize(object)); binary.position(binary.limit()); return equals; diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java similarity index 88% rename from impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java index 094c9984be..a5fb2bea5f 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java @@ -58,16 +58,15 @@ public ByteBuffer serialize(Character object) { * {@inheritDoc} */ @Override - public Character read(ByteBuffer binary) throws ClassNotFoundException { - char c = binary.getChar(); - return c; + public Character read(ByteBuffer binary) { + return binary.getChar(); } /** * {@inheritDoc} */ @Override - public boolean equals(Character object, ByteBuffer binary) throws ClassNotFoundException { + public boolean equals(Character object, ByteBuffer binary) { return object.equals(read(binary)); } } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java similarity index 75% rename from impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java index e680c8a5f3..8dfbf59df4 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java @@ -28,7 +28,6 @@ import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Map.Entry; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.locks.Lock; @@ -37,10 +36,12 @@ import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.impl.internal.util.ByteBufferInputStream; +import org.ehcache.core.util.ByteBufferInputStream; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.StatefulSerializer; +import static java.lang.Math.max; + /** * A trivially compressed Java serialization based serializer. *

                                      @@ -51,12 +52,13 @@ */ public class CompactJavaSerializer implements StatefulSerializer { - private volatile StateHolder readLookup; - private final ConcurrentMap readLookupLocalCache = new ConcurrentHashMap<>(); - private final ConcurrentMap writeLookup = new ConcurrentHashMap<>(); + private volatile StateHolder persistentState; + private final ConcurrentMap readLookupCache = new ConcurrentHashMap<>(); + private final ConcurrentMap writeLookupCache = new ConcurrentHashMap<>(); private final Lock lock = new ReentrantLock(); private int nextStreamIndex = 0; + private boolean potentiallyInconsistent; private final transient ClassLoader loader; @@ -78,8 +80,8 @@ public static Class> asTypedSerializer() { @Override public void init(final StateRepository stateRepository) { - this.readLookup = stateRepository.getPersistentStateHolder("CompactJavaSerializer-ObjectStreamClassIndex", Integer.class, ObjectStreamClass.class); - loadMappingsInWriteContext(readLookup.entrySet(), true); + this.persistentState = stateRepository.getPersistentStateHolder("CompactJavaSerializer-ObjectStreamClassIndex", Integer.class, ObjectStreamClass.class, c -> true, null); + refreshMappingsFromStateRepository(); } /** @@ -130,57 +132,80 @@ public boolean equals(T object, ByteBuffer binary) throws ClassNotFoundException return object.equals(read(binary)); } - private int getOrAddMapping(ObjectStreamClass desc) throws IOException { + private int getOrAddMapping(ObjectStreamClass desc) { SerializableDataKey probe = new SerializableDataKey(desc, false); - Integer rep = writeLookup.get(probe); - if (rep != null) { + Integer rep = writeLookupCache.get(probe); + if (rep == null) { + return addMappingUnderLock(desc, probe); + } else { return rep; } + } - // Install new rep - locking + private int addMappingUnderLock(ObjectStreamClass desc, SerializableDataKey probe) { lock.lock(); try { - return addMappingUnderLock(desc, probe); + if (potentiallyInconsistent) { + refreshMappingsFromStateRepository(); + potentiallyInconsistent = false; + } + while (true) { + Integer rep = writeLookupCache.get(probe); + if (rep != null) { + return rep; + } + rep = nextStreamIndex++; + + try { + ObjectStreamClass disconnected = disconnect(desc); + ObjectStreamClass existingOsc = persistentState.putIfAbsent(rep, disconnected); + if (existingOsc == null) { + cacheMapping(rep, disconnected); + return rep; + } else { + cacheMapping(rep, disconnect(existingOsc)); + } + } catch (Throwable t) { + potentiallyInconsistent = true; + throw t; + } + } } finally { lock.unlock(); } } - private int addMappingUnderLock(ObjectStreamClass desc, SerializableDataKey probe) throws IOException { - ObjectStreamClass disconnected = disconnect(desc); - SerializableDataKey key = new SerializableDataKey(disconnected, true); - while (true) { - Integer rep = writeLookup.get(probe); - if (rep != null) { - return rep; - } - rep = nextStreamIndex++; - - ObjectStreamClass existingOsc = readLookup.putIfAbsent(rep, disconnected); - if (existingOsc == null) { - writeLookup.put(key, rep); - readLookupLocalCache.put(rep, disconnected); - return rep; - } else { - ObjectStreamClass discOsc = disconnect(existingOsc); - writeLookup.put(new SerializableDataKey(discOsc, true), rep); - readLookupLocalCache.put(rep, discOsc); - } + private void refreshMappingsFromStateRepository() { + int highestIndex = -1; + for (Entry entry : persistentState.entrySet()) { + Integer index = entry.getKey(); + cacheMapping(entry.getKey(), disconnect(entry.getValue())); + highestIndex = max(highestIndex, index); } + nextStreamIndex = highestIndex + 1; } - private void loadMappingsInWriteContext(Set> entries, boolean throwOnFailedPutIfAbsent) { - for (Entry entry : entries) { - Integer index = entry.getKey(); - ObjectStreamClass discOsc = disconnect(entry.getValue()); - readLookupLocalCache.put(index, discOsc); - if (writeLookup.putIfAbsent(new SerializableDataKey(discOsc, true), index) != null && throwOnFailedPutIfAbsent) { - throw new AssertionError("Corrupted data " + readLookup); + private void cacheMapping(Integer index, ObjectStreamClass disconnectedOsc) { + readLookupCache.merge(index, disconnectedOsc, (existing, update) -> { + if (equals(existing, update)) { + return existing; + } else { + throw new AssertionError("Corrupted data:\n" + + "State Repository: " + persistentState + "\n" + + "Local Write Lookup: " + writeLookupCache + "\n" + + "Local Read Lookup: " + readLookupCache); } - if (nextStreamIndex < index + 1) { - nextStreamIndex = index + 1; + }); + writeLookupCache.merge(new SerializableDataKey(disconnectedOsc, true), index, (existing, update) -> { + if (existing.equals(update)) { + return existing; + } else { + throw new AssertionError("Corrupted data:\n" + + "State Repository: " + persistentState + "\n" + + "Local Write Lookup: " + writeLookupCache + "\n" + + "Local Read Lookup: " + readLookupCache); } - } + }); } class OOS extends ObjectOutputStream { @@ -205,16 +230,13 @@ class OIS extends ObjectInputStream { } @Override - protected ObjectStreamClass readClassDescriptor() throws IOException, ClassNotFoundException { + protected ObjectStreamClass readClassDescriptor() throws IOException { int key = readInt(); - ObjectStreamClass objectStreamClass = readLookupLocalCache.get(key); - if (objectStreamClass != null) { - return objectStreamClass; + ObjectStreamClass objectStreamClass = readLookupCache.get(key); + if (objectStreamClass == null) { + objectStreamClass = persistentState.get(key); + cacheMapping(key, disconnect(objectStreamClass)); } - objectStreamClass = readLookup.get(key); - ObjectStreamClass discOsc = disconnect(objectStreamClass); - readLookupLocalCache.put(key, discOsc); - writeLookup.putIfAbsent(new SerializableDataKey(discOsc, true), key); return objectStreamClass; } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java similarity index 97% rename from impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java index cd482def9a..f74607f2fd 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java @@ -59,8 +59,7 @@ public ByteBuffer serialize(Double object) { */ @Override public Double read(ByteBuffer binary) throws ClassNotFoundException { - double d = binary.getDouble(); - return d; + return binary.getDouble(); } /** diff --git a/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java similarity index 88% rename from impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java index 6dab6a8cdc..23714b4531 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java @@ -58,16 +58,15 @@ public ByteBuffer serialize(Float object) { * {@inheritDoc} */ @Override - public Float read(ByteBuffer binary) throws ClassNotFoundException { - float f = binary.getFloat(); - return f; + public Float read(ByteBuffer binary) { + return binary.getFloat(); } /** * {@inheritDoc} */ @Override - public boolean equals(Float object, ByteBuffer binary) throws ClassNotFoundException { + public boolean equals(Float object, ByteBuffer binary) { return object.equals(read(binary)); } } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java similarity index 88% rename from impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java index 38097f322f..40ddc10089 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java @@ -58,16 +58,15 @@ public ByteBuffer serialize(Integer object) { * {@inheritDoc} */ @Override - public Integer read(ByteBuffer binary) throws ClassNotFoundException { - int i = binary.getInt(); - return i; + public Integer read(ByteBuffer binary) { + return binary.getInt(); } /** * {@inheritDoc} */ @Override - public boolean equals(Integer object, ByteBuffer binary) throws ClassNotFoundException { + public boolean equals(Integer object, ByteBuffer binary) { return object.equals(read(binary)); } } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java similarity index 97% rename from impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java index 7d7697d29a..09dc514002 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java @@ -59,8 +59,7 @@ public ByteBuffer serialize(Long object) { */ @Override public Long read(ByteBuffer binary) throws ClassNotFoundException { - long l = binary.getLong(); - return l; + return binary.getLong(); } /** diff --git a/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java similarity index 91% rename from impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java index 12e938143b..a48c066453 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.impl.internal.util.ByteBufferInputStream; +import org.ehcache.core.util.ByteBufferInputStream; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.SerializerException; @@ -45,17 +45,10 @@ public PlainJavaSerializer(ClassLoader classLoader) { @Override public ByteBuffer serialize(T object) { ByteArrayOutputStream bout = new ByteArrayOutputStream(); - try { - ObjectOutputStream oout = new ObjectOutputStream(bout); + try(ObjectOutputStream oout = new ObjectOutputStream(bout)) { oout.writeObject(object); } catch (IOException e) { throw new SerializerException(e); - } finally { - try { - bout.close(); - } catch (IOException e) { - throw new AssertionError(e); - } } return ByteBuffer.wrap(bout.toByteArray()); } @@ -94,7 +87,7 @@ public OIS(InputStream in, ClassLoader classLoader) throws IOException { } @Override - protected Class resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException { + protected Class resolveClass(ObjectStreamClass desc) throws ClassNotFoundException { try { return Class.forName(desc.getName(), false, classLoader); } catch (ClassNotFoundException cnfe) { @@ -107,7 +100,7 @@ protected Class resolveClass(ObjectStreamClass desc) throws IOException, Clas } @Override - protected Class resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { + protected Class resolveProxyClass(String[] interfaces) throws ClassNotFoundException { Class[] interfaceClasses = new Class[interfaces.length]; for (int i = 0; i < interfaces.length; i++) { interfaceClasses[i] = Class.forName(interfaces[i], false, classLoader); diff --git a/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java similarity index 94% rename from impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java index 45456ab860..cd48659b2c 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java @@ -52,14 +52,16 @@ public StringSerializer(ClassLoader classLoader) { */ @Override public ByteBuffer serialize(String object) { - ByteArrayOutputStream bout = new ByteArrayOutputStream(object.length()); - try { - int length = object.length(); + int length = object.length(); + + try(ByteArrayOutputStream bout = new ByteArrayOutputStream(length)) { int i = 0; for (; i < length; i++) { char c = object.charAt(i); - if ((c == 0x0000) || (c > 0x007f)) break; + if (c == 0x0000 || c > 0x007f) { + break; + } bout.write(c); } @@ -79,14 +81,12 @@ public ByteBuffer serialize(String object) { bout.write(0x80 | (c & 0x3f)); } } - } finally { - try { - bout.close(); - } catch (IOException ex) { - throw new AssertionError(ex); - } + + return ByteBuffer.wrap(bout.toByteArray()); + + } catch (IOException e) { + throw new RuntimeException(e); } - return ByteBuffer.wrap(bout.toByteArray()); } /** diff --git a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java similarity index 95% rename from impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java index 0a75ff8a17..41a3b62955 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java @@ -26,6 +26,8 @@ public class TransientStateHolder implements StateHolder, Serializable { + private static final long serialVersionUID = -6350493651462112289L; + private final ConcurrentMap map = new ConcurrentHashMap<>(); @Override diff --git a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java similarity index 95% rename from impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java index 6010b8ed6c..9fb70bda74 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java +++ b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java @@ -29,7 +29,7 @@ */ public class TransientStateRepository implements StateRepository { - private ConcurrentMap> knownHolders = new ConcurrentHashMap<>(); + private final ConcurrentMap> knownHolders = new ConcurrentHashMap<>(); @Override @SuppressWarnings("unchecked") diff --git a/impl/src/main/java/org/ehcache/impl/serialization/package-info.java b/ehcache-impl/src/main/java/org/ehcache/impl/serialization/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/serialization/package-info.java rename to ehcache-impl/src/main/java/org/ehcache/impl/serialization/package-info.java diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/store/BaseStore.java b/ehcache-impl/src/main/java/org/ehcache/impl/store/BaseStore.java new file mode 100644 index 0000000000..d3971a68d1 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/store/BaseStore.java @@ -0,0 +1,134 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.store; + +import org.ehcache.config.ResourceType; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.statistics.StatisticType; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.statistics.OperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.ehcache.core.statistics.ZeroOperationStatistic; +import org.ehcache.spi.service.OptionalServiceDependencies; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; + +import java.io.Serializable; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Supplier; + +import static java.util.Optional.ofNullable; + +/** + * Base class to most stores. It provides functionality common to stores in general. A given store implementation is not required to extend + * it but the implementor might find it easier to do so. + */ +public abstract class BaseStore implements Store { + + /* Type of the keys stored in this store */ + protected final Class keyType; + /* Type of the values stored in this store */ + protected final Class valueType; + /** Tells if this store is by itself or in a tiered setup */ + protected final boolean operationStatisticsEnabled; + protected final StatisticsService statisticsService; + + public BaseStore(Configuration config, StatisticsService statisticsService) { + this(config.getKeyType(), config.getValueType(), config.isOperationStatisticsEnabled(), statisticsService); + } + + public BaseStore(Class keyType, Class valueType, boolean operationStatisticsEnabled, StatisticsService statisticsService) { + this.keyType = keyType; + this.valueType = valueType; + this.operationStatisticsEnabled = operationStatisticsEnabled; + this.statisticsService = statisticsService; + } + + protected void checkKey(K keyObject) { + if (!keyType.isInstance(Objects.requireNonNull((Object) keyObject))) { + throw new ClassCastException("Invalid key type, expected : " + keyType.getName() + " but was : " + keyObject.getClass().getName()); + } + } + + protected void checkValue(V valueObject) { + if (!valueType.isInstance(Objects.requireNonNull((Object) valueObject))) { + throw new ClassCastException("Invalid value type, expected : " + valueType.getName() + " but was : " + valueObject.getClass().getName()); + } + } + + /** + * Create an {@code OperationObserver} using {@code this} for the context. + * + * @param name name of the statistic + * @param outcome class of the possible outcomes + * @param canBeDisabled if this statistic can be disabled by a {@link StoreStatisticsConfiguration} + * @param type of the outcome + * @return the created observer + */ + protected > OperationObserver createObserver(String name, Class outcome, boolean canBeDisabled) { + if (statisticsService == null || !operationStatisticsEnabled && canBeDisabled) { + return ZeroOperationStatistic.get(); + } else { + return statisticsService.createOperationStatistics(name, outcome, getStatisticsTag(), this); + } + } + + protected void registerStatistic(String name, StatisticType type, Set tags, Supplier valueSupplier) { + if (statisticsService != null) { + statisticsService.registerStatistic(this, name, type, tags, valueSupplier); + } + } + + protected abstract String getStatisticsTag(); + + + @OptionalServiceDependencies("org.ehcache.core.spi.service.StatisticsService") + protected static abstract class BaseStoreProvider implements Store.Provider { + + private volatile ServiceProvider serviceProvider; + + protected , T extends Enum> OperationStatistic createTranslatedStatistic(BaseStore store, String statisticName, Map> translation, String targetName) { + return getStatisticsService() + .map(s -> s.registerStoreStatistics(store, targetName, getResourceType().getTierHeight(), store.getStatisticsTag(), translation, statisticName)) + .orElse(ZeroOperationStatistic.get()); + } + + @Override + public void start(ServiceProvider serviceProvider) { + this.serviceProvider = serviceProvider; + } + + @Override + public void stop() { + this.serviceProvider = null; + } + + protected ServiceProvider getServiceProvider() { + return this.serviceProvider; + } + + protected abstract ResourceType getResourceType(); + + protected Optional getStatisticsService() { + return ofNullable(serviceProvider.getService(StatisticsService.class)); + } + } +} diff --git a/ehcache-impl/src/main/java/org/ehcache/impl/store/DefaultStoreEventDispatcher.java b/ehcache-impl/src/main/java/org/ehcache/impl/store/DefaultStoreEventDispatcher.java new file mode 100644 index 0000000000..c194991132 --- /dev/null +++ b/ehcache-impl/src/main/java/org/ehcache/impl/store/DefaultStoreEventDispatcher.java @@ -0,0 +1,41 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.store; + +import org.ehcache.core.events.StoreEventSink; +import org.ehcache.impl.internal.events.AbstractStoreEventDispatcher; + +/** + * The default {@link org.ehcache.core.events.StoreEventDispatcher} implementation. + */ +public class DefaultStoreEventDispatcher extends AbstractStoreEventDispatcher { + + public DefaultStoreEventDispatcher(int dispatcherConcurrency) { + super(dispatcherConcurrency); + } + + @Override + public StoreEventSink eventSink() { + if (getListeners().isEmpty()) { + @SuppressWarnings("unchecked") + StoreEventSink noOpEventSink = (StoreEventSink) NO_OP_EVENT_SINK; + return noOpEventSink; + } else { + return super.eventSink(); + } + } +} diff --git a/impl/src/main/java/org/ehcache/impl/store/HashUtils.java b/ehcache-impl/src/main/java/org/ehcache/impl/store/HashUtils.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/store/HashUtils.java rename to ehcache-impl/src/main/java/org/ehcache/impl/store/HashUtils.java diff --git a/ehcache-impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/ehcache-impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory new file mode 100644 index 0000000000..c2cf30fb9a --- /dev/null +++ b/ehcache-impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory @@ -0,0 +1,19 @@ +org.ehcache.impl.internal.store.heap.OnHeapStoreProviderFactory +org.ehcache.impl.internal.store.offheap.OffHeapStoreProviderFactory +org.ehcache.impl.internal.store.disk.OffHeapDiskStoreProviderFactory +org.ehcache.impl.internal.store.tiering.TieredStoreProviderFactory +org.ehcache.impl.internal.store.tiering.CompoundCachingTierProviderFactory +org.ehcache.impl.internal.store.loaderwriter.LoaderWriterStoreProviderFactory + +org.ehcache.impl.internal.TimeSourceServiceFactory +org.ehcache.impl.internal.spi.serialization.DefaultSerializationProviderFactory +org.ehcache.impl.internal.spi.loaderwriter.DefaultCacheLoaderWriterProviderFactory +org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProviderFactory +org.ehcache.impl.internal.executor.DefaultExecutionServiceFactory +org.ehcache.impl.internal.persistence.DefaultLocalPersistenceServiceFactory +org.ehcache.impl.internal.persistence.DefaultDiskResourceServiceFactory +org.ehcache.impl.internal.loaderwriter.writebehind.WriteBehindProviderFactory +org.ehcache.impl.internal.events.CacheEventNotificationListenerServiceProviderFactory +org.ehcache.impl.internal.spi.copy.DefaultCopyProviderFactory +org.ehcache.impl.internal.sizeof.DefaultSizeOfEngineProviderFactory +org.ehcache.impl.internal.spi.resilience.DefaultResilienceStrategyProviderFactory diff --git a/impl/src/slow-test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapITest.java b/ehcache-impl/src/slow-test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapITest.java similarity index 95% rename from impl/src/slow-test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapITest.java rename to ehcache-impl/src/slow-test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapITest.java index 25ff999284..84f8356cce 100644 --- a/impl/src/slow-test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapITest.java +++ b/ehcache-impl/src/slow-test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapITest.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.concurrent; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.junit.Test; import java.util.ArrayList; @@ -30,7 +29,7 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; /** * @author Ludovic Orban @@ -146,7 +145,7 @@ private void assertThings(final ConcurrentHashMap map) { assertThat(map.size(), is(ENTRIES)); for(int i = 0; i < 100; i ++) { - final HashSet randomValues = new HashSet(getRandomValues(map, ENTRIES)); + final HashSet randomValues = new HashSet(getRandomValues(map)); assertThat(randomValues.size(), is(ENTRIES)); for (Object randomValue : randomValues) { assertThat(randomValue, instanceOf(KeyHolder.class)); @@ -156,10 +155,10 @@ private void assertThings(final ConcurrentHashMap map) { } } - private static List getRandomValues(Map map, int amount) { + private static List getRandomValues(Map map) { List values = new ArrayList(map.values()); Collections.shuffle(values); - return values.subList(0, amount); + return values.subList(0, ENTRIES); } @@ -202,7 +201,7 @@ static class EvilComparableKey extends EvilKey implements Comparable cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10L, EntryUnit.ENTRIES).disk(10, MemoryUnit.MB).build()).build(); + + try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(new CacheManagerPersistenceConfiguration(diskPath.newFolder("myData"))) + .withCache("cache", cacheConfiguration).build(true)) { + + Cache cache = cacheManager.getCache("cache", Long.class, String.class); + + ResourcePoolsBuilder poolsBuilder = ResourcePoolsBuilder.newResourcePoolsBuilder(); + poolsBuilder = poolsBuilder.heap(20L, EntryUnit.ENTRIES); + ResourcePools pools = poolsBuilder.build(); + cache.getRuntimeConfiguration().updateResourcePools(pools); + assertThat(cache.getRuntimeConfiguration().getResourcePools() + .getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L)); + pools = poolsBuilder.build(); + cache.getRuntimeConfiguration().updateResourcePools(pools); + assertThat(cache.getRuntimeConfiguration().getResourcePools() + .getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L)); + } + } + + @Test + public void testUpdateFailureDoesNotUpdate() { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10L, EntryUnit.ENTRIES).build()).build(); + + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("cache", cacheConfiguration).build(true)) { + + Cache cache = cacheManager.getCache("cache", Long.class, String.class); + + ResourcePoolsBuilder poolsBuilder = ResourcePoolsBuilder.newResourcePoolsBuilder(); + poolsBuilder = poolsBuilder.heap(20L, EntryUnit.ENTRIES).disk(10, MemoryUnit.MB); + ResourcePools pools = poolsBuilder.build(); + try { + cache.getRuntimeConfiguration().updateResourcePools(pools); + fail("We expect illegal arguments"); + } catch (IllegalArgumentException iae) { + // expected + assertThat(iae.getMessage(), is("Pools to be updated cannot contain previously undefined resources pools")); + } + assertThat(cache.getRuntimeConfiguration().getResourcePools() + .getPoolForResource(ResourceType.Core.HEAP).getSize(), is(10L)); + } + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java b/ehcache-impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java new file mode 100644 index 0000000000..4eaa86f64b --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java @@ -0,0 +1,579 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config.builders; + +import org.ehcache.config.*; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.spi.service.ServiceUtils; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.config.copy.DefaultCopierConfiguration; +import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; +import org.ehcache.impl.internal.resilience.RobustResilienceStrategy; +import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.CacheLoaderWriterConfiguration; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.test.MockitoUtil; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; +import org.hamcrest.core.IsSame; +import org.junit.Test; + +import static java.util.function.UnaryOperator.identity; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.assertEquals; +import static org.ehcache.test.MockitoUtil.mock; +import static org.junit.Assert.fail; + +public class CacheConfigurationBuilderTest { + + @Test + public void testWithEvictionAdvisor() throws Exception { + EvictionAdvisor evictionAdvisor = (key, value) -> false; + + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withEvictionAdvisor(evictionAdvisor) + .build(); + + @SuppressWarnings("unchecked") + Matcher> evictionAdvisorMatcher = sameInstance(cacheConfiguration + .getEvictionAdvisor()); + assertThat(evictionAdvisor, evictionAdvisorMatcher); + } + + @Test + public void testWithLoaderWriter() throws Exception { + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withLoaderWriter(loaderWriter) + .build(); + + CacheLoaderWriterConfiguration cacheLoaderWriterConfiguration = ServiceUtils.findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, cacheConfiguration.getServiceConfigurations()); + Object instance = ((ClassInstanceConfiguration) cacheLoaderWriterConfiguration).getInstance(); + assertThat(instance, Matchers.sameInstance(loaderWriter)); + } + + @Test + public void testWithoutLoaderWriter() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withLoaderWriter(mock(CacheLoaderWriter.class)) + .withoutLoaderWriter() + .build(); + + assertThat(cacheConfiguration.getServiceConfigurations(), not(hasItem(instanceOf(DefaultCacheLoaderWriterConfiguration.class)))); + } + + @Test + public void testWithKeySerializer() throws Exception { + Serializer keySerializer = mock(Serializer.class); + + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withKeySerializer(keySerializer) + .build(); + + + DefaultSerializerConfiguration serializerConfiguration = ServiceUtils.findSingletonAmongst(DefaultSerializerConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(serializerConfiguration.getType(), is(DefaultSerializerConfiguration.Type.KEY)); + Object instance = serializerConfiguration.getInstance(); + assertThat(instance, Matchers.sameInstance(keySerializer)); + } + + @Test + public void testWithKeySerializerClass() throws Exception { + @SuppressWarnings("unchecked") + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withKeySerializer((Class) JavaSerializer.class) + .build(); + + + DefaultSerializerConfiguration serializerConfiguration = ServiceUtils.findSingletonAmongst(DefaultSerializerConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(serializerConfiguration.getType(), is(DefaultSerializerConfiguration.Type.KEY)); + assertThat(serializerConfiguration.getClazz(), sameInstance(JavaSerializer.class)); + } + + @Test + public void testWithoutKeySerializer() throws Exception { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withKeySerializer(MockitoUtil.>mock(Serializer.class)) + .withDefaultKeySerializer() + .build(); + + assertThat(cacheConfiguration.getServiceConfigurations(), not(hasItem(instanceOf(DefaultSerializerConfiguration.class)))); + } + + @Test + public void testWithValueSerializer() throws Exception { + Serializer valueSerializer = mock(Serializer.class); + + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withValueSerializer(valueSerializer) + .build(); + + + DefaultSerializerConfiguration serializerConfiguration = ServiceUtils.findSingletonAmongst(DefaultSerializerConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(serializerConfiguration.getType(), is(DefaultSerializerConfiguration.Type.VALUE)); + Object instance = ((ClassInstanceConfiguration) serializerConfiguration).getInstance(); + assertThat(instance, Matchers.sameInstance(valueSerializer)); + } + + @Test + public void testWithValueSerializerClass() throws Exception { + @SuppressWarnings("unchecked") + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withValueSerializer((Class) JavaSerializer.class) + .build(); + + + DefaultSerializerConfiguration serializerConfiguration = ServiceUtils.findSingletonAmongst(DefaultSerializerConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(serializerConfiguration.getType(), is(DefaultSerializerConfiguration.Type.VALUE)); + assertThat(serializerConfiguration.getClazz(), sameInstance(JavaSerializer.class)); + } + + @Test + public void testWithoutValueSerializer() throws Exception { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withValueSerializer(MockitoUtil.>mock(Serializer.class)) + .withDefaultValueSerializer() + .build(); + + assertThat(cacheConfiguration.getServiceConfigurations(), not(hasItem(instanceOf(DefaultSerializerConfiguration.class)))); + } + + @Test + public void testWithKeyCopier() throws Exception { + Copier keyCopier = mock(Copier.class); + + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withKeyCopier(keyCopier) + .build(); + + + DefaultCopierConfiguration copierConfiguration = ServiceUtils.findSingletonAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(copierConfiguration.getType(), is(DefaultCopierConfiguration.Type.KEY)); + Object instance = copierConfiguration.getInstance(); + assertThat(instance, Matchers.sameInstance(keyCopier)); + } + + @Test + public void testWithKeySerializingCopier() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withKeySerializingCopier() + .build(); + + + DefaultCopierConfiguration copierConfiguration = ServiceUtils.findSingletonAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(copierConfiguration.getType(), is(DefaultCopierConfiguration.Type.KEY)); + assertThat(copierConfiguration.getClazz(), Matchers.sameInstance(SerializingCopier.class)); + } + + @Test + public void testWithoutKeyCopier() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withKeyCopier(MockitoUtil.>mock(Copier.class)) + .withoutKeyCopier() + .build(); + + assertThat(cacheConfiguration.getServiceConfigurations(), not(hasItem(instanceOf(DefaultCopierConfiguration.class)))); + } + + @Test + public void testWithValueCopier() throws Exception { + Copier valueCopier = mock(Copier.class); + + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withValueCopier(valueCopier) + .build(); + + + DefaultCopierConfiguration copierConfiguration = ServiceUtils.findSingletonAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(copierConfiguration.getType(), is(DefaultCopierConfiguration.Type.VALUE)); + Object instance = copierConfiguration.getInstance(); + assertThat(instance, Matchers.sameInstance(valueCopier)); + } + + @Test + public void testWithValueSerializingCopier() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withValueSerializingCopier() + .build(); + + + DefaultCopierConfiguration copierConfiguration = ServiceUtils.findSingletonAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(copierConfiguration.getType(), is(DefaultCopierConfiguration.Type.VALUE)); + assertThat(copierConfiguration.getClazz(), Matchers.sameInstance(SerializingCopier.class)); + } + + @Test + public void testWithoutValueCopier() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withValueCopier(MockitoUtil.>mock(Copier.class)) + .withoutValueCopier() + .build(); + + assertThat(cacheConfiguration.getServiceConfigurations(), not(hasItem(instanceOf(DefaultCopierConfiguration.class)))); + } + + @Test + public void testNothing() { + final CacheConfigurationBuilder builder = newCacheConfigurationBuilder(Long.class, CharSequence.class, heap(10)); + + final ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToIdleExpiration(ExpiryPolicy.INFINITE); + + builder + .withEvictionAdvisor((key, value) -> value.charAt(0) == 'A') + .withExpiry(expiry) + .build(); + } + + @Test + public void testOffheapGetsAddedToCacheConfiguration() { + CacheConfigurationBuilder builder = newCacheConfigurationBuilder(Long.class, CharSequence.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES) + .offheap(10, MemoryUnit.MB)); + + final ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToIdleExpiration(ExpiryPolicy.INFINITE); + + CacheConfiguration config = builder + .withEvictionAdvisor((key, value) -> value.charAt(0) == 'A') + .withExpiry(expiry) + .build(); + assertThat(config.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getType(), Matchers.is(ResourceType.Core.OFFHEAP)); + assertThat(config.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getUnit(), Matchers.is(MemoryUnit.MB)); + } + + @Test + public void testSizeOf() { + CacheConfigurationBuilder builder = newCacheConfigurationBuilder(String.class, String.class, heap(10)); + + builder = builder.withSizeOfMaxObjectSize(10, MemoryUnit.B).withSizeOfMaxObjectGraph(100); + CacheConfiguration configuration = builder.build(); + + DefaultSizeOfEngineConfiguration sizeOfEngineConfiguration = ServiceUtils.findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, configuration.getServiceConfigurations()); + assertThat(sizeOfEngineConfiguration, notNullValue()); + assertEquals(sizeOfEngineConfiguration.getMaxObjectSize(), 10); + assertEquals(sizeOfEngineConfiguration.getUnit(), MemoryUnit.B); + assertEquals(sizeOfEngineConfiguration.getMaxObjectGraphSize(), 100); + + builder = builder.withSizeOfMaxObjectGraph(1000); + configuration = builder.build(); + + sizeOfEngineConfiguration = ServiceUtils.findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, configuration.getServiceConfigurations()); + assertEquals(sizeOfEngineConfiguration.getMaxObjectGraphSize(), 1000); + + } + + @Test + public void testCopyingOfExistingConfiguration() { + Class keyClass = Integer.class; + Class valueClass = String.class; + ClassLoader loader = mock(ClassLoader.class); + @SuppressWarnings("unchecked") + EvictionAdvisor eviction = mock(EvictionAdvisor.class); + @SuppressWarnings("unchecked") + ExpiryPolicy expiry = mock(ExpiryPolicy.class); + ServiceConfiguration service = mock(ServiceConfiguration.class); + + CacheConfiguration configuration = newCacheConfigurationBuilder(Integer.class, String.class, heap(10)) + .withClassLoader(loader) + .withEvictionAdvisor(eviction) + .withExpiry(expiry) + .withService(service) + .build(); + + CacheConfiguration copy = newCacheConfigurationBuilder(configuration).build(); + + assertThat(copy.getKeyType(), equalTo(keyClass)); + assertThat(copy.getValueType(), equalTo(valueClass)); + assertThat(copy.getClassLoader(), equalTo(loader)); + + assertThat(copy.getEvictionAdvisor(), IsSame.sameInstance(eviction)); + assertThat(copy.getExpiryPolicy(), IsSame.sameInstance(expiry)); + assertThat(copy.getServiceConfigurations(), contains(IsSame.sameInstance(service))); + } + + @Test + public void testWithResilienceStrategyInstance() throws Exception { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withResilienceStrategy(resilienceStrategy) + .build(); + + DefaultResilienceStrategyConfiguration resilienceStrategyConfiguration = ServiceUtils.findSingletonAmongst(DefaultResilienceStrategyConfiguration.class, cacheConfiguration.getServiceConfigurations()); + Object instance = resilienceStrategyConfiguration.getInstance(); + assertThat(instance, sameInstance(resilienceStrategy)); + } + + @Test + public void testWithResilienceStrategyClass() throws Exception { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withResilienceStrategy(CustomResilience.class, "Hello World") + .build(); + + DefaultResilienceStrategyConfiguration resilienceStrategyConfiguration = ServiceUtils.findSingletonAmongst(DefaultResilienceStrategyConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(resilienceStrategyConfiguration.getInstance(), nullValue()); + assertThat(resilienceStrategyConfiguration.getClazz(), sameInstance(CustomResilience.class)); + assertThat(resilienceStrategyConfiguration.getArguments(), arrayContaining("Hello World")); + + } + + @Test + public void testWithDefaultResilienceStrategy() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withResilienceStrategy(mock(ResilienceStrategy.class)) + .withDefaultResilienceStrategy() + .build(); + + assertThat(cacheConfiguration.getServiceConfigurations(), not(hasItem(instanceOf(DefaultResilienceStrategyConfiguration.class)))); + } + + @Test + public void testWithServiceAddsNewConfiguration() { + CacheConfigurationBuilder oldBuilder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)); + + ServiceConfiguration serviceConfiguration = new IncompatibleServiceConfig(); + + CacheConfigurationBuilder newBuilder = oldBuilder.withService(serviceConfiguration); + + assertThat(oldBuilder.build().getServiceConfigurations(), not(hasItem(sameInstance(serviceConfiguration)))); + assertThat(newBuilder.build().getServiceConfigurations(), hasItem(sameInstance(serviceConfiguration))); + } + + @Test + public void testIncompatibleServiceRemovesExistingConfiguration() { + ServiceConfiguration oldConfig = new IncompatibleServiceConfig(); + ServiceConfiguration newConfig = new IncompatibleServiceConfig(); + + CacheConfigurationBuilder oldBuilder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(oldConfig); + + CacheConfigurationBuilder newBuilder = oldBuilder.withService(newConfig); + + assertThat(oldBuilder.build().getServiceConfigurations(), both(hasItem(sameInstance(oldConfig))).and(not(hasItem(sameInstance(newConfig))))); + assertThat(newBuilder.build().getServiceConfigurations(), both(hasItem(sameInstance(newConfig))).and(not(hasItem(sameInstance(oldConfig))))); + } + + @Test + public void testCompatibleServiceJoinsExistingConfiguration() { + ServiceConfiguration oldConfig = new CompatibleServiceConfig(); + ServiceConfiguration newConfig = new CompatibleServiceConfig(); + + CacheConfigurationBuilder oldBuilder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(oldConfig); + + CacheConfigurationBuilder newBuilder = oldBuilder.withService(newConfig); + + assertThat(oldBuilder.build().getServiceConfigurations(), both(hasItem(sameInstance(oldConfig))).and(not(hasItem(sameInstance(newConfig))))); + assertThat(newBuilder.build().getServiceConfigurations(), both(hasItem(sameInstance(oldConfig))).and(hasItem(sameInstance(newConfig)))); + } + + @Test + public void testUpdateServicesWithNoServicesThrows() { + CacheConfigurationBuilder oldBuilder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)); + try { + oldBuilder.updateServices(IncompatibleServiceConfig.class, identity()); + fail("Expected IllegalStateException"); + } catch (IllegalStateException e) { + //expected + } + } + + @Test + public void testUpdateServicesWithNullReturnThrows() { + CacheConfigurationBuilder oldBuilder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withService(new IncompatibleServiceConfig()); + try { + oldBuilder.updateServices(IncompatibleServiceConfig.class, c -> null); + fail("Expected NullPointerException"); + } catch (NullPointerException e) { + //expected + } + } + + + @Test + public void testUpdateServicesHitsAllServices() { + CompatibleServiceConfig configA = new CompatibleServiceConfig(); + CompatibleServiceConfig configB = new CompatibleServiceConfig(); + CacheConfigurationBuilder oldBuilder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withService(configA) + .withService(configB); + + CompatibleServiceConfig configA2 = new CompatibleServiceConfig(); + CompatibleServiceConfig configB2 = new CompatibleServiceConfig(); + CacheConfigurationBuilder newBuilder = oldBuilder.updateServices(CompatibleServiceConfig.class, c -> { + if (c == configA) { + return configA2; + } else if (c == configB) { + return configB2; + } else { + throw new AssertionError(); + } + }); + + assertThat(oldBuilder.build().getServiceConfigurations(), hasItems(configA, configB)); + assertThat(newBuilder.build().getServiceConfigurations(), hasItems(configA2, configB2)); + } + + @Test + public void testWithClassLoader() { + ClassLoader classLoader = mock(ClassLoader.class); + + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withClassLoader(classLoader) + .build(); + + assertThat(cacheConfiguration.getClassLoader(), sameInstance(classLoader)); + } + + @Test + public void testWithDefaultClassLoader() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withClassLoader(mock(ClassLoader.class)) + .withDefaultClassLoader() + .build(); + + assertThat(cacheConfiguration.getClassLoader(), nullValue()); + } + + @Test + public void testWithDiskStoreThreadPool() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withDiskStoreThreadPool("banana", 42) + .build(); + + OffHeapDiskStoreConfiguration config = findSingletonAmongst(OffHeapDiskStoreConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(config.getWriterConcurrency(), is(42)); + assertThat(config.getThreadPoolAlias(), is("banana")); + } + + @Test + public void testWithDefaultDiskStoreThreadPool() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withDiskStoreThreadPool("banana", 42) + .withDefaultDiskStoreThreadPool() + .build(); + + assertThat(cacheConfiguration.getServiceConfigurations(), not(hasItem(instanceOf(OffHeapDiskStoreConfiguration.class)))); + } + + @Test + public void testWithSizeOfConfig() { + CacheConfigurationBuilder builder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)); + + builder = builder.withSizeOfMaxObjectGraph(42L); + { + CacheConfiguration cacheConfiguration = builder.build(); + DefaultSizeOfEngineConfiguration config = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(config.getMaxObjectGraphSize(), is(42L)); + assertThat(config.getMaxObjectSize(), is(Long.MAX_VALUE)); + assertThat(config.getUnit(), is(MemoryUnit.B)); + } + + builder = builder.withSizeOfMaxObjectSize(1024L, MemoryUnit.KB); + { + CacheConfiguration cacheConfiguration = builder.build(); + DefaultSizeOfEngineConfiguration config = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(config.getMaxObjectGraphSize(), is(42L)); + assertThat(config.getMaxObjectSize(), is(1024L)); + assertThat(config.getUnit(), is(MemoryUnit.KB)); + } + + builder = builder.withSizeOfMaxObjectGraph(43L); + { + CacheConfiguration cacheConfiguration = builder.build(); + DefaultSizeOfEngineConfiguration config = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(config.getMaxObjectGraphSize(), is(43L)); + assertThat(config.getMaxObjectSize(), is(1024L)); + assertThat(config.getUnit(), is(MemoryUnit.KB)); + } + } + + @Test + public void testWithDefaultSizeOfSettings() { + CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) + .withSizeOfMaxObjectGraph(42L) + .withSizeOfMaxObjectSize(1024L, MemoryUnit.KB) + .withDefaultSizeOfSettings() + .build(); + + assertThat(cacheConfiguration.getServiceConfigurations(), not(hasItem(instanceOf(DefaultSizeOfEngineConfiguration.class)))); + } + + static class CustomResilience extends RobustResilienceStrategy { + + public CustomResilience(RecoveryStore store, String blah) { + super(store); + } + } + + static class IncompatibleServiceConfig implements ServiceConfiguration { + + @Override + public Class getServiceType() { + return Service.class; + } + + @Override + public IncompatibleServiceConfig derive() throws UnsupportedOperationException { + return this; + } + + @Override + public ServiceConfiguration build(IncompatibleServiceConfig representation) throws UnsupportedOperationException { + return representation; + } + } + + static class CompatibleServiceConfig implements ServiceConfiguration { + @Override + public CompatibleServiceConfig derive() throws UnsupportedOperationException { + return this; + } + + @Override + public ServiceConfiguration build(CompatibleServiceConfig representation) throws UnsupportedOperationException { + return representation; + } + + @Override + public Class getServiceType() { + return Service.class; + } + + @Override + public boolean compatibleWith(ServiceConfiguration other) { + return true; + } + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java b/ehcache-impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java new file mode 100644 index 0000000000..86ef50c253 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java @@ -0,0 +1,111 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config.builders; + +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.impl.serialization.CompactJavaSerializer; +import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.spi.serialization.Serializer; +import org.junit.Test; + +import java.util.concurrent.atomic.AtomicInteger; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.mockito.Mockito.mock; + +public class CacheManagerBuilderTest { + + @Test + public void testIsExtensible() { + + AtomicInteger counter = new AtomicInteger(0); + + @SuppressWarnings("unchecked") + CacheManagerConfiguration managerConfiguration = other -> { + counter.getAndIncrement(); + return mock(CacheManagerBuilder.class); + }; + + PersistentCacheManager cacheManager = newCacheManagerBuilder().with(managerConfiguration).build(true); + + assertThat(cacheManager).isNull(); + assertThat(counter.get()).isEqualTo(1); + } + + @Test + public void testCanOverrideCopierInConfig() { + @SuppressWarnings("unchecked") + CacheManagerBuilder managerBuilder = newCacheManagerBuilder() + .withCopier(Long.class, (Class) IdentityCopier.class); + assertThat(managerBuilder.withCopier(Long.class, SerializingCopier.asCopierClass())).isNotNull(); + } + + @Test + public void testCanOverrideSerializerConfig() { + @SuppressWarnings("unchecked") + Class> serializer1 = (Class) JavaSerializer.class; + CacheManagerBuilder managerBuilder = newCacheManagerBuilder() + .withSerializer(String.class, serializer1); + @SuppressWarnings("unchecked") + Class> serializer2 = (Class) CompactJavaSerializer.class; + assertThat(managerBuilder.withSerializer(String.class, serializer2)).isNotNull(); + } + + @Test + public void testDuplicateServiceCreationConfigurationOk() { + DefaultCopyProviderConfiguration configOne = new DefaultCopyProviderConfiguration(); + DefaultCopyProviderConfiguration configTwo = new DefaultCopyProviderConfiguration(); + CacheManagerBuilder builder = newCacheManagerBuilder() + .using(configOne) + .using(configTwo); + + assertThat(builder.build().getRuntimeConfiguration().getServiceCreationConfigurations()).contains(configTwo).doesNotContain(configOne); + } + + @Test @SuppressWarnings("deprecation") + public void testDuplicateServiceCreationConfigurationOkWhenExplicit() { + CacheManagerBuilder builder = newCacheManagerBuilder().using(new DefaultCopyProviderConfiguration()) + .replacing(new DefaultCopyProviderConfiguration()); + + assertThat(builder.build()).isNotNull(); + } + + @Test + public void testShouldNotBeAllowedToRegisterTwoCachesWithSameAlias() { + String cacheAlias = "alias"; + + CacheConfiguration configOne = CacheConfigurationBuilder + .newCacheConfigurationBuilder(Integer.class, String.class, ResourcePoolsBuilder.heap(10)) + .build(); + + CacheConfiguration configTwo = CacheConfigurationBuilder + .newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) + .build(); + + CacheManager build = newCacheManagerBuilder() + .withCache(cacheAlias, configOne) + .withCache(cacheAlias, configTwo).build(); + + assertThat(build.getRuntimeConfiguration().getCacheConfigurations().get(cacheAlias).getKeyType()).isEqualTo(Long.class); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/config/builders/ExpiryPolicyBuilderTest.java b/ehcache-impl/src/test/java/org/ehcache/config/builders/ExpiryPolicyBuilderTest.java new file mode 100644 index 0000000000..0858c15ef0 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/config/builders/ExpiryPolicyBuilderTest.java @@ -0,0 +1,105 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.config.builders; + +import org.ehcache.expiry.ExpiryPolicy; +import org.junit.Test; + +import java.time.Duration; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; + +/** + * ExpiryPolicyBuilderTest + */ +public class ExpiryPolicyBuilderTest { + + @Test + public void testNoExpiration() { + ExpiryPolicy expiry = ExpiryPolicyBuilder.noExpiration(); + assertThat(expiry, sameInstance(ExpiryPolicy.NO_EXPIRY)); + assertThat(expiry.getExpiryForCreation(this, this), equalTo(ExpiryPolicy.INFINITE)); + assertThat(expiry.getExpiryForAccess(this, () -> this), nullValue()); + assertThat(expiry.getExpiryForUpdate(this, () -> this, this), nullValue()); + } + + @Test + public void testTTIExpiration() { + java.time.Duration duration = java.time.Duration.ofSeconds(1L); + ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToIdleExpiration(duration); + assertThat(expiry.getExpiryForCreation(this, this), equalTo(duration)); + assertThat(expiry.getExpiryForAccess(this, () -> this), equalTo(duration)); + assertThat(expiry.getExpiryForUpdate(this, () -> this, this), equalTo(duration)); + + ExpiryPolicy otherExpiry = ExpiryPolicyBuilder.timeToIdleExpiration(java.time.Duration.ofSeconds(1L)); + assertThat(otherExpiry, equalTo(expiry)); + } + + @Test + public void testTTLExpiration() { + java.time.Duration duration = java.time.Duration.ofSeconds(1L); + ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToLiveExpiration(duration); + assertThat(expiry.getExpiryForCreation(this, this), equalTo(duration)); + assertThat(expiry.getExpiryForAccess(this, () -> this), nullValue()); + assertThat(expiry.getExpiryForUpdate(this, () -> this, this), equalTo(duration)); + + ExpiryPolicy otherExpiry = ExpiryPolicyBuilder.timeToLiveExpiration(java.time.Duration.ofSeconds(1L)); + assertThat(otherExpiry, equalTo(expiry)); + } + + @Test + public void testExpiration() { + Duration creation = Duration.ofSeconds(1L); + Duration access = Duration.ofSeconds(2L); + Duration update = Duration.ofSeconds(3L); + ExpiryPolicy expiry = ExpiryPolicyBuilder.expiry().create(creation).access(access).update(update).build(); + assertThat(expiry.getExpiryForCreation(this, this), equalTo(creation)); + assertThat(expiry.getExpiryForAccess(this, () -> this), equalTo(access)); + assertThat(expiry.getExpiryForUpdate(this, () -> this,this), equalTo(update)); + } + + @Test + public void testExpirationFunctions() { + Duration creation = Duration.ofSeconds(1L); + Duration access = Duration.ofSeconds(2L); + Duration update = Duration.ofSeconds(3L); + ExpiryPolicy expiry = ExpiryPolicyBuilder.expiry() + .create((k, v) -> { + assertThat(k, equalTo(10L)); + assertThat(v, equalTo(20L)); + return creation; + }) + .access((k, v) -> { + assertThat(k, equalTo(10L)); + assertThat(v.get(), equalTo(20L)); + return access; + }) + .update((k, v1, v2) -> { + assertThat(k, equalTo(10L)); + assertThat(v1.get(), equalTo(20L)); + assertThat(v2, equalTo(30L)); + return update; + }) + .build(); + assertThat(expiry.getExpiryForCreation(10L, 20L), equalTo(creation)); + assertThat(expiry.getExpiryForAccess(10L, () -> 20L), equalTo(access)); + assertThat(expiry.getExpiryForUpdate(10L, () -> 20L,30L), equalTo(update)); + } +} diff --git a/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java b/ehcache-impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java similarity index 92% rename from impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java rename to ehcache-impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java index aa6326391e..a2ba2fa045 100644 --- a/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java @@ -23,7 +23,6 @@ import org.junit.Before; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.File; @@ -33,10 +32,14 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; +import static org.terracotta.utilities.io.Files.delete; /** * @author Alex Snaps @@ -45,9 +48,6 @@ public class PersistentCacheManagerTest { private static final String TEST_CACHE_ALIAS = "test123"; - @Rule - public ExpectedException thrown = ExpectedException.none(); - @Rule public final TemporaryFolder folder = new TemporaryFolder(); @@ -57,7 +57,7 @@ public class PersistentCacheManagerTest { @Before public void setup() throws IOException { rootDirectory = folder.newFolder("testInitializesDiskResourceService"); - assertTrue(rootDirectory.delete()); + delete(rootDirectory.toPath()); builder = newCacheManagerBuilder().with(new CacheManagerPersistenceConfiguration(rootDirectory)); } @@ -79,9 +79,8 @@ public void testInitializesLocalPersistenceServiceAndCreateCache() throws IOExce @Test public void testDestroyCache_NullAliasNotAllowed() throws CachePersistenceException { PersistentCacheManager manager = builder.build(true); - thrown.expect(NullPointerException.class); - thrown.expectMessage("Alias cannot be null"); - manager.destroyCache(null); + NullPointerException thrown = assertThrows(NullPointerException.class, () -> manager.destroyCache(null)); + assertThat(thrown, hasProperty("message", is("Alias cannot be null"))); } @Test diff --git a/impl/src/test/java/org/ehcache/config/builders/ResourcePoolsBuilderTest.java b/ehcache-impl/src/test/java/org/ehcache/config/builders/ResourcePoolsBuilderTest.java similarity index 95% rename from impl/src/test/java/org/ehcache/config/builders/ResourcePoolsBuilderTest.java rename to ehcache-impl/src/test/java/org/ehcache/config/builders/ResourcePoolsBuilderTest.java index 8dfcc69c7b..522676ff9c 100644 --- a/impl/src/test/java/org/ehcache/config/builders/ResourcePoolsBuilderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/config/builders/ResourcePoolsBuilderTest.java @@ -20,16 +20,17 @@ import org.ehcache.config.ResourceUnit; import org.ehcache.config.SizedResourcePool; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.config.SizedResourcePoolImpl; +import org.ehcache.impl.config.SizedResourcePoolImpl; import org.hamcrest.Matchers; import org.junit.Test; import static org.ehcache.config.ResourceType.Core.HEAP; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; +import static org.junit.Assert.fail; public class ResourcePoolsBuilderTest { diff --git a/impl/src/test/java/org/ehcache/config/builders/TieringTest.java b/ehcache-impl/src/test/java/org/ehcache/config/builders/TieringTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/config/builders/TieringTest.java rename to ehcache-impl/src/test/java/org/ehcache/config/builders/TieringTest.java diff --git a/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java b/ehcache-impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java similarity index 95% rename from impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java rename to ehcache-impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java index 2a326fc36a..b84c23242b 100644 --- a/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java @@ -22,16 +22,14 @@ import org.ehcache.config.CacheRuntimeConfiguration; import org.ehcache.event.EventType; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProviderTest; import org.junit.Test; +import java.time.Duration; import java.util.Iterator; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @@ -81,7 +79,7 @@ public void testInvalidListenerConfig() { @Test public void testTypedCacheWithExpirationPolicy() { try (UserManagedCache cache = UserManagedCacheBuilder.newUserManagedCacheBuilder(String.class, String.class) - .withExpiry(Expirations.timeToIdleExpiration(new Duration(30, TimeUnit.SECONDS))) + .withExpiry(ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofSeconds(30))) .build(true)) { assertThat(cache, notNullValue()); } diff --git a/impl/src/test/java/org/ehcache/config/builders/WriteBehindConfigurationBuilderTest.java b/ehcache-impl/src/test/java/org/ehcache/config/builders/WriteBehindConfigurationBuilderTest.java similarity index 98% rename from impl/src/test/java/org/ehcache/config/builders/WriteBehindConfigurationBuilderTest.java rename to ehcache-impl/src/test/java/org/ehcache/config/builders/WriteBehindConfigurationBuilderTest.java index 632177f00d..b7f93b975c 100644 --- a/impl/src/test/java/org/ehcache/config/builders/WriteBehindConfigurationBuilderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/config/builders/WriteBehindConfigurationBuilderTest.java @@ -19,9 +19,9 @@ import java.util.concurrent.TimeUnit; import static org.ehcache.config.builders.WriteBehindConfigurationBuilder.newBatchedWriteBehindConfiguration; import static org.ehcache.config.builders.WriteBehindConfigurationBuilder.newUnBatchedWriteBehindConfiguration; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNull.nullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import org.junit.Test; diff --git a/impl/src/test/java/org/ehcache/core/events/CacheManagerListenerInteractionsTest.java b/ehcache-impl/src/test/java/org/ehcache/core/events/CacheManagerListenerInteractionsTest.java similarity index 98% rename from impl/src/test/java/org/ehcache/core/events/CacheManagerListenerInteractionsTest.java rename to ehcache-impl/src/test/java/org/ehcache/core/events/CacheManagerListenerInteractionsTest.java index bb67642410..6a51a5fe7c 100644 --- a/impl/src/test/java/org/ehcache/core/events/CacheManagerListenerInteractionsTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/core/events/CacheManagerListenerInteractionsTest.java @@ -27,8 +27,8 @@ import org.junit.Test; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isNotNull; diff --git a/impl/src/test/java/org/ehcache/core/events/CacheManagerListenerTest.java b/ehcache-impl/src/test/java/org/ehcache/core/events/CacheManagerListenerTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/core/events/CacheManagerListenerTest.java rename to ehcache-impl/src/test/java/org/ehcache/core/events/CacheManagerListenerTest.java index 19dc820aff..9b09f4fcb6 100644 --- a/impl/src/test/java/org/ehcache/core/events/CacheManagerListenerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/core/events/CacheManagerListenerTest.java @@ -26,7 +26,6 @@ import org.junit.Before; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import static org.ehcache.config.units.MemoryUnit.MB; @@ -42,9 +41,6 @@ public class CacheManagerListenerTest { @Rule public TemporaryFolder folder = new TemporaryFolder(); - @Rule - public ExpectedException expectedException = ExpectedException.none(); - @Before public void before() { CacheConfigurationBuilder cacheConfiguration = diff --git a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java similarity index 89% rename from impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java rename to ehcache-impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java index 339dce04f6..784b5132f4 100644 --- a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java @@ -16,7 +16,7 @@ package org.ehcache.core.spi; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; @@ -26,9 +26,10 @@ import org.hamcrest.core.IsCollectionContaining; import org.hamcrest.core.IsSame; import org.junit.Test; +import org.mockito.Answers; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; -import static org.junit.Assert.assertThat; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.hamcrest.MatcherAssert.assertThat; import static org.mockito.Mockito.mock; /** @@ -49,6 +50,7 @@ public void testSupportsMultipleAuthoritativeTierProviders() throws Exception { dependencySet.with(authoritativeTierProvider); dependencySet.with(diskStoreProvider); dependencySet.with(mock(DiskResourceService.class)); + dependencySet.with(mock(CacheManagerProviderService.class, Answers.RETURNS_DEEP_STUBS)); ServiceLocator serviceLocator = dependencySet.build(); serviceLocator.startAllServices(); diff --git a/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java b/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java new file mode 100644 index 0000000000..1e74b8bcdd --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/docs/ConfigurationDerivation.java @@ -0,0 +1,227 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.docs; + +import org.ehcache.config.Configuration; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.config.ResourceType; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.resilience.ThrowingResilienceStrategy; +import org.ehcache.core.spi.service.ServiceUtils; +import org.ehcache.core.util.ClassLoading; +import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; +import org.ehcache.impl.serialization.PlainJavaSerializer; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.test.MockitoUtil; +import org.hamcrest.collection.IsEmptyCollection; +import org.hamcrest.collection.IsIterableContainingInAnyOrder; +import org.hamcrest.collection.IsMapContaining; +import org.hamcrest.core.Is; +import org.hamcrest.core.IsCollectionContaining; +import org.hamcrest.core.IsInstanceOf; +import org.hamcrest.core.IsNot; +import org.hamcrest.core.IsNull; +import org.hamcrest.core.IsSame; +import org.junit.Test; + +import java.io.File; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Date; + +import static org.hamcrest.MatcherAssert.assertThat; + +public class ConfigurationDerivation { + + @Test + public void identityTransform() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(10)))) + .build(); + + // tag::deriveContract[] + FluentConfigurationBuilder derivedBuilder = configuration.derive(); // <1> + Configuration configurationCopy = derivedBuilder.build(); // <2> + // end::deriveContract[] + } + + @Test + public void withCustomClassLoader() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))) + .build(); + + ClassLoader classLoader = MockitoUtil.mock(ClassLoader.class); + + // tag::customClassLoader[] + Configuration withClassLoader = configuration.derive() + .withClassLoader(classLoader) + .build(); + // end::customClassLoader[] + + assertThat(configuration.getClassLoader(), Is.is(IsSame.sameInstance(ClassLoading.getDefaultClassLoader()))); + assertThat(withClassLoader.getClassLoader(), Is.is(IsSame.sameInstance(classLoader))); + } + + @Test + public void withCache() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder().build(); + + //tag::withCache[] + Configuration withCache = configuration.derive() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder( + Long.class, String.class, ResourcePoolsBuilder.heap(10))) + .build(); + //end::withCache[] + + assertThat(configuration.getCacheConfigurations().keySet(), Is.is(IsEmptyCollection.empty())); + assertThat(withCache.getCacheConfigurations().keySet(), IsIterableContainingInAnyOrder.containsInAnyOrder("cache")); + } + + @Test + public void withoutCache() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))) + .build(); + + //tag::withoutCache[] + Configuration withoutCache = configuration.derive() + .withoutCache("cache") + .build(); + //end::withoutCache[] + + assertThat(configuration.getCacheConfigurations().keySet(), IsIterableContainingInAnyOrder.containsInAnyOrder("cache")); + assertThat(withoutCache.getCacheConfigurations().keySet(), Is.is(IsEmptyCollection.empty())); + } + + @Test + public void updateCache() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))) + .build(); + + //tag::updateCache[] + Configuration withOffHeap = configuration.derive() + .updateCache("cache", cache -> cache.updateResourcePools( + resources -> ResourcePoolsBuilder.newResourcePoolsBuilder(resources) + .offheap(100, MemoryUnit.MB) + .build())) + .build(); + //end::updateCache[] + + assertThat(configuration.getCacheConfigurations().get("cache").getResourcePools().getResourceTypeSet(), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP)); + assertThat(withOffHeap.getCacheConfigurations().get("cache").getResourcePools().getResourceTypeSet(), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP)); + } + + @Test + public void withServiceCreation() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))) + .build(); + + //tag::withServiceCreation[] + Configuration withBoundedThreads = configuration.derive() + .withService(new PooledExecutionServiceConfiguration() + .addDefaultPool("default", 1, 16)) + .build(); + //end::withServiceCreation[] + + assertThat(configuration.getServiceCreationConfigurations(), IsNot.not(IsCollectionContaining.hasItem(IsInstanceOf.instanceOf(PooledExecutionServiceConfiguration.class)))); + PooledExecutionServiceConfiguration serviceCreationConfiguration = ServiceUtils.findSingletonAmongst(PooledExecutionServiceConfiguration.class, withBoundedThreads.getServiceCreationConfigurations()); + assertThat(serviceCreationConfiguration.getDefaultPoolAlias(), Is.is("default")); + assertThat(serviceCreationConfiguration.getPoolConfigurations().keySet(), IsIterableContainingInAnyOrder.containsInAnyOrder("default")); + PooledExecutionServiceConfiguration.PoolConfiguration pool = serviceCreationConfiguration.getPoolConfigurations().get("default"); + assertThat(pool.minSize(), Is.is(1)); + assertThat(pool.maxSize(), Is.is(16)); + } + + @Test + public void updateServiceCreation() { + @SuppressWarnings("unchecked") + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withService(new DefaultPersistenceConfiguration(new File("temp"))) + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))) + .build(); + + //tag::updateServiceCreation[] + Configuration withUpdatedPersistence = configuration.derive() + .updateServices(DefaultPersistenceConfiguration.class, + existing -> new File("/var/persistence/path")) + .build(); + //end::updateServiceCreation[] + + DefaultPersistenceConfiguration initialPersistenceConfiguration = ServiceUtils.findSingletonAmongst(DefaultPersistenceConfiguration.class, configuration.getServiceCreationConfigurations()); + assertThat(initialPersistenceConfiguration.getRootDirectory(), Is.is(new File("temp"))); + + DefaultPersistenceConfiguration revisedPersistenceConfiguration = ServiceUtils.findSingletonAmongst(DefaultPersistenceConfiguration.class, withUpdatedPersistence.getServiceCreationConfigurations()); + assertThat(revisedPersistenceConfiguration.getRootDirectory(), Is.is(new File("/var/persistence/path"))); + } + + @Test + public void withService() { + Configuration configuration = ConfigurationBuilder.newConfigurationBuilder() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))) + .build(); + + //tag::withService[] + Configuration withThrowingStrategy = configuration.derive() + .updateCache("cache", existing -> existing.withService( + new DefaultResilienceStrategyConfiguration(new ThrowingResilienceStrategy<>()) + )) + .build(); + //end::withService[] + + + assertThat(configuration.getServiceCreationConfigurations(), IsNot.not(IsCollectionContaining.hasItem( + IsInstanceOf.instanceOf(DefaultResilienceStrategyConfiguration.class)))); + + DefaultResilienceStrategyConfiguration resilienceStrategyConfiguration = + ServiceUtils.findSingletonAmongst(DefaultResilienceStrategyConfiguration.class, + withThrowingStrategy.getCacheConfigurations().get("cache").getServiceConfigurations()); + assertThat(resilienceStrategyConfiguration.getInstance(), IsInstanceOf.instanceOf(ThrowingResilienceStrategy.class)); + } + + public static final class OptimizedDateSerializer implements Serializer { + + public OptimizedDateSerializer(ClassLoader classLoader) {} + + @Override + public ByteBuffer serialize(Date object) throws SerializerException { + ByteBuffer buffer = ByteBuffer.allocate(8); + return (ByteBuffer) buffer.putLong(object.getTime()).flip(); + } + + @Override + public Date read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return new Date(binary.getLong()); + } + + @Override + public boolean equals(Date object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return binary.getLong() == object.getTime(); + } + } +} diff --git a/impl/src/test/java/org/ehcache/docs/Ehcache3.java b/ehcache-impl/src/test/java/org/ehcache/docs/Ehcache3.java similarity index 83% rename from impl/src/test/java/org/ehcache/docs/Ehcache3.java rename to ehcache-impl/src/test/java/org/ehcache/docs/Ehcache3.java index 18e2ede059..fe4d6bf46b 100644 --- a/impl/src/test/java/org/ehcache/docs/Ehcache3.java +++ b/ehcache-impl/src/test/java/org/ehcache/docs/Ehcache3.java @@ -17,18 +17,16 @@ import org.ehcache.Cache; import org.ehcache.CacheManager; -import org.ehcache.ValueSupplier; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.TimeSourceConfiguration; import org.ehcache.internal.TestTimeSource; import org.junit.Test; -import java.util.concurrent.TimeUnit; - +import java.time.Duration; +import java.util.function.Supplier; public class Ehcache3 { @@ -41,19 +39,19 @@ public void ehcache3Expiry() throws Exception { CacheConfigurationBuilder configuration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder .heap(100)) - .withExpiry(new Expiry() { // <1> + .withExpiry(new ExpiryPolicy() { // <1> @Override public Duration getExpiryForCreation(Long key, String value) { return getTimeToLiveDuration(key, value); // <2> } @Override - public Duration getExpiryForAccess(Long key, ValueSupplier value) { + public Duration getExpiryForAccess(Long key, Supplier value) { return null; // Keeping the existing expiry } @Override - public Duration getExpiryForUpdate(Long key, ValueSupplier oldValue, String newValue) { + public Duration getExpiryForUpdate(Long key, Supplier oldValue, String newValue) { return null; // Keeping the existing expiry } }); @@ -80,11 +78,11 @@ public Duration getExpiryForUpdate(Long key, ValueSupplier old private Duration getTimeToLiveDuration(Long key, String value) { // Returns TTL of 10 seconds for keys less than 1000 if (key < 1000) { - return Duration.of(2, TimeUnit.SECONDS); + return Duration.ofSeconds(2); } // Otherwise return 5 seconds TTL - return Duration.of(1, TimeUnit.SECONDS); + return Duration.ofSeconds(5); } @@ -98,7 +96,7 @@ private CacheManager initCacheManager() { .build(true); } - private void sleep(int millisecondsToSleep) throws Exception { + private void sleep(int millisecondsToSleep) { timeSource.advanceTime(millisecondsToSleep); } } diff --git a/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java b/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java new file mode 100644 index 0000000000..f3429094e9 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/docs/GettingStarted.java @@ -0,0 +1,383 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.docs; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.builders.WriteBehindConfigurationBuilder; +import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.docs.plugs.ListenerObject; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.docs.plugs.OddKeysEvictionAdvisor; +import org.ehcache.docs.plugs.SampleLoaderWriter; +import org.ehcache.event.EventFiring; +import org.ehcache.event.EventOrdering; +import org.ehcache.event.EventType; +import org.ehcache.impl.copy.ReadWriteCopier; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; +import java.io.IOException; +import java.io.Serializable; +import java.time.Duration; +import java.util.EnumSet; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import static java.util.Collections.singletonMap; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +/** + * Samples to get started with Ehcache 3 + * + * If you add new examples, you should use tags to have them included in the README.adoc + * You need to edit the README.adoc too to add your new content. + * The callouts are also used in docs/user/index.adoc + */ +@SuppressWarnings("unused") +public class GettingStarted { + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + @Test + public void cachemanagerExample() { + // tag::cachemanagerExample[] + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() // <1> + .withCache("preConfigured", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))) // <2> + .build(); // <3> + cacheManager.init(); // <4> + + Cache preConfigured = + cacheManager.getCache("preConfigured", Long.class, String.class); // <5> + + Cache myCache = cacheManager.createCache("myCache", // <6> + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))); + + myCache.put(1L, "da one!"); // <7> + String value = myCache.get(1L); // <8> + + cacheManager.removeCache("preConfigured"); // <9> + + cacheManager.close(); // <10> + // end::cachemanagerExample[] + } + + @Test + public void threeTiersCacheManager() throws Exception { + // tag::threeTiersCacheManager[] + PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(new File(getStoragePath(), "myData"))) // <1> + .withCache("threeTieredCache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) // <2> + .offheap(1, MemoryUnit.MB) // <3> + .disk(20, MemoryUnit.MB, true) // <4> + ) + ).build(true); + + Cache threeTieredCache = persistentCacheManager.getCache("threeTieredCache", Long.class, String.class); + threeTieredCache.put(1L, "stillAvailableAfterRestart"); // <5> + + persistentCacheManager.close(); + // end::threeTiersCacheManager[] + } + + @Test + public void testCacheEventListener() { + // tag::cacheEventListener[] + CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder + .newEventListenerConfiguration(new ListenerObject(), EventType.CREATED, EventType.UPDATED) // <1> + .unordered().asynchronous(); // <2> + + final CacheManager manager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("foo", + CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, ResourcePoolsBuilder.heap(10)) + .withService(cacheEventListenerConfiguration) // <3> + ).build(true); + + final Cache cache = manager.getCache("foo", String.class, String.class); + cache.put("Hello", "World"); // <4> + cache.put("Hello", "Everyone"); // <5> + cache.remove("Hello"); // <6> + // end::cacheEventListener[] + + manager.close(); + } + + @Test + public void writeThroughCache() { + // tag::writeThroughCache[] + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true); + + Cache writeThroughCache = cacheManager.createCache("writeThroughCache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) + .withLoaderWriter(new SampleLoaderWriter<>(singletonMap(41L, "zero"))) // <1> + .build()); + + assertThat(writeThroughCache.get(41L), is("zero")); // <2> + writeThroughCache.put(42L, "one"); // <3> + assertThat(writeThroughCache.get(42L), equalTo("one")); + + cacheManager.close(); + // end::writeThroughCache[] + } + + @Test + public void writeBehindCache() { + // tag::writeBehindCache[] + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true); + + Cache writeBehindCache = cacheManager.createCache("writeBehindCache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) + .withLoaderWriter(new SampleLoaderWriter<>(singletonMap(41L, "zero"))) // <1> + .withService(WriteBehindConfigurationBuilder // <2> + .newBatchedWriteBehindConfiguration(1, TimeUnit.SECONDS, 3)// <3> + .queueSize(3)// <4> + .concurrencyLevel(1) // <5> + .enableCoalescing()) // <6> + .build()); + + assertThat(writeBehindCache.get(41L), is("zero")); + writeBehindCache.put(42L, "one"); + writeBehindCache.put(43L, "two"); + writeBehindCache.put(42L, "This goes for the record"); + assertThat(writeBehindCache.get(42L), equalTo("This goes for the record")); + + cacheManager.close(); + // end::writeBehindCache[] + } + + @Test + public void registerListenerAtRuntime() { + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.heap(10L))) + .build(true); + + Cache cache = cacheManager.getCache("cache", Long.class, String.class); + + // tag::registerListenerAtRuntime[] + ListenerObject listener = new ListenerObject(); // <1> + cache.getRuntimeConfiguration().registerCacheEventListener(listener, EventOrdering.ORDERED, + EventFiring.ASYNCHRONOUS, EnumSet.of(EventType.CREATED, EventType.REMOVED)); // <2> + + cache.put(1L, "one"); + cache.put(2L, "two"); + cache.remove(1L); + cache.remove(2L); + + cache.getRuntimeConfiguration().deregisterCacheEventListener(listener); // <3> + + cache.put(1L, "one again"); + cache.remove(1L); + // end::registerListenerAtRuntime[] + + cacheManager.close(); + } + + @Test + public void configuringEventProcessing() { + CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder + .newEventListenerConfiguration(ListenerObject.class, EventType.EVICTED).ordered().synchronous(); + // tag::configuringEventProcessingQueues[] + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.heap(5L)) + .withDispatcherConcurrency(10) // <1> + .withEventListenersThreadPool("listeners-pool") + .build(); + // end::configuringEventProcessingQueues[] + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration) + .build(true); + cacheManager.close(); + } + + @Test + public void cacheEvictionAdvisor() throws Exception { + // tag::cacheEvictionAdvisor[] + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.heap(2L)) // <1> + .withEvictionAdvisor(new OddKeysEvictionAdvisor<>()) // <2> + .build(); + + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("cache", cacheConfiguration) + .build(true); + + Cache cache = cacheManager.getCache("cache", Long.class, String.class); + + // Work with the cache + cache.put(42L, "The Answer!"); + cache.put(41L, "The wrong Answer!"); + cache.put(39L, "The other wrong Answer!"); + + cacheManager.close(); + // end::cacheEvictionAdvisor[] + } + + @Test + public void expiry() throws Exception { + // tag::expiry[] + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.heap(100)) // <1> + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(20))) // <2> + .build(); + // end::expiry[] + } + + @Test + public void customExpiry() throws Exception { + // tag::customExpiry[] + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.heap(100)) + .withExpiry(new CustomExpiry()) // <1> + .build(); + // end::customExpiry[] + } + + private static class Description { + int id; + String alias; + + Description(Description other) { + this.id = other.id; + this.alias = other.alias; + } + + Description(int id, String alias) { + this.id = id; + this.alias = alias; + } + + @Override + public boolean equals(final Object other) { + if(this == other) return true; + if(other == null || this.getClass() != other.getClass()) return false; + + Description that = (Description)other; + if(id != that.id) return false; + if ((alias == null) ? (alias != null) : !alias.equals(that.alias)) return false; + return true; + } + + @Override + public int hashCode() { + int result = 1; + result = 31 * result + id; + result = 31 * result + (alias == null ? 0 : alias.hashCode()); + return result; + } + } + + private static class Person implements Serializable { + + private static final long serialVersionUID = 1L; + + String name; + int age; + + Person(Person other) { + this.name = other.name; + this.age = other.age; + } + + Person(String name, int age) { + this.name = name; + this.age = age; + } + + @Override + public boolean equals(final Object other) { + if(this == other) return true; + if(other == null || this.getClass() != other.getClass()) return false; + + Person that = (Person)other; + if(age != that.age) return false; + if((name == null) ? (that.name != null) : !name.equals(that.name)) return false; + + return true; + } + + @Override + public int hashCode() { + int result = 1; + result = 31 * result + age; + result = 31 * result + (name == null ? 0 : name.hashCode()); + return result; + } + } + + public static class DescriptionCopier extends ReadWriteCopier { + + @Override + public Description copy(final Description obj) { + return new Description(obj); + } + } + + public static class PersonCopier extends ReadWriteCopier { + + @Override + public Person copy(final Person obj) { + return new Person(obj); + } + } + + static class PersonSerializer extends JavaSerializer { + public PersonSerializer() { + super(ClassLoader.getSystemClassLoader()); + } + } + + private String getStoragePath() throws IOException { + return diskPath.newFolder().getAbsolutePath(); + } + + public static class CustomExpiry implements ExpiryPolicy { + + @Override + public Duration getExpiryForCreation(Long key, String value) { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public Duration getExpiryForAccess(Long key, Supplier value) { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public Duration getExpiryForUpdate(Long key, Supplier oldValue, String newValue) { + throw new UnsupportedOperationException("TODO Implement me!"); + } + } + +} diff --git a/impl/src/test/java/org/ehcache/docs/ThreadPools.java b/ehcache-impl/src/test/java/org/ehcache/docs/ThreadPools.java similarity index 92% rename from impl/src/test/java/org/ehcache/docs/ThreadPools.java rename to ehcache-impl/src/test/java/org/ehcache/docs/ThreadPools.java index 2bf60e7acc..9a8e07a2ae 100644 --- a/impl/src/test/java/org/ehcache/docs/ThreadPools.java +++ b/ehcache-impl/src/test/java/org/ehcache/docs/ThreadPools.java @@ -30,10 +30,12 @@ import org.ehcache.event.EventType; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.config.builders.PooledExecutionServiceConfigurationBuilder; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import java.io.File; -import java.net.URISyntaxException; +import java.io.IOException; import java.util.concurrent.TimeUnit; import static java.util.Collections.singletonMap; @@ -44,6 +46,9 @@ @SuppressWarnings("unused") public class ThreadPools { + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + @Test public void diskStore() throws Exception { // tag::diskStore[] @@ -93,7 +98,7 @@ public void writeBehind() throws Exception { CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) .withLoaderWriter(new SampleLoaderWriter<>(singletonMap(41L, "zero"))) - .add(WriteBehindConfigurationBuilder + .withService(WriteBehindConfigurationBuilder .newBatchedWriteBehindConfiguration(1, TimeUnit.SECONDS, 3) .queueSize(3) .concurrencyLevel(1))) @@ -101,7 +106,7 @@ public void writeBehind() throws Exception { CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) .withLoaderWriter(new SampleLoaderWriter<>(singletonMap(41L, "zero"))) - .add(WriteBehindConfigurationBuilder + .withService(WriteBehindConfigurationBuilder .newBatchedWriteBehindConfiguration(1, TimeUnit.SECONDS, 3) .useThreadPool("cache2Pool") // <3> .queueSize(3) @@ -130,12 +135,12 @@ public void events() throws Exception { .withCache("cache1", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) - .add(CacheEventListenerConfigurationBuilder + .withService(CacheEventListenerConfigurationBuilder .newEventListenerConfiguration(new ListenerObject(), EventType.CREATED, EventType.UPDATED))) .withCache("cache2", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) - .add(CacheEventListenerConfigurationBuilder + .withService(CacheEventListenerConfigurationBuilder .newEventListenerConfiguration(new ListenerObject(), EventType.CREATED, EventType.UPDATED)) .withEventListenersThreadPool("cache2Pool")) // <3> .build(true); @@ -149,8 +154,8 @@ public void events() throws Exception { // end::events[] } - private String getStoragePath() throws URISyntaxException { - return getClass().getClassLoader().getResource(".").toURI().getPath(); + private String getStoragePath() throws IOException { + return diskPath.newFolder().getAbsolutePath(); } } diff --git a/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java b/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java new file mode 100644 index 0000000000..6d1cb8d3e9 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/docs/Tiering.java @@ -0,0 +1,212 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.docs; + +import java.io.File; +import java.io.IOException; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.ResourceType; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.docs.plugs.ListenerObject; +import org.ehcache.event.EventType; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +/** + * Tiering + */ +public class Tiering { + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Test + public void tierSizing() { + // tag::heap[] + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES); // <1> + // or + ResourcePoolsBuilder.heap(10); // <2> + // or + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, MemoryUnit.MB); // <3> + // end::heap[] + // tag::offheap[] + ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB); // <1> + // end::offheap[] + } + + @Test + public void testSingleTier() { + // tag::offheapOnly[] + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <1> + ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(2, MemoryUnit.GB)).build(); // <2> + // end::offheapOnly[] + } + + @Test + public void threeTiersCacheManager() throws Exception { + // tag::threeTiersCacheManager[] + PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(new File(getStoragePath(), "myData"))) + .withCache("threeTieredCache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .disk(20, MemoryUnit.MB, true) + ) + ).build(true); + // end::threeTiersCacheManager[] + + persistentCacheManager.close(); + } + + @Test + public void persistentCacheManager() throws Exception { + // tag::persistentCacheManager[] + PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() // <1> + .with(CacheManagerBuilder.persistence(new File(getStoragePath(), "myData"))) // <2> + .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true)) // <3> + ) + .build(true); + + persistentCacheManager.close(); + // end::persistentCacheManager[] + } + + @Test + public void diskSegments() throws Exception { + // tag::diskSegments[] + String storagePath = getStoragePath(); + PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(new File(storagePath, "myData"))) + .withCache("less-segments", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB)) + .withService(new OffHeapDiskStoreConfiguration(2)) // <1> + ) + .build(true); + + persistentCacheManager.close(); + // end::diskSegments[] + } + + @Test + public void updateResourcesAtRuntime() throws InterruptedException { + ListenerObject listener = new ListenerObject(); + CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder + .newEventListenerConfiguration(listener, EventType.EVICTED).unordered().synchronous(); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10L, EntryUnit.ENTRIES)) + .withService(cacheEventListenerConfiguration) + .build(); + + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration) + .build(true); + + Cache cache = cacheManager.getCache("cache", Long.class, String.class); + for(long i = 0; i < 20; i++ ){ + cache.put(i, "Hello World"); + } + assertThat(listener.evicted(), is(10)); + + cache.clear(); + listener.resetEvictionCount(); + + // tag::updateResourcesAtRuntime[] + ResourcePools pools = ResourcePoolsBuilder.newResourcePoolsBuilder().heap(20L, EntryUnit.ENTRIES).build(); // <1> + cache.getRuntimeConfiguration().updateResourcePools(pools); // <2> + assertThat(cache.getRuntimeConfiguration().getResourcePools() + .getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L)); + // end::updateResourcesAtRuntime[] + + for(long i = 0; i < 20; i++ ){ + cache.put(i, "Hello World"); + } + assertThat(listener.evicted(), is(0)); + + cacheManager.close(); + } + + @Test + public void testPersistentDiskTier() throws Exception { + // tag::diskPersistent[] + CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(getStoragePath())) // <1> + .withCache("myCache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().disk(1, MemoryUnit.GB, true))); // <2> + // end::diskPersistent[]f + } + + @Test + public void testNotShared() { + // tag::notShared[] + ResourcePools pool = ResourcePoolsBuilder.heap(10).build(); + + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("test-cache1", CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, pool)) + .withCache("test-cache2", CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, pool)) + .build(true); + // end::notShared[] + } + + @Test + public void byteSizedTieredCache() { + // tag::byteSizedTieredCache[] + CacheConfiguration usesConfiguredInCacheConfig = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, MemoryUnit.KB) // <1> + .offheap(10, MemoryUnit.MB)) // <2> + .withSizeOfMaxObjectGraph(1000) + .withSizeOfMaxObjectSize(1000, MemoryUnit.B) // <3> + .build(); + + CacheConfiguration usesDefaultSizeOfEngineConfig = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, MemoryUnit.KB)) + .build(); + + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withDefaultSizeOfMaxObjectSize(500, MemoryUnit.B) + .withDefaultSizeOfMaxObjectGraph(2000) // <4> + .withCache("usesConfiguredInCache", usesConfiguredInCacheConfig) + .withCache("usesDefaultSizeOfEngine", usesDefaultSizeOfEngineConfig) + .build(true); + // end::byteSizedTieredCache[] + } + + private String getStoragePath() throws IOException { + return folder.newFolder().getAbsolutePath(); + } +} diff --git a/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java b/ehcache-impl/src/test/java/org/ehcache/docs/UserManagedCaches.java similarity index 92% rename from impl/src/test/java/org/ehcache/docs/UserManagedCaches.java rename to ehcache-impl/src/test/java/org/ehcache/docs/UserManagedCaches.java index fcf188eb0e..07be4a80f3 100644 --- a/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java +++ b/ehcache-impl/src/test/java/org/ehcache/docs/UserManagedCaches.java @@ -29,20 +29,25 @@ import org.ehcache.impl.persistence.DefaultLocalPersistenceService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.impl.config.persistence.UserManagedPersistenceContext; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import java.io.File; -import java.net.URISyntaxException; +import java.io.IOException; import java.util.concurrent.Executors; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * UserManagedCaches */ public class UserManagedCaches { + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + @Test public void userManagedCacheExample() { // tag::userManagedCacheExample[] @@ -101,8 +106,8 @@ public void userManagedListenerCache() throws Exception { // end::userManagedListenerCache[] } - private String getStoragePath() throws URISyntaxException { - return getClass().getClassLoader().getResource(".").toURI().getPath(); + private String getStoragePath() throws IOException { + return diskPath.newFolder().getAbsolutePath(); } } diff --git a/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java b/ehcache-impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java similarity index 89% rename from impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java rename to ehcache-impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java index 87b088be32..f5417b2422 100644 --- a/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java +++ b/ehcache-impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java @@ -15,7 +15,7 @@ */ package org.ehcache.docs.plugs; -import org.ehcache.core.EhcacheWithLoaderWriter; +import org.ehcache.core.Ehcache; import org.ehcache.event.CacheEvent; import org.ehcache.event.CacheEventListener; import org.ehcache.event.EventType; @@ -29,7 +29,7 @@ public class ListenerObject implements CacheEventListener { private int evicted; @Override public void onEvent(CacheEvent event) { - Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "GettingStarted"); + Logger logger = LoggerFactory.getLogger(Ehcache.class + "-" + "GettingStarted"); logger.info(event.getType().toString()); if(event.getType() == EventType.EVICTED){ evicted++; diff --git a/impl/src/test/java/org/ehcache/docs/plugs/LongCopier.java b/ehcache-impl/src/test/java/org/ehcache/docs/plugs/LongCopier.java similarity index 100% rename from impl/src/test/java/org/ehcache/docs/plugs/LongCopier.java rename to ehcache-impl/src/test/java/org/ehcache/docs/plugs/LongCopier.java diff --git a/impl/src/test/java/org/ehcache/docs/plugs/OddKeysEvictionAdvisor.java b/ehcache-impl/src/test/java/org/ehcache/docs/plugs/OddKeysEvictionAdvisor.java similarity index 100% rename from impl/src/test/java/org/ehcache/docs/plugs/OddKeysEvictionAdvisor.java rename to ehcache-impl/src/test/java/org/ehcache/docs/plugs/OddKeysEvictionAdvisor.java diff --git a/impl/src/test/java/org/ehcache/docs/plugs/SampleLoaderWriter.java b/ehcache-impl/src/test/java/org/ehcache/docs/plugs/SampleLoaderWriter.java similarity index 87% rename from impl/src/test/java/org/ehcache/docs/plugs/SampleLoaderWriter.java rename to ehcache-impl/src/test/java/org/ehcache/docs/plugs/SampleLoaderWriter.java index e99d9e3a4c..bf69f64002 100644 --- a/impl/src/test/java/org/ehcache/docs/plugs/SampleLoaderWriter.java +++ b/ehcache-impl/src/test/java/org/ehcache/docs/plugs/SampleLoaderWriter.java @@ -44,7 +44,7 @@ public SampleLoaderWriter(Map initialData) { } @Override - public V load(K key) throws Exception { + public V load(K key) { lock.readLock().lock(); try { V value = data.get(key); @@ -56,12 +56,12 @@ public V load(K key) throws Exception { } @Override - public Map loadAll(Iterable keys) throws Exception { + public Map loadAll(Iterable keys) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void write(K key, V value) throws Exception { + public void write(K key, V value) { lock.writeLock().lock(); try { data.put(key, value); @@ -72,7 +72,7 @@ public void write(K key, V value) throws Exception { } @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { + public void writeAll(Iterable> entries) { lock.writeLock().lock(); try { for (Map.Entry entry : entries) { @@ -85,7 +85,7 @@ public void writeAll(Iterable> ent } @Override - public void delete(K key) throws Exception { + public void delete(K key) { lock.writeLock().lock(); try { data.remove(key); @@ -96,7 +96,7 @@ public void delete(K key) throws Exception { } @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { + public void deleteAll(Iterable keys) { lock.writeLock().lock(); try { for (K key : keys) { diff --git a/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java b/ehcache-impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java similarity index 100% rename from impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java rename to ehcache-impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/BaseCacheConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/BaseCacheConfigurationTest.java new file mode 100644 index 0000000000..f1d5ab30e7 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/BaseCacheConfigurationTest.java @@ -0,0 +1,56 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config; + +import org.ehcache.config.ResourcePools; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.mock; + +/** + * BaseCacheConfigurationTest + */ +public class BaseCacheConfigurationTest { + + @Test + public void testThrowsWithNullKeyType() { + NullPointerException thrown = assertThrows(NullPointerException.class, () -> new BaseCacheConfiguration<>(null, String.class, null, + null, null, mock(ResourcePools.class))); + assertThat(thrown, hasProperty("message", startsWith("keyType"))); + } + + @Test + public void testThrowsWithNullValueType() { + NullPointerException thrown = assertThrows(NullPointerException.class, () -> + new BaseCacheConfiguration<>(Long.class, null, null, + null, null, mock(ResourcePools.class))); + assertThat(thrown, hasProperty("message", startsWith("valueType"))); + } + + @Test + public void testThrowsWithNullResourcePools() { + NullPointerException thrown = assertThrows(NullPointerException.class, () -> + new BaseCacheConfiguration<>(Long.class, String.class, null, + null, null, null)); + assertThat(thrown, hasProperty("message", startsWith("resourcePools"))); + } + +} diff --git a/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/ResourcePoolsImplTest.java similarity index 83% rename from core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/config/ResourcePoolsImplTest.java index 4c7ca74a7e..d8a1793c6e 100644 --- a/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/ResourcePoolsImplTest.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.ehcache.core.config; +package org.ehcache.impl.config; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; @@ -25,19 +25,20 @@ import org.hamcrest.Matchers; import org.junit.Test; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.List; +import static java.util.Arrays.asList; +import static java.util.Arrays.stream; +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; import static org.ehcache.config.ResourceType.Core.HEAP; import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.KB; import static org.ehcache.config.units.MemoryUnit.MB; -import static org.ehcache.core.config.ResourcePoolsImpl.validateResourcePools; +import static org.ehcache.impl.config.ResourcePoolsImpl.validateResourcePools; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** @@ -238,10 +239,8 @@ public void testMemoryResourceUnequalUnitInversion() { @Test public void testAddingNewTierWhileUpdating() { - ResourcePools existing = new ResourcePoolsImpl(Collections., ResourcePool>singletonMap( - ResourceType.Core.HEAP, new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, 10L, EntryUnit.ENTRIES, false))); - ResourcePools toBeUpdated = new ResourcePoolsImpl(Collections., ResourcePool>singletonMap( - ResourceType.Core.DISK, new SizedResourcePoolImpl<>(ResourceType.Core.DISK, 10L, MemoryUnit.MB, false))); + ResourcePools existing = resources(new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, 10L, EntryUnit.ENTRIES, false)); + ResourcePools toBeUpdated = resources(new SizedResourcePoolImpl<>(ResourceType.Core.DISK, 10L, MemoryUnit.MB, false)); try { existing.validateAndMerge(toBeUpdated); fail(); @@ -252,8 +251,8 @@ public void testAddingNewTierWhileUpdating() { @Test public void testUpdatingOffHeap() { - ResourcePools existing = ResourcePoolsHelper.createOffheapOnlyPools(10); - ResourcePools toBeUpdated = ResourcePoolsHelper.createOffheapOnlyPools(50); + ResourcePools existing = resources(new SizedResourcePoolImpl<>(ResourceType.Core.OFFHEAP, 10L, MemoryUnit.MB, false)); + ResourcePools toBeUpdated = resources(new SizedResourcePoolImpl<>(ResourceType.Core.OFFHEAP, 50L, MemoryUnit.MB, false)); try { existing.validateAndMerge(toBeUpdated); fail(); @@ -264,8 +263,8 @@ public void testUpdatingOffHeap() { @Test public void testUpdatingDisk() { - ResourcePools existing = ResourcePoolsHelper.createDiskOnlyPools(10, MB); - ResourcePools toBeUpdated = ResourcePoolsHelper.createDiskOnlyPools(50, MB); + ResourcePools existing = resources(new SizedResourcePoolImpl<>(ResourceType.Core.DISK, 10L, MemoryUnit.MB, false)); + ResourcePools toBeUpdated = resources(new SizedResourcePoolImpl<>(ResourceType.Core.DISK, 50L, MemoryUnit.MB, false)); try { existing.validateAndMerge(toBeUpdated); fail(); @@ -276,8 +275,13 @@ public void testUpdatingDisk() { @Test public void testUpdateResourceUnitSuccess() { - ResourcePools existing = ResourcePoolsHelper.createHeapDiskPools(200, MB, 4096); - ResourcePools toBeUpdated = ResourcePoolsHelper.createHeapOnlyPools(2, MemoryUnit.GB); + ResourcePools existing = resources( + new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, 200L, MemoryUnit.MB, false), + new SizedResourcePoolImpl<>(ResourceType.Core.DISK, 4096L, MemoryUnit.MB, false) + ); + ResourcePools toBeUpdated = resources( + new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, 2, MemoryUnit.GB, false) + ); existing = existing.validateAndMerge(toBeUpdated); assertThat(existing.getPoolForResource(ResourceType.Core.HEAP).getSize(), Matchers.is(2L)); @@ -286,8 +290,13 @@ public void testUpdateResourceUnitSuccess() { @Test public void testUpdateResourceUnitFailure() { - ResourcePools existing = ResourcePoolsHelper.createHeapDiskPools(20, MB, 200); - ResourcePools toBeUpdated = ResourcePoolsHelper.createHeapOnlyPools(500, EntryUnit.ENTRIES); + ResourcePools existing = resources( + new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, 20L, MemoryUnit.MB, false), + new SizedResourcePoolImpl<>(ResourceType.Core.DISK, 200, MemoryUnit.MB, false) + ); + ResourcePools toBeUpdated = resources( + new SizedResourcePoolImpl<>(ResourceType.Core.HEAP, 500, EntryUnit.ENTRIES, false) + ); try { existing = existing.validateAndMerge(toBeUpdated); @@ -299,10 +308,7 @@ public void testUpdateResourceUnitFailure() { assertThat(existing.getPoolForResource(ResourceType.Core.HEAP).getUnit(), Matchers.is(MemoryUnit.MB)); } - private Collection asList(T value1, T value2) { - @SuppressWarnings("unchecked") - List list = Arrays.asList(value1, value2); - return list; + private static ResourcePoolsImpl resources(ResourcePool ... resources) { + return new ResourcePoolsImpl(stream(resources).collect(toMap(ResourcePool::getType, identity()))); } - } diff --git a/core/src/test/java/org/ehcache/core/config/SizedResourcePoolImplTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/SizedResourcePoolImplTest.java similarity index 93% rename from core/src/test/java/org/ehcache/core/config/SizedResourcePoolImplTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/config/SizedResourcePoolImplTest.java index 433effec13..dd15bbd519 100644 --- a/core/src/test/java/org/ehcache/core/config/SizedResourcePoolImplTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/SizedResourcePoolImplTest.java @@ -14,10 +14,9 @@ * limitations under the License. */ -package org.ehcache.core.config; +package org.ehcache.impl.config; import org.ehcache.config.ResourceType; -import org.ehcache.config.SizedResourcePool; import org.ehcache.config.units.EntryUnit; import org.junit.Test; diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfigurationTest.java new file mode 100644 index 0000000000..d8a31ad627 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfigurationTest.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.copy; + +import org.ehcache.impl.copy.IdentityCopier; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class DefaultCopyProviderConfigurationTest { + + @Test @SuppressWarnings({"unchecked", "rawtypes"}) + public void testDerivedConfigurationDetachesCorrectly() { + DefaultCopyProviderConfiguration configuration = new DefaultCopyProviderConfiguration(); + configuration.addCopierFor(String.class, (Class) IdentityCopier.class); + + DefaultCopyProviderConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getDefaults().get(String.class).getClazz(), sameInstance(IdentityCopier.class)); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfigurationTest.java new file mode 100644 index 0000000000..3386e7d447 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/event/CacheEventDispatcherFactoryConfigurationTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.event; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class CacheEventDispatcherFactoryConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + CacheEventDispatcherFactoryConfiguration configuration = new CacheEventDispatcherFactoryConfiguration("foobar"); + CacheEventDispatcherFactoryConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getThreadPoolAlias(), is(configuration.getThreadPoolAlias())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfigurationTest.java new file mode 100644 index 0000000000..71bec44e2b --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventDispatcherConfigurationTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.event; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class DefaultCacheEventDispatcherConfigurationTest { + + @Test + public void testDeriveDetachesProperly() { + DefaultCacheEventDispatcherConfiguration configuration = new DefaultCacheEventDispatcherConfiguration("foobar"); + DefaultCacheEventDispatcherConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getThreadPoolAlias(), is(configuration.getThreadPoolAlias())); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/event/DefaultEventSourceConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/event/DefaultEventSourceConfigurationTest.java new file mode 100644 index 0000000000..0774c54281 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/event/DefaultEventSourceConfigurationTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.event; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class DefaultEventSourceConfigurationTest { + + @Test + public void testDeriveDetachesProperly() { + DefaultEventSourceConfiguration configuration = new DefaultEventSourceConfiguration(42); + DefaultEventSourceConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getDispatcherConcurrency(), is(configuration.getDispatcherConcurrency())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfigurationTest.java new file mode 100644 index 0000000000..a0a7396526 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/executor/PooledExecutionServiceConfigurationTest.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.executor; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class PooledExecutionServiceConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); + configuration.addPool("foo", 1, 2); + + PooledExecutionServiceConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfigurationTest.java new file mode 100644 index 0000000000..02b0641b88 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfigurationTest.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.loaderwriter; + +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.junit.Test; + +import static org.ehcache.test.MockitoUtil.mock; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class DefaultCacheLoaderWriterConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + CacheLoaderWriter mock = mock(CacheLoaderWriter.class); + DefaultCacheLoaderWriterConfiguration configuration = new DefaultCacheLoaderWriterConfiguration(mock); + DefaultCacheLoaderWriterConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getInstance(), sameInstance(configuration.getInstance())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfigurationTest.java new file mode 100644 index 0000000000..a55d57444d --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfigurationTest.java @@ -0,0 +1,41 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.loaderwriter; + +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.junit.Test; + +import static org.ehcache.test.MockitoUtil.mock; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class DefaultCacheLoaderWriterProviderConfigurationTest { + + @Test @SuppressWarnings("unchecked") + public void testDeriveDetachesCorrectly() { + DefaultCacheLoaderWriterProviderConfiguration configuration = new DefaultCacheLoaderWriterProviderConfiguration(); + configuration.addLoaderFor("foo", (Class>) mock(CacheLoaderWriter.class).getClass()); + + DefaultCacheLoaderWriterProviderConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getDefaults(), is(not(sameInstance(configuration.getDefaults())))); + assertThat(derived.getDefaults(), is(configuration.getDefaults())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfigurationTest.java new file mode 100644 index 0000000000..df541d270d --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/loaderwriter/writebehind/WriteBehindProviderConfigurationTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.loaderwriter.writebehind; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class WriteBehindProviderConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + WriteBehindProviderConfiguration configuration = new WriteBehindProviderConfiguration("foobar"); + WriteBehindProviderConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getThreadPoolAlias(), is(configuration.getThreadPoolAlias())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfigurationTest.java new file mode 100644 index 0000000000..83a4fc3787 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/persistence/DefaultPersistenceConfigurationTest.java @@ -0,0 +1,38 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.persistence; + +import org.junit.Test; + +import java.io.File; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class DefaultPersistenceConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + DefaultPersistenceConfiguration configuration = new DefaultPersistenceConfiguration(new File("foo")); + DefaultPersistenceConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getRootDirectory(), is(configuration.getRootDirectory())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyConfigurationTest.java new file mode 100644 index 0000000000..044e8b7f09 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyConfigurationTest.java @@ -0,0 +1,110 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.config.resilience; + +import org.ehcache.impl.internal.resilience.RobustResilienceStrategy; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.junit.Test; + +import static org.ehcache.test.MockitoUtil.mock; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsArrayContainingInOrder.arrayContaining; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsNull.nullValue; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.junit.Assert.fail; + +public class DefaultResilienceStrategyConfigurationTest { + + @Test + public void testBindOnInstanceConfigurationReturnsSelf() { + DefaultResilienceStrategyConfiguration configuration = new DefaultResilienceStrategyConfiguration((ResilienceStrategy) mock(ResilienceStrategy.class)); + assertThat(configuration.bind(null), sameInstance(configuration)); + } + + @Test + public void testLoaderWriterBindOnInstanceConfigurationReturnsSelf() { + DefaultResilienceStrategyConfiguration configuration = new DefaultResilienceStrategyConfiguration((ResilienceStrategy) mock(ResilienceStrategy.class)); + assertThat(configuration.bind(null, null), sameInstance(configuration)); + } + + @Test + public void testBindOnRegularConfigurationAppendsParameters() { + Object foo = new Object(); + DefaultResilienceStrategyConfiguration configuration = new DefaultResilienceStrategyConfiguration(RobustResilienceStrategy.class, foo); + RecoveryStore recoveryStore = mock(RecoveryStore.class); + DefaultResilienceStrategyConfiguration bound = configuration.bind(recoveryStore); + + assertThat(bound.getArguments(), arrayContaining(foo, recoveryStore)); + assertThat(bound.getClazz(), sameInstance(RobustResilienceStrategy.class)); + assertThat(bound.getInstance(), nullValue()); + } + + @Test + public void testLoaderWriterBindOnInstanceConfigurationAppendsParameters() { + Object foo = new Object(); + DefaultResilienceStrategyConfiguration configuration = new DefaultResilienceStrategyConfiguration(RobustResilienceStrategy.class, foo); + RecoveryStore recoveryStore = mock(RecoveryStore.class); + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + DefaultResilienceStrategyConfiguration bound = configuration.bind(recoveryStore, loaderWriter); + + assertThat(bound.getArguments(), arrayContaining(foo, recoveryStore, loaderWriter)); + assertThat(bound.getClazz(), sameInstance(RobustResilienceStrategy.class)); + assertThat(bound.getInstance(), nullValue()); + } + + @Test + public void testAlreadyBoundConfigurationCannotBeBound() { + DefaultResilienceStrategyConfiguration configuration = new DefaultResilienceStrategyConfiguration(RobustResilienceStrategy.class); + RecoveryStore recoveryStore = mock(RecoveryStore.class); + CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); + DefaultResilienceStrategyConfiguration bound = configuration.bind(recoveryStore, loaderWriter); + + try { + bound.bind(recoveryStore); + fail("Expected IllegalStateException"); + } catch (IllegalStateException e) { + //expected + } + } + + @Test + public void testAlreadyBoundLoaderWriterConfigurationCannotBeBound() { + DefaultResilienceStrategyConfiguration configuration = new DefaultResilienceStrategyConfiguration(RobustResilienceStrategy.class); + RecoveryStore recoveryStore = mock(RecoveryStore.class); + DefaultResilienceStrategyConfiguration bound = configuration.bind(recoveryStore); + + try { + bound.bind(recoveryStore); + fail("Expected IllegalStateException"); + } catch (IllegalStateException e) { + //expected + } + } + + @Test + public void testDeriveDetachesCorrectly() { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + DefaultResilienceStrategyConfiguration configuration = new DefaultResilienceStrategyConfiguration(resilienceStrategy); + DefaultResilienceStrategyConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getInstance(), sameInstance(configuration.getInstance())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyProviderConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyProviderConfigurationTest.java new file mode 100644 index 0000000000..e1950302f1 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/resilience/DefaultResilienceStrategyProviderConfigurationTest.java @@ -0,0 +1,45 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.resilience; + +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.junit.Test; + +import static org.ehcache.test.MockitoUtil.mock; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class DefaultResilienceStrategyProviderConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.setDefaultResilienceStrategy(mock(ResilienceStrategy.class)); + configuration.setDefaultLoaderWriterResilienceStrategy(mock(ResilienceStrategy.class)); + configuration.addResilienceStrategyFor("foo", mock(ResilienceStrategy.class)); + + DefaultResilienceStrategyProviderConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getDefaultConfiguration(), is(configuration.getDefaultConfiguration())); + assertThat(derived.getDefaultLoaderWriterConfiguration(), is(configuration.getDefaultLoaderWriterConfiguration())); + assertThat(derived.getDefaults(), is(not(sameInstance(configuration.getDefaults())))); + assertThat(derived.getDefaults(), is(configuration.getDefaults())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java new file mode 100644 index 0000000000..cb26efaf69 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java @@ -0,0 +1,205 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.serializer; + +import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.junit.Test; + +import java.nio.ByteBuffer; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.startsWith; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertThrows; + +public class DefaultSerializationProviderConfigurationTest { + + @Test + public void testAddSerializerFor() throws Exception { + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + config.addSerializerFor(Long.class, MinimalSerializer.class); + + assertSame(MinimalSerializer.class, config.getDefaultSerializers().get(Long.class)); + } + + @Test + public void testAddSerializerForDuplicateThrows() throws Exception { + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + config.addSerializerFor(Long.class, MinimalSerializer.class); + IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> config.addSerializerFor(Long.class, MinimalSerializer.class)); + assertThat(thrown, hasProperty("message", startsWith("Duplicate serializer for class"))); + } + + @Test + public void testAddSerializerForConstructorless() throws Exception { + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> config.addSerializerFor(Long.class, UnusableSerializer.class)); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); + } + + @Test + public void testAddSerializerForStatefulSerializer() throws Exception { + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + config.addSerializerFor(Long.class, MinimalStatefulSerializer.class); + assertSame(MinimalStatefulSerializer.class, config.getDefaultSerializers().get(Long.class)); + } + + @Test + public void testAddSerializerForStatefulConstructorless() throws Exception { + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> config.addSerializerFor(Long.class, UnusableStatefulSerializer.class)); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); + } + + @Test + public void testAddSerializerForLegacySerializer() throws Exception { + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> config.addSerializerFor(Long.class, LegacySerializer.class)); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); + } + + @Test @SuppressWarnings("unchecked") + public void testDeriveDetachesCorrectly() { + DefaultSerializationProviderConfiguration configuration = new DefaultSerializationProviderConfiguration(); + configuration.addSerializerFor(String.class, (Class) JavaSerializer.class); + + DefaultSerializationProviderConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getDefaultSerializers(), is(not(sameInstance(configuration.getDefaultSerializers())))); + assertThat(derived.getDefaultSerializers().get(String.class), sameInstance(JavaSerializer.class)); + } + + private static class MinimalSerializer implements Serializer { + + public MinimalSerializer(ClassLoader loader) { + } + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + } + + private static class LegacySerializer implements Serializer { + + public LegacySerializer(ClassLoader loader, FileBasedPersistenceContext context) { + } + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + } + + private static class UnusableSerializer implements Serializer { + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + } + + private static class MinimalStatefulSerializer implements StatefulSerializer { + + public MinimalStatefulSerializer(ClassLoader loader) { + } + + @Override + public void init(final StateRepository stateRepository) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + } + + private static class UnusableStatefulSerializer implements StatefulSerializer { + + @Override + public void init(final StateRepository stateRepository) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java similarity index 91% rename from impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java index 1acc4368b0..0b15b4d954 100644 --- a/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java @@ -42,8 +42,8 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * SerializerCountingTest @@ -80,8 +80,8 @@ public void tearDown() { public void testOnHeapPutGet() { Cache cache = cacheManager.createCache("onHeap", newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) + .withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .build()); cache.put(42L, "TheAnswer!"); @@ -122,8 +122,8 @@ public void testOffHeapPutGet() { public void testOffHeapOnHeapCopyPutGet() { Cache cache = cacheManager.createCache("offHeap", newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) - .add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) + .withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .build() ); @@ -146,8 +146,8 @@ public void testOffHeapOnHeapCopyPutGet() { public void testDiskOffHeapOnHeapCopyPutGet() { Cache cache = cacheManager.createCache("offHeap", newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(2, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB).disk(100, MemoryUnit.MB)) - .add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) + .withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .build() ); diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfigurationTest.java new file mode 100644 index 0000000000..c22d4c6132 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreConfigurationTest.java @@ -0,0 +1,38 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.store.disk; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class OffHeapDiskStoreConfigurationTest { + + @Test + public void testDeriveDetachesProperly() { + OffHeapDiskStoreConfiguration configuration = new OffHeapDiskStoreConfiguration("foobar", 16, 42); + OffHeapDiskStoreConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getThreadPoolAlias(), is(configuration.getThreadPoolAlias())); + assertThat(derived.getDiskSegments(), is(configuration.getDiskSegments())); + assertThat(derived.getWriterConcurrency(), is(configuration.getWriterConcurrency())); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfigurationTest.java new file mode 100644 index 0000000000..aca89ecc60 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/config/store/disk/OffHeapDiskStoreProviderConfigurationTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.config.store.disk; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; + +public class OffHeapDiskStoreProviderConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + OffHeapDiskStoreProviderConfiguration configuration = new OffHeapDiskStoreProviderConfiguration("foobar"); + OffHeapDiskStoreProviderConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getThreadPoolAlias(), is(configuration.getThreadPoolAlias())); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/events/CacheEventDispatcherImplTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/events/CacheEventDispatcherImplTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/events/CacheEventDispatcherImplTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/events/CacheEventDispatcherImplTest.java diff --git a/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java similarity index 86% rename from impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java index 7a3d6d7a49..e22c643ef0 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java @@ -19,14 +19,12 @@ import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.core.spi.ServiceLocator; import org.junit.Test; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.sameInstance; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.mockito.Mockito.mock; /** diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/TimeSourceConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/TimeSourceConfigurationTest.java new file mode 100644 index 0000000000..eec92b7ac9 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/TimeSourceConfigurationTest.java @@ -0,0 +1,38 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal; + +import org.ehcache.core.spi.time.TimeSource; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.mockito.Mockito.mock; + +public class TimeSourceConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + TimeSourceConfiguration configuration = new TimeSourceConfiguration(mock(TimeSource.class)); + TimeSourceConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getTimeSource(), sameInstance(configuration.getTimeSource())); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfigurationTest.java similarity index 89% rename from impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfigurationTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfigurationTest.java index 7983b50c60..001e4154f9 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfigurationTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfigurationTest.java @@ -30,7 +30,7 @@ public class ClassInstanceProviderConfigurationTest { @Test public void testOrdering() throws Exception { - ClassInstanceProviderConfiguration classInstanceProviderFactoryConfig = new ClassInstanceProviderConfiguration<>(); + ClassInstanceProviderConfiguration> classInstanceProviderFactoryConfig = new ClassInstanceProviderConfiguration<>(); for (int i = 0; i < 100; i++) { classInstanceProviderFactoryConfig.getDefaults().put("" + i, new ClassInstanceConfiguration(String.class)); diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java new file mode 100644 index 0000000000..5c467fcdde --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java @@ -0,0 +1,231 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.classes; + +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.junit.Test; + +import java.io.Closeable; +import java.io.IOException; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +/** + * @author Ludovic Orban + */ +public class ClassInstanceProviderTest { + + @SuppressWarnings("unchecked") + private Class> configClass = (Class)ClassInstanceConfiguration.class; + + @Test + public void testNewInstanceUsingAliasAndNoArgs() throws Exception { + ClassInstanceProvider, TestService> classInstanceProvider = new ClassInstanceProvider<>(null, configClass); + + classInstanceProvider.preconfigured.put("test stuff", new ClassInstanceConfiguration(TestService.class)); + TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); + + assertThat(obj.theString, is(nullValue())); + } + + @Test + public void testNewInstanceUsingAliasAndArg() throws Exception { + ClassInstanceProvider, TestService> classInstanceProvider = new ClassInstanceProvider<>(null, configClass); + + classInstanceProvider.preconfigured.put("test stuff", new ClassInstanceConfiguration<>(TestService.class, "test string")); + TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); + + assertThat(obj.theString, equalTo("test string")); + } + + @Test + public void testNewInstanceUsingServiceConfig() throws Exception { + ClassInstanceProvider, TestService> classInstanceProvider = new ClassInstanceProvider<>(null, configClass); + + TestServiceConfiguration config = new TestServiceConfiguration(); + TestService obj = classInstanceProvider.newInstance("test stuff", config); + + assertThat(obj.theString, is(nullValue())); + } + + @Test + public void testNewInstanceUsingServiceConfigFactory() throws Exception { + TestServiceProviderConfiguration factoryConfig = new TestServiceProviderConfiguration(); + factoryConfig.getDefaults().put("test stuff", new ClassInstanceConfiguration(TestService.class)); + + ClassInstanceProvider, TestService> classInstanceProvider = new ClassInstanceProvider<>(factoryConfig, configClass); + classInstanceProvider.start(null); + + TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); + assertThat(obj.theString, is(nullValue())); + } + + @Test(expected = IllegalArgumentException.class) + public void testReleaseInstanceByAnotherProvider() throws Exception { + ClassInstanceProvider, String> classInstanceProvider = new ClassInstanceProvider<>(null, null); + + classInstanceProvider.releaseInstance("foo"); + } + + @Test(expected = IllegalArgumentException.class) + public void testReleaseSameInstanceMultipleTimesThrows() throws Exception { + ClassInstanceProvider, String> classInstanceProvider = new ClassInstanceProvider<>(null, null); + classInstanceProvider.providedVsCount.put("foo", new AtomicInteger(1)); + + classInstanceProvider.releaseInstance("foo"); + classInstanceProvider.releaseInstance("foo"); + } + + @Test + public void testReleaseCloseableInstance() throws Exception { + ClassInstanceProvider, Closeable> classInstanceProvider = new ClassInstanceProvider<>(null, null); + Closeable closeable = mock(Closeable.class); + classInstanceProvider.providedVsCount.put(closeable, new AtomicInteger(1)); + classInstanceProvider.instantiated.add(closeable); + + classInstanceProvider.releaseInstance(closeable); + verify(closeable).close(); + } + + @Test(expected = IOException.class) + public void testReleaseCloseableInstanceThrows() throws Exception { + ClassInstanceProvider, Closeable> classInstanceProvider = new ClassInstanceProvider<>(null, null); + Closeable closeable = mock(Closeable.class); + doThrow(IOException.class).when(closeable).close(); + classInstanceProvider.providedVsCount.put(closeable, new AtomicInteger(1)); + classInstanceProvider.instantiated.add(closeable); + + classInstanceProvider.releaseInstance(closeable); + } + + @Test + public void testNewInstanceWithActualInstanceInServiceConfig() throws Exception { + ClassInstanceProvider, TestService> classInstanceProvider = new ClassInstanceProvider<>(null, configClass); + + TestService service = new TestService(); + TestServiceConfiguration config = new TestServiceConfiguration(service); + + TestService newService = classInstanceProvider.newInstance("test stuff", config); + + assertThat(newService, sameInstance(service)); + } + + @Test + public void testSameInstanceRetrievedMultipleTimesUpdatesTheProvidedCount() throws Exception { + ClassInstanceProvider, TestService> classInstanceProvider = new ClassInstanceProvider<>(null, configClass); + + TestService service = new TestService(); + TestServiceConfiguration config = new TestServiceConfiguration(service); + + TestService newService = classInstanceProvider.newInstance("test stuff", config); + assertThat(newService, sameInstance(service)); + assertThat(classInstanceProvider.providedVsCount.get(service).get(), is(1)); + newService = classInstanceProvider.newInstance("test stuff", config); + assertThat(newService, sameInstance(service)); + assertThat(classInstanceProvider.providedVsCount.get(service).get(), is(2)); + } + + @Test + public void testInstancesNotCreatedByProviderDoesNotClose() throws IOException { + @SuppressWarnings("unchecked") + Class> configClass = (Class) ClassInstanceConfiguration.class; + ClassInstanceProvider, TestCloseableService> classInstanceProvider = new ClassInstanceProvider<>(null, configClass); + + TestCloseableService service = mock(TestCloseableService.class); + TestCloaseableServiceConfig config = new TestCloaseableServiceConfig(service); + + TestCloseableService newService = classInstanceProvider.newInstance("testClose", config); + assertThat(newService, sameInstance(service)); + classInstanceProvider.releaseInstance(newService); + verify(service, times(0)).close(); + + } + + + public static abstract class TestCloseableService implements Service, Closeable { + + } + + public static class TestCloaseableServiceConfig extends ClassInstanceConfiguration implements ServiceConfiguration { + + public TestCloaseableServiceConfig() { + super(TestCloseableService.class); + } + + public TestCloaseableServiceConfig(TestCloseableService testCloseableService) { + super(testCloseableService); + } + + @Override + public Class getServiceType() { + return TestCloseableService.class; + } + } + + public static class TestService implements Service { + public final String theString; + + public TestService() { + this(null); + } + + public TestService(String theString) { + this.theString = theString; + } + + @Override + public void start(ServiceProvider serviceProvider) { + } + + @Override + public void stop() { + } + } + + public static class TestServiceConfiguration extends ClassInstanceConfiguration implements ServiceConfiguration { + public TestServiceConfiguration() { + super(TestService.class); + } + + public TestServiceConfiguration(TestService service) { + super(service); + } + + @Override + public Class getServiceType() { + return TestService.class; + } + } + + public static class TestServiceProviderConfiguration extends ClassInstanceProviderConfiguration> implements ServiceConfiguration { + @Override + public Class getServiceType() { + return TestService.class; + } + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java similarity index 99% rename from impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java index ff3bdb1180..d5c93b27b4 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java @@ -25,10 +25,10 @@ import static org.ehcache.config.Eviction.noAdvice; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; import static org.hamcrest.number.OrderingComparison.greaterThan; -import static org.junit.Assert.assertThat; /** * @author Alex Snaps diff --git a/impl/src/test/java/org/ehcache/impl/internal/concurrent/otherPackage/V8FeaturesTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/concurrent/otherPackage/V8FeaturesTest.java similarity index 99% rename from impl/src/test/java/org/ehcache/impl/internal/concurrent/otherPackage/V8FeaturesTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/concurrent/otherPackage/V8FeaturesTest.java index be4efb1f09..167e0b3189 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/concurrent/otherPackage/V8FeaturesTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/concurrent/otherPackage/V8FeaturesTest.java @@ -25,10 +25,10 @@ import java.util.concurrent.atomic.AtomicInteger; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; /** * @author Ludovic Orban diff --git a/impl/src/test/java/org/ehcache/impl/internal/copy/IdentityCopierTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/copy/IdentityCopierTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/copy/IdentityCopierTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/copy/IdentityCopierTest.java diff --git a/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java similarity index 95% rename from impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java index 2451651616..ee7193de05 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java @@ -28,8 +28,8 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutorService; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -92,7 +92,7 @@ public void testCreateCacheEventReturnsDisabledDispatcherWhenThreadPoolFound() t cacheEventDispatcherFactory.start(serviceProvider); Store store = mock(Store.class); - CacheEventDispatcher dispatcher = cacheEventDispatcherFactory.createCacheEventDispatcher(store, new DefaultCacheEventDispatcherConfiguration("myAlias")); + CacheEventDispatcher dispatcher = cacheEventDispatcherFactory.createCacheEventDispatcher(store, new DefaultCacheEventDispatcherConfiguration("myAlias")); assertThat(dispatcher, instanceOf(CacheEventDispatcherImpl.class)); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java similarity index 76% rename from impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java index d078fda150..aa74ce4d45 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java @@ -18,7 +18,6 @@ import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.event.EventType; -import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; import org.hamcrest.Matcher; import org.junit.Before; @@ -26,13 +25,13 @@ import org.mockito.InOrder; import java.util.HashSet; +import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; import static org.ehcache.impl.internal.store.offheap.AbstractOffHeapStoreTest.eventType; +import static org.ehcache.test.MockitoUtil.mock; import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.hamcrest.MockitoHamcrest.argThat; @@ -48,20 +47,19 @@ public class FudgingInvocationScopedEventSinkTest { private Matcher> evictedMatcher = eventType(EventType.EVICTED); @Before - @SuppressWarnings("unchecked") public void setUp() { - HashSet> storeEventListeners = new HashSet<>(); + Set> storeEventListeners = new HashSet<>(); listener = mock(StoreEventListener.class); storeEventListeners.add(listener); - eventSink = new FudgingInvocationScopedEventSink(new HashSet<>(), - false, new BlockingQueue[] { new ArrayBlockingQueue>(10) }, storeEventListeners); - + @SuppressWarnings({"unchecked", "rawtypes"}) + BlockingQueue>[] blockingQueues = new BlockingQueue[] { new ArrayBlockingQueue>(10) }; + eventSink = new FudgingInvocationScopedEventSink<>(new HashSet<>(), false, blockingQueues, storeEventListeners); } @Test public void testEvictedDifferentKeyNoImpact() { eventSink.created("k1", "v1"); - eventSink.evicted("k2", supplierOf("v2")); + eventSink.evicted("k2", () -> "v2"); eventSink.close(); InOrder inOrder = inOrder(listener); @@ -72,8 +70,8 @@ public void testEvictedDifferentKeyNoImpact() { @Test public void testEvictedSameKeyAfterUpdateReplacesWithEvictCreate() { - eventSink.updated("k1", supplierOf("v0"), "v1"); - eventSink.evicted("k1", supplierOf("v0")); + eventSink.updated("k1", () -> "v0", "v1"); + eventSink.evicted("k1", () -> "v0"); eventSink.close(); InOrder inOrder = inOrder(listener); @@ -84,9 +82,9 @@ public void testEvictedSameKeyAfterUpdateReplacesWithEvictCreate() { @Test public void testEvictedSameKeyAfterCreateFudgesExpiryToo() { - eventSink.expired("k1", supplierOf("v0")); + eventSink.expired("k1", () -> "v0"); eventSink.created("k1", "v1"); - eventSink.evicted("k1", supplierOf("v0")); + eventSink.evicted("k1", () -> "v0"); eventSink.close(); InOrder inOrder = inOrder(listener); @@ -97,10 +95,10 @@ public void testEvictedSameKeyAfterCreateFudgesExpiryToo() { @Test public void testEvictedSameKeyAfterUpdateReplacesWithEvictCreateEvenWithMultipleEvictsInBetween() { - eventSink.updated("k1", supplierOf("v0"), "v1"); - eventSink.evicted("k2", supplierOf("v2")); - eventSink.evicted("k3", supplierOf("v3")); - eventSink.evicted("k1", supplierOf("v0")); + eventSink.updated("k1", () -> "v0", "v1"); + eventSink.evicted("k2", () -> "v2"); + eventSink.evicted("k3", () -> "v3"); + eventSink.evicted("k1", () -> "v0"); eventSink.close(); InOrder inOrder = inOrder(listener); @@ -111,11 +109,11 @@ public void testEvictedSameKeyAfterUpdateReplacesWithEvictCreateEvenWithMultiple @Test public void testEvictedSameKeyAfterCreateFudgesExpiryTooEvenWithMultipleEvictsInBetween() { - eventSink.expired("k1", supplierOf("v0")); + eventSink.expired("k1", () -> "v0"); eventSink.created("k1", "v1"); - eventSink.evicted("k2", supplierOf("v2")); - eventSink.evicted("k3", supplierOf("v3")); - eventSink.evicted("k1", supplierOf("v0")); + eventSink.evicted("k2", () -> "v2"); + eventSink.evicted("k3", () -> "v3"); + eventSink.evicted("k1", () -> "v0"); eventSink.close(); InOrder inOrder = inOrder(listener); @@ -126,9 +124,9 @@ public void testEvictedSameKeyAfterCreateFudgesExpiryTooEvenWithMultipleEvictsIn @Test public void testEvictedKeyDoesNotFudgeOlderEvents() { - eventSink.updated("k1", supplierOf("v0"), "v1"); + eventSink.updated("k1", () -> "v0", "v1"); eventSink.created("k2", "v2"); - eventSink.evicted("k1", supplierOf("v0")); + eventSink.evicted("k1", () -> "v0"); eventSink.close(); InOrder inOrder = inOrder(listener); diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java new file mode 100644 index 0000000000..d2b93ef39c --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java @@ -0,0 +1,135 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.events; + +import org.ehcache.core.spi.store.events.StoreEvent; +import org.ehcache.core.spi.store.events.StoreEventFilter; +import org.ehcache.core.spi.store.events.StoreEventListener; +import org.ehcache.event.EventType; +import org.hamcrest.Matcher; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InOrder; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +import java.util.Collections; +import java.util.Set; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.IntStream; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.impl.internal.store.offheap.AbstractOffHeapStoreTest.eventType; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.hamcrest.MockitoHamcrest.argThat; + +/** + * InvocationScopedEventSinkTest + */ +public class InvocationScopedEventSinkTest { + + @Rule + public MockitoRule rule = MockitoJUnit.rule(); + + @Mock + private StoreEventListener listener; + + private InvocationScopedEventSink eventSink; + private BlockingQueue> blockingQueue; + private Set> storeEventListeners; + + @Before + public void setUp() { + storeEventListeners = Collections.singleton(listener); + blockingQueue = new ArrayBlockingQueue<>(10); + } + + private InvocationScopedEventSink createEventSink(boolean ordered) { + @SuppressWarnings("unchecked") + BlockingQueue>[] queues = (BlockingQueue>[]) new BlockingQueue[] { blockingQueue }; + return new InvocationScopedEventSink<>(Collections.emptySet(), ordered, queues, storeEventListeners); + } + + @Test + public void testReset() { + eventSink = createEventSink(false); + + eventSink.created("k1", "v1"); + eventSink.evicted("k1", () -> "v2"); + eventSink.reset(); + eventSink.created("k1", "v1"); + eventSink.updated("k1", () -> "v1", "v2"); + eventSink.evicted("k1", () -> "v2"); + eventSink.close(); + + InOrder inOrder = inOrder(listener); + Matcher> createdMatcher = eventType(EventType.CREATED); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); + Matcher> updatedMatcher = eventType(EventType.UPDATED); + inOrder.verify(listener).onEvent(argThat(updatedMatcher)); + Matcher> evictedMatcher = eventType(EventType.EVICTED); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); + verifyNoMoreInteractions(listener); + } + + /** + * Make sure an interrupted sink sets the interrupted flag and keep both event queues in the state + * as of before the event that was interrupted. + * + * @throws InterruptedException + */ + @Test + public void testInterruption() throws InterruptedException { + eventSink = createEventSink(true); + + // Add enough elements to fill the queue + IntStream.range(0, 10).forEachOrdered(i -> eventSink.created("k" + i, "v" + i)); + + AtomicBoolean wasInterrupted = new AtomicBoolean(false); + + Thread t = new Thread(() -> { + // add one element that will block on the full queue + eventSink.created("k", "v"); + wasInterrupted.set(Thread.currentThread().isInterrupted()); + }); + + t.start(); + while(blockingQueue.remainingCapacity() != 0) { + System.out.println(blockingQueue.remainingCapacity()); + } + + t.interrupt(); + t.join(); + + assertThat(wasInterrupted).isTrue(); + assertThat(blockingQueue).hasSize(10); + IntStream.range(0, 10).forEachOrdered(i -> { + try { + assertThat(blockingQueue.take().getEvent().getKey()).isEqualTo("k" + i); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + assertThat(eventSink.getEvents()).hasSize(10); + assertThat(eventSink.getEvents().getLast().getEvent().getKey()).isEqualTo("k9"); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/TestStoreEventDispatcher.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/TestStoreEventDispatcher.java similarity index 82% rename from impl/src/test/java/org/ehcache/impl/internal/events/TestStoreEventDispatcher.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/events/TestStoreEventDispatcher.java index 578951a29a..51bc63a89e 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/TestStoreEventDispatcher.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/events/TestStoreEventDispatcher.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.events; -import org.ehcache.ValueSupplier; import org.ehcache.event.EventType; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.core.events.StoreEventSink; @@ -26,6 +25,7 @@ import java.util.HashSet; import java.util.Set; +import java.util.function.Supplier; /** * TestStoreEventDispatcher @@ -85,6 +85,11 @@ public void setEventOrdering(boolean ordering) { throw new UnsupportedOperationException("Test impl cannot be made ordered"); } + @Override + public void setSynchronous(boolean synchronous) throws IllegalArgumentException { + throw new UnsupportedOperationException("Test impl cannot be made synchronous"); + } + @Override public boolean isEventOrdering() { return false; @@ -92,9 +97,9 @@ public boolean isEventOrdering() { private class EventBridge implements StoreEventSink { @Override - public void evicted(K key, ValueSupplier value) { - if (accepted(EventType.EVICTED, key, value.value(), null)) { - StoreEvent event = StoreEvents.evictEvent(key, value.value()); + public void evicted(K key, Supplier value) { + if (accepted(EventType.EVICTED, key, value.get(), null)) { + StoreEvent event = StoreEvents.evictEvent(key, value.get()); for (StoreEventListener listener : listeners) { listener.onEvent(event); } @@ -102,9 +107,9 @@ public void evicted(K key, ValueSupplier value) { } @Override - public void expired(K key, ValueSupplier value) { - if (accepted(EventType.EXPIRED, key, value.value(), null)) { - StoreEvent event = StoreEvents.expireEvent(key, value.value()); + public void expired(K key, Supplier value) { + if (accepted(EventType.EXPIRED, key, value.get(), null)) { + StoreEvent event = StoreEvents.expireEvent(key, value.get()); for (StoreEventListener listener : listeners) { listener.onEvent(event); } @@ -122,9 +127,9 @@ public void created(K key, V value) { } @Override - public void updated(K key, ValueSupplier previousValue, V newValue) { - if (accepted(EventType.UPDATED, key, previousValue.value(), newValue)) { - StoreEvent event = StoreEvents.updateEvent(key, previousValue.value(), newValue); + public void updated(K key, Supplier previousValue, V newValue) { + if (accepted(EventType.UPDATED, key, previousValue.get(), newValue)) { + StoreEvent event = StoreEvents.updateEvent(key, previousValue.get(), newValue); for (StoreEventListener listener : listeners) { listener.onEvent(event); } @@ -132,9 +137,9 @@ public void updated(K key, ValueSupplier previousValue, V newValue) { } @Override - public void removed(K key, ValueSupplier removed) { - if (accepted(EventType.REMOVED, key, removed.value(), null)) { - StoreEvent event = StoreEvents.removeEvent(key, removed.value()); + public void removed(K key, Supplier removed) { + if (accepted(EventType.REMOVED, key, removed.get(), null)) { + StoreEvent event = StoreEvents.removeEvent(key, removed.get()); for (StoreEventListener listener : listeners) { listener.onEvent(event); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java similarity index 99% rename from impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java index 91005b0c87..2e93bafd50 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java @@ -33,10 +33,11 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.hamcrest.collection.IsEmptyCollection.empty; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; diff --git a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutorTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutorTest.java similarity index 99% rename from impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutorTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutorTest.java index a736abddc0..fed0fc7cab 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutorTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedScheduledExecutorTest.java @@ -35,10 +35,10 @@ import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.concurrent.TimeUnit.SECONDS; import static org.ehcache.impl.internal.executor.ExecutorUtil.waitFor; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.hamcrest.collection.IsEmptyCollection.empty; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; public class PartitionedScheduledExecutorTest { diff --git a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutorTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutorTest.java similarity index 99% rename from impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutorTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutorTest.java index a1d15ad348..1f787f0684 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutorTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedUnorderedExecutorTest.java @@ -29,10 +29,10 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.hamcrest.collection.IsEmptyCollection.empty; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PooledExecutionServiceTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PooledExecutionServiceTest.java new file mode 100644 index 0000000000..8d5dc1f00f --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/executor/PooledExecutionServiceTest.java @@ -0,0 +1,152 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.executor; + +import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; +import org.ehcache.impl.internal.util.ThreadFactoryUtil; +import org.junit.After; +import org.junit.Test; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +/** + * @author Ludovic Orban + */ +public class PooledExecutionServiceTest { + + @Test + public void testEmptyConfigThrowsAtStart() throws Exception { + PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); + PooledExecutionService pooledExecutionService = new PooledExecutionService(configuration); + assertThatThrownBy(() -> pooledExecutionService.start(null)) + .isInstanceOf(IllegalStateException.class).hasMessage("Pool configuration is empty"); + } + + @Test + public void testGetOrderedExecutorFailsOnNonExistentPool() throws Exception { + PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); + configuration.addPool("getOrderedExecutorFailsOnNonExistentPool", 0, 1); + PooledExecutionService pooledExecutionService = new PooledExecutionService(configuration); + pooledExecutionService.start(null); + try { + assertThatThrownBy(() -> pooledExecutionService.getOrderedExecutor("abc", new LinkedBlockingDeque<>())) + .isInstanceOf(IllegalArgumentException.class).hasMessage("Pool 'abc' is not in the set of available pools [getOrderedExecutorFailsOnNonExistentPool]"); + } finally { + pooledExecutionService.stop(); + } + } + + @Test + public void testGetOrderedExecutorFailsOnNonExistentDefaultPool() throws Exception { + PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); + configuration.addPool("getOrderedExecutorFailsOnNonExistentDefaultPool", 0, 1); + PooledExecutionService pooledExecutionService = new PooledExecutionService(configuration); + pooledExecutionService.start(null); + try { + assertThatThrownBy(() -> pooledExecutionService.getOrderedExecutor(null, new LinkedBlockingDeque<>())) + .isInstanceOf(IllegalArgumentException.class).hasMessage("Null pool alias provided and no default pool configured"); + } finally { + pooledExecutionService.stop(); + } + } + + @Test + public void testGetOrderedExecutorSucceedsOnExistingPool() throws Exception { + PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); + configuration.addPool("getOrderedExecutorSucceedsOnExistingPool", 0, 1); + PooledExecutionService pooledExecutionService = new PooledExecutionService(configuration); + pooledExecutionService.start(null); + try { + pooledExecutionService.getOrderedExecutor("getOrderedExecutorSucceedsOnExistingPool", new LinkedBlockingDeque<>()).shutdown(); + } finally { + pooledExecutionService.stop(); + } + } + + @Test + public void testGetOrderedExecutorSucceedsOnExistingDefaultPool() throws Exception { + PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); + configuration.addDefaultPool("getOrderedExecutorSucceedsOnExistingDefaultPool", 0, 1); + PooledExecutionService pooledExecutionService = new PooledExecutionService(configuration); + pooledExecutionService.start(null); + try { + pooledExecutionService.getOrderedExecutor(null, new LinkedBlockingDeque<>()).shutdown(); + } finally { + pooledExecutionService.stop(); + } + } + + @Test + public void testAllThreadsAreStopped() throws Exception { + PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); + configuration.addDefaultPool("allThreadsAreStopped", 0, 1); + PooledExecutionService pooledExecutionService = new PooledExecutionService(configuration); + pooledExecutionService.start(null); + try { + final CountDownLatch latch = new CountDownLatch(1); + + pooledExecutionService.getScheduledExecutor("allThreadsAreStopped") + .execute(latch::countDown); + + assertThat(latch.await(30, TimeUnit.SECONDS)).isTrue(); + } finally { + pooledExecutionService.stop(); + } + + assertThat(Thread.currentThread().isInterrupted()).isFalse(); + + assertThat(pooledExecutionService.isStopped()).isTrue(); + } + + /** + * This method can be used to debug a failure in {@link #testAllThreadsAreStopped()} but also any other king of thread + * leaking. You can enable thread tracking in {@link ThreadFactoryUtil}. Note that on a slow machine, the detector might "lie". Because + * even if a thread pool is stopped, it doesn't mean all the underlying threads had the time to die. It only means that they are not + * processing any tasks anymore. + */ + public static void detectLeakingThreads() { + Set threadSet = Thread.getAllStackTraces().keySet(); + Set leakedThreads = new HashSet<>(); + + Map createdThreads = ThreadFactoryUtil.getCreatedThreads(); + + for(Thread thread : threadSet) { + if(thread.isAlive() && thread.getName().startsWith("Ehcache [")) { + int hash = System.identityHashCode(thread); + String stackTrace = null; + if(createdThreads != null) { + Exception exception = createdThreads.get(hash); + StringWriter errors = new StringWriter(); + exception.printStackTrace(new PrintWriter(errors)); + stackTrace = errors.toString(); + } + leakedThreads.add(thread + "(" + hash + ")" + stackTrace); + } + } + + assertThat(leakedThreads).isEmpty(); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java similarity index 87% rename from impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java index 46c3d733f5..b7d4096761 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java @@ -33,26 +33,22 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; import org.junit.Test; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.WriteBehindConfigurationBuilder.newBatchedWriteBehindConfiguration; import static org.ehcache.config.builders.WriteBehindConfigurationBuilder.newUnBatchedWriteBehindConfiguration; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyObject; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -64,7 +60,7 @@ */ public abstract class AbstractWriteBehindTestBase { - protected abstract CacheManagerBuilder managerBuilder(); + protected abstract CacheManagerBuilder managerBuilder(); protected abstract CacheConfigurationBuilder configurationBuilder(); @@ -75,7 +71,8 @@ public void testWriteOrdering() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testWriteOrdering", configurationBuilder() - .add(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 8).build()) + .withLoaderWriter(loaderWriter) + .withService(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 8).build()) .build()); CountDownLatch countDownLatch = new CountDownLatch(8); @@ -105,7 +102,8 @@ public void testWrites() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testWrites", CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(10)) - .add(newUnBatchedWriteBehindConfiguration().concurrencyLevel(3).queueSize(10).build()) + .withLoaderWriter(loaderWriter) + .withService(newUnBatchedWriteBehindConfiguration().concurrencyLevel(3).queueSize(10).build()) .build()); CountDownLatch countDownLatch = new CountDownLatch(4); @@ -130,7 +128,8 @@ public void testBulkWrites() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testBulkWrites", CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(100)) - .add(newUnBatchedWriteBehindConfiguration().concurrencyLevel(3).queueSize(10).build()) + .withLoaderWriter(loaderWriter) + .withService(newUnBatchedWriteBehindConfiguration().concurrencyLevel(3).queueSize(10).build()) .build()); CountDownLatch countDownLatch = new CountDownLatch(20); @@ -168,14 +167,15 @@ public void testBulkWrites() throws Exception { } @Test - public void testThatAllGetsReturnLatestData() throws BulkCacheWritingException, Exception { + public void testThatAllGetsReturnLatestData() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter<>(); CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testThatAllGetsReturnLatestData", configurationBuilder() - .add(newUnBatchedWriteBehindConfiguration().concurrencyLevel(3).queueSize(10).build()) + .withLoaderWriter(loaderWriter) + .withService(newUnBatchedWriteBehindConfiguration().concurrencyLevel(3).queueSize(10).build()) .build()); for (int i = 0; i < 10; i++) { @@ -218,7 +218,8 @@ public void testAllGetsReturnLatestDataWithKeyCollision() { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testAllGetsReturnLatestDataWithKeyCollision", configurationBuilder() - .add(newUnBatchedWriteBehindConfiguration().concurrencyLevel(3).queueSize(10).build()) + .withLoaderWriter(loaderWriter) + .withService(newUnBatchedWriteBehindConfiguration().concurrencyLevel(3).queueSize(10).build()) .build()); Random random = new Random(); @@ -247,7 +248,8 @@ public void testBatchedDeletedKeyReturnsNull() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testBatchedDeletedKeyReturnsNull", configurationBuilder() - .add(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).build()) + .withLoaderWriter(loaderWriter) + .withService(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).build()) .build()); assertThat(testCache.get("key"), is("value")); @@ -260,7 +262,7 @@ public void testBatchedDeletedKeyReturnsNull() throws Exception { @Test public void testUnBatchedDeletedKeyReturnsNull() throws Exception { - final Semaphore semaphore = new Semaphore(0); + Semaphore semaphore = new Semaphore(0); @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); @@ -274,7 +276,8 @@ public void testUnBatchedDeletedKeyReturnsNull() throws Exception { CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { Cache testCache = cacheManager.createCache("testUnBatchedDeletedKeyReturnsNull", configurationBuilder() - .add(newUnBatchedWriteBehindConfiguration().build()) + .withLoaderWriter(loaderWriter) + .withService(newUnBatchedWriteBehindConfiguration().build()) .build()); assertThat(testCache.get("key"), is("value")); @@ -297,7 +300,8 @@ public void testBatchedOverwrittenKeyReturnsNewValue() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testBatchedOverwrittenKeyReturnsNewValue", configurationBuilder() - .add(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).build()) + .withLoaderWriter(loaderWriter) + .withService(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).build()) .build()); assertThat(testCache.get("key"), is("value")); @@ -324,7 +328,8 @@ public void testUnBatchedOverwrittenKeyReturnsNewValue() throws Exception { CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { Cache testCache = cacheManager.createCache("testUnBatchedOverwrittenKeyReturnsNewValue", configurationBuilder() - .add(newUnBatchedWriteBehindConfiguration().build()) + .withLoaderWriter(loaderWriter) + .withService(newUnBatchedWriteBehindConfiguration().build()) .build()); assertThat(testCache.get("key"), is("value")); @@ -345,7 +350,8 @@ public void testCoaslecedWritesAreNotSeen() throws InterruptedException { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testCoaslecedWritesAreNotSeen", configurationBuilder() - .add(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).enableCoalescing().build()) + .withLoaderWriter(loaderWriter) + .withService(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).enableCoalescing().build()) .build()); CountDownLatch latch = new CountDownLatch(2); @@ -369,7 +375,8 @@ public void testUnBatchedWriteBehindStopWaitsForEmptyQueue() { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testUnBatchedWriteBehindStopWaitsForEmptyQueue", configurationBuilder() - .add(newUnBatchedWriteBehindConfiguration().build()) + .withLoaderWriter(loaderWriter) + .withService(newUnBatchedWriteBehindConfiguration().build()) .build()); testCache.put("key", "value"); @@ -384,7 +391,8 @@ public void testBatchedWriteBehindStopWaitsForEmptyQueue() { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testBatchedWriteBehindStopWaitsForEmptyQueue", configurationBuilder() - .add(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).build()) + .withLoaderWriter(loaderWriter) + .withService(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).build()) .build()); testCache.put("key", "value"); @@ -406,7 +414,8 @@ public void testUnBatchedWriteBehindBlocksWhenFull() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { final Cache testCache = cacheManager.createCache("testUnBatchedWriteBehindBlocksWhenFull", configurationBuilder() - .add(newUnBatchedWriteBehindConfiguration().queueSize(1).build()) + .withLoaderWriter(loaderWriter) + .withService(newUnBatchedWriteBehindConfiguration().queueSize(1).build()) .build()); testCache.put("key1", "value"); @@ -445,7 +454,8 @@ public void testBatchedWriteBehindBlocksWhenFull() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { final Cache testCache = cacheManager.createCache("testBatchedWriteBehindBlocksWhenFull", configurationBuilder() - .add(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 1).queueSize(1).build()) + .withLoaderWriter(loaderWriter) + .withService(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 1).queueSize(1).build()) .build()); testCache.put("key1", "value"); @@ -477,7 +487,8 @@ public void testFilledBatchedIsWritten() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testFilledBatchedIsWritten", configurationBuilder() - .add(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).build()) + .withLoaderWriter(loaderWriter) + .withService(newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 2).build()) .build()); CountDownLatch latch = new CountDownLatch(2); @@ -502,7 +513,8 @@ public void testAgedBatchedIsWritten() throws Exception { try (CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true)) { Cache testCache = cacheManager.createCache("testAgedBatchedIsWritten", configurationBuilder() - .add(newBatchedWriteBehindConfiguration(1, SECONDS, 2).build()) + .withLoaderWriter(loaderWriter) + .withService(newBatchedWriteBehindConfiguration(1, SECONDS, 2).build()) .build()); CountDownLatch latch = new CountDownLatch(1); @@ -523,16 +535,16 @@ public void testWriteBehindQueueSize() throws Exception { class TestWriteBehindProvider extends WriteBehindProviderFactory.Provider { - private WriteBehind writeBehind = null; + private WriteBehind writeBehind = null; @Override @SuppressWarnings("unchecked") - public WriteBehind createWriteBehindLoaderWriter(final CacheLoaderWriter cacheLoaderWriter, final WriteBehindConfiguration configuration) { + public WriteBehind createWriteBehindLoaderWriter(CacheLoaderWriter cacheLoaderWriter, WriteBehindConfiguration configuration) { this.writeBehind = super.createWriteBehindLoaderWriter(cacheLoaderWriter, configuration); - return writeBehind; + return (WriteBehind) writeBehind; } - public WriteBehind getWriteBehind() { + public WriteBehind getWriteBehind() { return writeBehind; } } @@ -542,8 +554,8 @@ public WriteBehind getWriteBehind() { try (CacheManager cacheManager = managerBuilder().using(writeBehindProvider).build(true)) { Cache testCache = cacheManager.createCache("testAgedBatchedIsWritten", configurationBuilder() - .add(new DefaultCacheLoaderWriterConfiguration(loaderWriter)) - .add(newBatchedWriteBehindConfiguration(5, SECONDS, 2).build()) + .withService(new DefaultCacheLoaderWriterConfiguration(loaderWriter)) + .withService(newBatchedWriteBehindConfiguration(5, SECONDS, 2).build()) .build()); testCache.put("key1", "value1"); @@ -553,10 +565,10 @@ public WriteBehind getWriteBehind() { } } - @SuppressWarnings("unchecked") - protected CacheLoaderWriterProvider getMockedCacheLoaderWriterProvider(CacheLoaderWriter loaderWriter) { + @SuppressWarnings({"unchecked", "rawtypes"}) + protected CacheLoaderWriterProvider getMockedCacheLoaderWriterProvider(CacheLoaderWriter loaderWriter) { CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)any())).thenReturn(loaderWriter); + when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)any())).thenReturn((CacheLoaderWriter) loaderWriter); return cacheLoaderWriterProvider; } diff --git a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/PooledExecutorWriteBehindTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/PooledExecutorWriteBehindTest.java similarity index 85% rename from impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/PooledExecutorWriteBehindTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/PooledExecutorWriteBehindTest.java index c70974a837..0aa85f7192 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/PooledExecutorWriteBehindTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/PooledExecutorWriteBehindTest.java @@ -15,12 +15,13 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind; -import java.util.concurrent.TimeUnit; +import java.time.Duration; + +import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; @@ -35,11 +36,11 @@ public class PooledExecutorWriteBehindTest extends AbstractWriteBehindTestBase { @Override protected CacheConfigurationBuilder configurationBuilder() { return newCacheConfigurationBuilder(String.class, String.class, heap(100)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); } @Override - protected CacheManagerBuilder managerBuilder() { + protected CacheManagerBuilder managerBuilder() { PooledExecutionServiceConfiguration threadPoolConfig = new PooledExecutionServiceConfiguration(); threadPoolConfig.addDefaultPool("threadpool", 2, 8); return newCacheManagerBuilder().using(threadPoolConfig); diff --git a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindEvictionTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindEvictionTest.java similarity index 83% rename from impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindEvictionTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindEvictionTest.java index 8f12427608..12f68d8cfd 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindEvictionTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindEvictionTest.java @@ -15,12 +15,12 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind; -import java.util.concurrent.TimeUnit; +import java.time.Duration; +import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; @@ -35,11 +35,11 @@ public class WriteBehindEvictionTest extends AbstractWriteBehindTestBase { @Override protected CacheConfigurationBuilder configurationBuilder() { return newCacheConfigurationBuilder(String.class, String.class, heap(10)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(100, TimeUnit.MILLISECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(100))); } @Override - protected CacheManagerBuilder managerBuilder() { + protected CacheManagerBuilder managerBuilder() { return newCacheManagerBuilder(); } } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactoryTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactoryTest.java new file mode 100644 index 0000000000..e60ff8b3fa --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactoryTest.java @@ -0,0 +1,107 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.loaderwriter.writebehind; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; +import org.ehcache.config.builders.WriteBehindConfigurationBuilder; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; +import org.ehcache.spi.service.ServiceConfiguration; +import org.hamcrest.core.IsCollectionContaining; +import org.junit.Test; + +import java.util.Collection; +import java.util.Map; + +import static java.util.concurrent.TimeUnit.SECONDS; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.junit.Assert.assertThrows; + +/** + * @author rism + */ +public class WriteBehindProviderFactoryTest { + + @SuppressWarnings("unchecked") + @Test + public void testAddingWriteBehindConfigurationAtCacheLevel() { + CacheManagerBuilder cacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder(); + WriteBehindConfiguration writeBehindConfiguration = WriteBehindConfigurationBuilder.newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 1) + .concurrencyLevel(3) + .queueSize(10) + .build(); + Class> klazz = (Class>) (Class) (SampleLoaderWriter.class); + CacheManager cacheManager = cacheManagerBuilder.build(true); + final Cache cache = cacheManager.createCache("cache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(100)) + .withService(writeBehindConfiguration) + .withService(new DefaultCacheLoaderWriterConfiguration(klazz)) + .build()); + Collection> serviceConfiguration = cache.getRuntimeConfiguration() + .getServiceConfigurations(); + assertThat(serviceConfiguration, IsCollectionContaining.>hasItem(instanceOf(WriteBehindConfiguration.class))); + cacheManager.close(); + } + + @Test + public void testWriteBehindWithoutCacheLoaderWriter() { + WriteBehindProviderFactory factory = new WriteBehindProviderFactory(); + NullPointerException thrown = assertThrows(NullPointerException.class, () -> factory.create(null).createWriteBehindLoaderWriter(null, null)); + assertThat(thrown, hasProperty("message", is("WriteBehind requires a non null CacheLoaderWriter."))); + } + + public static class SampleLoaderWriter implements CacheLoaderWriter { + + @Override + public V load(K key) { + throw new UnsupportedOperationException("Implement Me"); + } + + @Override + public Map loadAll(Iterable keys) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public void write(K key, V value) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public void writeAll(Iterable> entries) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public void delete(K key) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public void deleteAll(Iterable keys) { + throw new UnsupportedOperationException("Implement me!"); + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTest.java similarity index 83% rename from impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTest.java index 56613a5811..e22e55262e 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTest.java @@ -15,12 +15,12 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind; -import java.util.concurrent.TimeUnit; +import java.time.Duration; +import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; @@ -35,11 +35,11 @@ public class WriteBehindTest extends AbstractWriteBehindTestBase { @Override protected CacheConfigurationBuilder configurationBuilder() { return newCacheConfigurationBuilder(String.class, String.class, heap(100)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); } @Override - protected CacheManagerBuilder managerBuilder() { + protected CacheManagerBuilder managerBuilder() { return newCacheManagerBuilder(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTestLoaderWriter.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTestLoaderWriter.java similarity index 87% rename from impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTestLoaderWriter.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTestLoaderWriter.java index a613def24c..0c26fc2f40 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTestLoaderWriter.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindTestLoaderWriter.java @@ -39,7 +39,7 @@ public synchronized void setLatch(CountDownLatch latch) { } @Override - public synchronized V load(K key) throws Exception { + public synchronized V load(K key) { List values = getValueList(key); if (values.isEmpty()) { return null; @@ -49,7 +49,7 @@ public synchronized V load(K key) throws Exception { } @Override - public synchronized Map loadAll(Iterable keys) throws Exception { + public synchronized Map loadAll(Iterable keys) { Map loaded = new HashMap<>(); for (K k : keys) { loaded.put(k, load(k)); @@ -58,26 +58,26 @@ public synchronized Map loadAll(Iterable keys) throws Excepti } @Override - public synchronized void write(K key, V value) throws Exception { + public synchronized void write(K key, V value) { getValueList(key).add(value); if(latch != null) latch.countDown(); } @Override - public synchronized void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { + public synchronized void writeAll(Iterable> entries) { for (Entry entry : entries) { write(entry.getKey(), entry.getValue()); } } @Override - public synchronized void delete(K key) throws Exception { + public synchronized void delete(K key) { getValueList(key).add(null); if(latch != null) latch.countDown(); } @Override - public synchronized void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { + public synchronized void deleteAll(Iterable keys) { for (K k : keys) { delete(k); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java similarity index 86% rename from impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java index cfe907e3d8..ac5137dfce 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java @@ -16,17 +16,18 @@ package org.ehcache.impl.internal.persistence; import org.ehcache.Cache; -import org.ehcache.CachePersistenceException; import org.ehcache.PersistentCacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import java.io.File; -import java.net.URISyntaxException; +import java.io.IOException; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; @@ -35,16 +36,17 @@ import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertNotNull; -/** - * - */ public class CacheManagerDestroyRemovesPersistenceTest { public static final String PERSISTENT_CACHE = "persistent-cache"; + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + private PersistentCacheManager persistentCacheManager; @Test - public void testDestroyRemovesPersistenceData () throws URISyntaxException, CachePersistenceException { + public void testDestroyRemovesPersistenceData () throws Exception { File file = new File(getStoragePath(), "myData"); initCacheManager(file); putValuesInCacheAndCloseCacheManager(); @@ -57,7 +59,7 @@ public void testDestroyRemovesPersistenceData () throws URISyntaxException, Cach } @Test - public void testDestroyCacheDestroysPersistenceContext() throws URISyntaxException, CachePersistenceException { + public void testDestroyCacheDestroysPersistenceContext() throws Exception { File file = new File(getStoragePath(), "testDestroy"); initCacheManager(file); @@ -67,7 +69,7 @@ public void testDestroyCacheDestroysPersistenceContext() throws URISyntaxExcepti } @Test - public void testCreateCacheWithSameAliasAfterDestroy() throws URISyntaxException, CachePersistenceException { + public void testCreateCacheWithSameAliasAfterDestroy() throws Exception { File file = new File(getStoragePath(), "testDestroy"); initCacheManager(file); @@ -85,7 +87,7 @@ public void testCreateCacheWithSameAliasAfterDestroy() throws URISyntaxException } @Test - public void testDestroyCacheWithUnknownAlias() throws URISyntaxException, CachePersistenceException { + public void testDestroyCacheWithUnknownAlias() throws Exception { File file = new File(getStoragePath(), "testDestroyUnknownAlias"); initCacheManager(file); @@ -103,7 +105,7 @@ public void testDestroyCacheWithUnknownAlias() throws URISyntaxException, CacheP assertThat(file, not(containsCacheDirectory(PERSISTENT_CACHE))); } - private void initCacheManager(File file) throws URISyntaxException { + private void initCacheManager(File file) { persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(file)) .withCache(PERSISTENT_CACHE, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, @@ -121,8 +123,7 @@ private void putValuesInCacheAndCloseCacheManager() { persistentCacheManager.close(); } - @SuppressWarnings("ConstantConditions") - private String getStoragePath() throws URISyntaxException { - return getClass().getClassLoader().getResource(".").toURI().getPath(); + private String getStoragePath() throws IOException { + return diskPath.newFolder().getAbsolutePath(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java similarity index 94% rename from impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java index 2d4be23fb0..2538215160 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java @@ -89,7 +89,7 @@ public boolean handlesResourceType(ResourceType resourceType) { } @Override - public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { + public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { return diskResourceService.getPersistenceSpaceIdentifier(name, config); } @@ -109,7 +109,7 @@ public void destroy(String name) throws CachePersistenceException { } @Override - public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { + public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { return diskResourceService.createPersistenceContextWithin(identifier, name); } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/resilience/RobustLoaderWriterResilienceStrategyTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/resilience/RobustLoaderWriterResilienceStrategyTest.java new file mode 100644 index 0000000000..e9505f007a --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/resilience/RobustLoaderWriterResilienceStrategyTest.java @@ -0,0 +1,555 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.resilience; + +import org.assertj.core.data.MapEntry; +import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; +import org.ehcache.spi.loaderwriter.BulkCacheWritingException; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.loaderwriter.CacheLoadingException; +import org.ehcache.spi.loaderwriter.CacheWritingException; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.StoreAccessException; +import org.junit.After; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.ArgumentMatcher; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collector; + +import static java.util.Collections.singletonMap; +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +public class RobustLoaderWriterResilienceStrategyTest { + + @Rule + public MockitoRule rule = MockitoJUnit.rule(); + + @Mock + private RecoveryStore store; + + @Mock + private CacheLoaderWriter loaderWriter; + + @InjectMocks + private RobustLoaderWriterResilienceStrategy strategy; + + private final StoreAccessException accessException = new StoreAccessException("The exception"); + + private final Exception exception = new Exception("failed"); + + private final BulkCacheLoadingException bulkLoadingException = new BulkCacheLoadingException( + singletonMap(1, exception), singletonMap(2, 2L)); + + private final BulkCacheWritingException bulkWritingException = new BulkCacheWritingException( + singletonMap(1, exception), Collections.singleton(2)); + + @After + public void noMoreInteractions() { + verifyNoMoreInteractions(store, loaderWriter); + } + + @Test + public void getFailure() throws Exception { + when(loaderWriter.load(1)).thenReturn(1L); + + assertThat(strategy.getFailure(1, accessException)).isEqualTo(1L); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void getFailure_failedLoaderWriter() throws Exception { + when(loaderWriter.load(1)).thenThrow(exception); + + assertThatThrownBy(() -> strategy.getFailure(1, accessException)) + .isExactlyInstanceOf(CacheLoadingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void containsKeyFailure() throws Exception { + assertThat(strategy.containsKeyFailure(1, accessException)).isFalse(); + + verify(store).obliterate(1); + verifyZeroInteractions(loaderWriter); + } + + @Test + public void putFailure() throws Exception { + strategy.putFailure(1, 1L, accessException); + + verify(store).obliterate(1); + verify(loaderWriter).write(1, 1L); + } + + @Test + public void putFailure_failedLoaderWriter() throws Exception { + doThrow(exception).when(loaderWriter).write(1, 1L); + + assertThatThrownBy(() -> strategy.putFailure(1, 1L, accessException)) + .isExactlyInstanceOf(CacheWritingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).write(1, 1L); + } + + @Test + public void removeFailure() throws Exception { + strategy.removeFailure(1, accessException); + + verify(store).obliterate(1); + verify(loaderWriter).delete(1); + } + + @Test + public void removeFailure_failedLoaderWriter() throws Exception { + doThrow(exception).when(loaderWriter).delete(1); + + assertThatThrownBy(() -> strategy.removeFailure(1, accessException)) + .isExactlyInstanceOf(CacheWritingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).delete(1); + } + + @Test + public void clearFailure() throws Exception { + strategy.clearFailure(accessException); + + verify(store).obliterate(); + } + + @Test + public void putIfAbsentFailure_found() throws Exception { + when(loaderWriter.load(1)).thenReturn(1L); + + assertThat(strategy.putIfAbsentFailure(1, 2L, accessException)).isEqualTo(1); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void putIfAbsentFailure_notFound() throws Exception { + when(loaderWriter.load(1)).thenReturn(null); + + assertThat(strategy.putIfAbsentFailure(1, 1L, accessException)).isNull(); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + verify(loaderWriter).write(1, 1L); + } + + @Test + public void putIfAbsentFailure_loadFails() throws Exception { + when(loaderWriter.load(1)).thenThrow(exception); + + assertThatThrownBy(() -> strategy.putIfAbsentFailure(1, 1L, accessException)) + .isExactlyInstanceOf(CacheLoadingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void putIfAbsentFailure_writeFails() throws Exception { + when(loaderWriter.load(1)).thenReturn(null); + doThrow(exception).when(loaderWriter).write(1, 1L); + + assertThatThrownBy(() -> strategy.putIfAbsentFailure(1, 1L, accessException)) + .isExactlyInstanceOf(CacheWritingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + verify(loaderWriter).write(1, 1L); + } + + @Test + public void removeFailure1_notFound() throws Exception { + assertThat(strategy.removeFailure(1, 1L, accessException)).isFalse(); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void removeFailure1_foundNotEquals() throws Exception { + when(loaderWriter.load(1)).thenReturn(2L); + + assertThat(strategy.removeFailure(1, 1L, accessException)).isFalse(); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void removeFailure1_foundEquals() throws Exception { + when(loaderWriter.load(1)).thenReturn(1L); + + assertThat(strategy.removeFailure(1, 1L, accessException)).isTrue(); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + verify(loaderWriter).delete(1); + } + + @Test + public void removeFailure1_loadFails() throws Exception { + when(loaderWriter.load(1)).thenThrow(exception); + + assertThatThrownBy(() -> strategy.removeFailure(1, 1L, accessException)) + .isExactlyInstanceOf(CacheLoadingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void removeFailure1_deleteFails() throws Exception { + when(loaderWriter.load(1)).thenReturn(1L); + doThrow(exception).when(loaderWriter).delete(1); + + assertThatThrownBy(() -> strategy.removeFailure(1, 1L, accessException)) + .isExactlyInstanceOf(CacheWritingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + verify(loaderWriter).delete(1); + } + + @Test + public void replaceFailure_notFound() throws Exception { + assertThat(strategy.replaceFailure(1, 1L, accessException)).isNull(); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void replaceFailure_found() throws Exception { + when(loaderWriter.load(1)).thenReturn(2L); + + assertThat(strategy.replaceFailure(1, 1L, accessException)).isEqualTo(2L); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + verify(loaderWriter).write(1, 1L); + } + + @Test + public void replaceFailure_loadFails() throws Exception { + when(loaderWriter.load(1)).thenThrow(exception); + + assertThatThrownBy(() -> strategy.replaceFailure(1, 1L, accessException)) + .isExactlyInstanceOf(CacheLoadingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void replaceFailure_writeFails() throws Exception { + when(loaderWriter.load(1)).thenReturn(2L); + doThrow(exception).when(loaderWriter).write(1, 1L); + + assertThatThrownBy(() -> strategy.replaceFailure(1, 1L, accessException)) + .isExactlyInstanceOf(CacheWritingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + verify(loaderWriter).write(1, 1L); + } + + @Test + public void replaceFailure1_notFound() throws Exception { + assertThat(strategy.replaceFailure(1, 1L, 2L, accessException)).isFalse(); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void replaceFailure1_foundNotEquals() throws Exception { + when(loaderWriter.load(1)).thenReturn(3L); + + assertThat(strategy.replaceFailure(1, 1L, 2L, accessException)).isFalse(); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void replaceFailure1_foundEquals() throws Exception { + when(loaderWriter.load(1)).thenReturn(1L); + + assertThat(strategy.replaceFailure(1, 1L, 2L, accessException)).isTrue(); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + verify(loaderWriter).write(1, 2L); + } + + @Test + public void replaceFailure1_loadFails() throws Exception { + when(loaderWriter.load(1)).thenThrow(exception); + + assertThatThrownBy(() -> strategy.replaceFailure(1, 1L, 2L, accessException)) + .isExactlyInstanceOf(CacheLoadingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + } + + @Test + public void replaceFailure1_writeFails() throws Exception { + when(loaderWriter.load(1)).thenReturn(1L); + doThrow(exception).when(loaderWriter).write(1, 2L); + + assertThatThrownBy(() -> strategy.replaceFailure(1, 1L, 2L, accessException)) + .isExactlyInstanceOf(CacheWritingException.class) + .hasCause(exception); + + verify(store).obliterate(1); + verify(loaderWriter).load(1); + verify(loaderWriter).write(1, 2L); + } + + @Test + public void getAllFailure_nothingFound() throws Exception { + List keys = Arrays.asList(1, 2); + Map entries = new HashMap<>(); + keys.forEach(k -> entries.put(k, null)); + + when(loaderWriter.loadAll(keys)).thenReturn(entries); + + assertThat(strategy.getAllFailure(keys, accessException)).isEqualTo(entries); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).loadAll(keys); + } + + @Test + public void getAllFailure_allFound() throws Exception { + List keys = Arrays.asList(1, 2); + Map entries = keys.stream().collect(toMap(identity(), k -> (long) k)); + + when(loaderWriter.loadAll(keys)).thenReturn(entries); + + assertThat(strategy.getAllFailure(keys, accessException)).isEqualTo(entries); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).loadAll(keys); + } + + @Test + public void getAllFailure_partialFound() throws Exception { + List keys = Arrays.asList(1, 2); + Map entries = new HashMap<>(); + keys.forEach(k -> entries.put(k, k == 2 ? null : (long) k)); + + when(loaderWriter.loadAll(keys)).thenReturn(entries); + + assertThat(strategy.getAllFailure(keys, accessException)).isEqualTo(entries); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).loadAll(keys); + } + + @Test + public void getAllFailure_loadFailsWithException() throws Exception { + List keys = Arrays.asList(1, 2); + + when(loaderWriter.loadAll(keys)).thenThrow(exception); + + assertThatThrownBy(() -> strategy.getAllFailure(keys, accessException)) + .isExactlyInstanceOf(CacheLoadingException.class) + .hasCause(exception); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).loadAll(keys); + } + + @Test + public void getAllFailure_loadFailsWithBulkException() throws Exception { + List keys = Arrays.asList(1, 2); + + when(loaderWriter.loadAll(keys)).thenThrow(bulkLoadingException); + + assertThatThrownBy(() -> strategy.getAllFailure(keys, accessException)) + .isSameAs(bulkLoadingException); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).loadAll(keys); + } + + @Test + public void putAllFailure() throws Exception { + List> entryList = Arrays.asList(entry(1, 1L), entry(2, 2L)); + Map entryMap = entryList.stream().collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + + doNothing().when(loaderWriter).writeAll(argThat(containsAllMatcher(entryList))); + + strategy.putAllFailure(entryMap, accessException); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).writeAll(argThat(containsAllMatcher(entryList))); + } + + @Test + public void putAllFailure_writeAllFailsWithException() throws Exception { + List> entryList = Arrays.asList(entry(1, 1L), entry(2, 2L)); + Map entryMap = entryList.stream().collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + + doThrow(exception).when(loaderWriter).writeAll(argThat(containsAllMatcher(entryList))); + + assertThatThrownBy(() -> strategy.putAllFailure(entryMap, accessException)) + .isExactlyInstanceOf(CacheWritingException.class) + .hasCause(exception); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).writeAll(argThat(containsAllMatcher(entryList))); + } + + @Test + public void putAllFailure_writeAllFailsWithBulkException() throws Exception { + List> entryList = Arrays.asList(entry(1, 1L), entry(2, 2L)); + Map entryMap = entryList.stream().collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + + doThrow(bulkWritingException).when(loaderWriter).writeAll(argThat(containsAllMatcher(entryList))); + + assertThatThrownBy(() -> strategy.putAllFailure(entryMap, accessException)) + .isSameAs(bulkWritingException); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).writeAll(argThat(containsAllMatcher(entryList))); + } + + @Test + public void removeAllFailure() throws Exception { + List entryList = Arrays.asList(1, 2); + + strategy.removeAllFailure(entryList, accessException); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).deleteAll(entryList); + } + + @Test + public void removeAllFailure_deleteAllFailsWithException() throws Exception { + List entryList = Arrays.asList(1, 2); + + doThrow(exception).when(loaderWriter).deleteAll(entryList); + + assertThatThrownBy(() -> strategy.removeAllFailure(entryList, accessException)) + .isExactlyInstanceOf(CacheWritingException.class) + .hasCause(exception); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).deleteAll(entryList); + } + + @Test + public void removeAllFailure_deleteAllFailsWithBulkException() throws Exception { + List entryList = Arrays.asList(1, 2); + + doThrow(bulkWritingException).when(loaderWriter).deleteAll(entryList); + + assertThatThrownBy(() -> strategy.removeAllFailure(entryList, accessException)) + .isSameAs(bulkWritingException); + + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + verify(loaderWriter).deleteAll(entryList); + } + + private ArgumentMatcher>> containsAllMatcher(List> entryList) { + return argument -> { + boolean[] notFound = { false }; + argument.forEach(e -> { + if (!entryList.contains(e)) { + notFound[0] = true; + } + }); + return !notFound[0]; + }; + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/resilience/RobustResilienceStrategyTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/resilience/RobustResilienceStrategyTest.java new file mode 100644 index 0000000000..8c76a68850 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/resilience/RobustResilienceStrategyTest.java @@ -0,0 +1,138 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.resilience; + +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.StoreAccessException; +import org.junit.After; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +import java.util.Arrays; + +import static java.util.Arrays.asList; +import static java.util.Arrays.stream; +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; +import static java.util.stream.Stream.of; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class RobustResilienceStrategyTest { + + @Rule + public MockitoRule rule = MockitoJUnit.rule(); + + @Mock + private RecoveryStore store; + + @InjectMocks + private RobustResilienceStrategy strategy; + + private final StoreAccessException accessException = new StoreAccessException("The exception"); + + @After + public void noMoreInteractions() { + verifyNoMoreInteractions(store); + } + + @Test + public void getFailure() throws StoreAccessException { + assertThat(strategy.getFailure(1, accessException)).isNull(); + verify(store).obliterate(1); + } + + @Test + public void containsKeyFailure() throws StoreAccessException { + assertThat(strategy.containsKeyFailure(1, accessException)).isFalse(); + verify(store).obliterate(1); + } + + @Test + public void putFailure() throws StoreAccessException { + strategy.putFailure(1, 1L, accessException); + verify(store).obliterate(1); + } + + @Test + public void removeFailure() throws StoreAccessException { + assertThat(strategy.removeFailure(1, 1L, accessException)).isFalse(); + verify(store).obliterate(1); + } + + @Test + public void clearFailure() throws StoreAccessException { + strategy.clearFailure(accessException); + verify(store).obliterate(); + } + + @Test + public void putIfAbsentFailure() throws StoreAccessException { + assertThat(strategy.putIfAbsentFailure(1, 1L, accessException)).isNull(); + verify(store).obliterate(1); + } + + @Test + public void removeFailure1() throws StoreAccessException { + assertThat(strategy.removeFailure(1, 1L, accessException)).isFalse(); + verify(store).obliterate(1); + } + + @Test + public void replaceFailure() throws StoreAccessException { + assertThat(strategy.replaceFailure(1, 1L, accessException)).isNull(); + verify(store).obliterate(1); + } + + @Test + public void replaceFailure1() throws StoreAccessException { + assertThat(strategy.replaceFailure(1, 1L, 2L, accessException)).isFalse(); + verify(store).obliterate(1); + } + + @Test + public void getAllFailure() throws StoreAccessException { + assertThat(strategy.getAllFailure(asList(1, 2), accessException)).containsExactly(entry(1, null), entry(2, null)); + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + } + + @Test + public void putAllFailure() throws StoreAccessException { + strategy.putAllFailure(of(1, 2).collect(toMap(identity(), k -> (long) k)), accessException); + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + } + + @Test + public void removeAllFailure() throws StoreAccessException { + strategy.removeAllFailure(asList(1, 2), accessException); + @SuppressWarnings("unchecked") + ArgumentCaptor> captor = ArgumentCaptor.forClass(Iterable.class); + verify(store).obliterate(captor.capture()); + assertThat(captor.getValue()).contains(1, 2); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineConfigurationTest.java similarity index 75% rename from impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineConfigurationTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineConfigurationTest.java index 0a4f62a971..b8872af86b 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineConfigurationTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineConfigurationTest.java @@ -19,6 +19,9 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; import static org.junit.Assert.fail; import org.ehcache.config.units.MemoryUnit; @@ -61,4 +64,14 @@ public void testValidArguments() { assertThat(configuration.getUnit(), equalTo(MemoryUnit.B)); } + @Test + public void testDeriveDetachesProperly() { + DefaultSizeOfEngineConfiguration configuration = new DefaultSizeOfEngineConfiguration(42L, MemoryUnit.MB, 123L); + DefaultSizeOfEngineConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getMaxObjectSize(), is(configuration.getMaxObjectSize())); + assertThat(derived.getUnit(), is(configuration.getUnit())); + assertThat(derived.getMaxObjectGraphSize(), is(configuration.getMaxObjectGraphSize())); + } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderConfigurationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderConfigurationTest.java similarity index 75% rename from impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderConfigurationTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderConfigurationTest.java index ef6807946b..4d5a3b4cfb 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderConfigurationTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderConfigurationTest.java @@ -22,12 +22,15 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; import static org.junit.Assert.fail; + /** * @author Abhilash * */ - public class DefaultSizeOfEngineProviderConfigurationTest { @Test @@ -59,4 +62,16 @@ public void testValidArguments() { assertThat(configuration.getMaxObjectSize(), equalTo(10l)); assertThat(configuration.getUnit(), equalTo(MemoryUnit.B)); } + + @Test + public void testDeriveDetachesCorrectly() { + DefaultSizeOfEngineProviderConfiguration configuration = new DefaultSizeOfEngineProviderConfiguration(42L, MemoryUnit.B, 100L); + + DefaultSizeOfEngineProviderConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getMaxObjectGraphSize(), is(configuration.getMaxObjectGraphSize())); + assertThat(derived.getMaxObjectSize(), is(configuration.getMaxObjectSize())); + assertThat(derived.getUnit(), is(configuration.getUnit())); + } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactoryTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactoryTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactoryTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProviderFactoryTest.java diff --git a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java similarity index 91% rename from impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java index 9f5e1a853e..2bc78d29bc 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java @@ -20,6 +20,7 @@ import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; import org.ehcache.core.spi.store.heap.SizeOfEngine; +import org.ehcache.spi.copy.Copier; import org.junit.Test; import static org.hamcrest.MatcherAssert.assertThat; @@ -37,8 +38,7 @@ public class DefaultSizeOfEngineTest { public void testMaxObjectGraphSizeExceededException() { SizeOfEngine sizeOfEngine = new DefaultSizeOfEngine(3, Long.MAX_VALUE); try { - @SuppressWarnings("unchecked") - IdentityCopier valueCopier = new IdentityCopier(); + Copier valueCopier = IdentityCopier.identityCopier(); sizeOfEngine.sizeof(new MaxDepthGreaterThanThree(), new CopiedOnHeapValueHolder<>(new MaxDepthGreaterThanThree(), 0L, true, valueCopier)); fail(); @@ -52,8 +52,7 @@ public void testMaxObjectSizeExceededException() { SizeOfEngine sizeOfEngine = new DefaultSizeOfEngine(Long.MAX_VALUE, 1000); try { String overSized = new String(new byte[1000]); - @SuppressWarnings("unchecked") - IdentityCopier valueCopier = new IdentityCopier(); + Copier valueCopier = IdentityCopier.identityCopier(); sizeOfEngine.sizeof(overSized, new CopiedOnHeapValueHolder<>("test", 0L, true, valueCopier)); fail(); } catch (Exception limitExceededException) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/TestServiceProvider.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/TestServiceProvider.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/spi/TestServiceProvider.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/TestServiceProvider.java diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java similarity index 98% rename from impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java index a7e5dfc595..4784034d91 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java @@ -27,10 +27,10 @@ import java.io.Closeable; import java.io.IOException; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.sameInstance; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; /** diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java similarity index 88% rename from impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java index 2a15bcc7a4..c028c8bccb 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java @@ -36,8 +36,8 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; /** * @author rism @@ -55,7 +55,7 @@ public void testCacheConfigUsage() { final CacheManager manager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("foo", CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .add(listenerBuilder) + .withService(listenerBuilder) .build()).build(true); final Collection bar = manager.getCache("foo", Object.class, Object.class).getRuntimeConfiguration().getServiceConfigurations(); assertThat(bar.iterator().next().getClass().toString(), is(ListenerObject.object.toString())); @@ -64,16 +64,16 @@ public void testCacheConfigUsage() { @Test public void testAddingCacheEventListenerConfigurationAtCacheLevel() { CacheManagerBuilder cacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder(); - CacheEventListenerConfiguration cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder + CacheEventListenerConfiguration cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder .newEventListenerConfiguration(ListenerObject.class, EventType.CREATED).unordered().asynchronous().build(); CacheManager cacheManager = cacheManagerBuilder.build(true); final Cache cache = cacheManager.createCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(100)) - .add(cacheEventListenerConfiguration) + .withService(cacheEventListenerConfiguration) .build()); - Collection> serviceConfiguration = cache.getRuntimeConfiguration() + Collection> serviceConfiguration = cache.getRuntimeConfiguration() .getServiceConfigurations(); - assertThat(serviceConfiguration, IsCollectionContaining.>hasItem(instanceOf(DefaultCacheEventListenerConfiguration.class))); + assertThat(serviceConfiguration, IsCollectionContaining.>hasItem(instanceOf(DefaultCacheEventListenerConfiguration.class))); cacheManager.close(); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java similarity index 87% rename from impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java index da346e210f..b1054404d9 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java @@ -38,8 +38,8 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; public class DefaultCacheLoaderWriterProviderTest { @@ -49,7 +49,7 @@ public void testCacheConfigUsage() { final CacheManager manager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("foo", CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .add(new DefaultCacheLoaderWriterConfiguration(MyLoader.class)) + .withService(new DefaultCacheLoaderWriterConfiguration(MyLoader.class)) .build()).build(true); final Object foo = manager.getCache("foo", Object.class, Object.class).get(new Object()); assertThat(foo, is(MyLoader.object)); @@ -74,7 +74,7 @@ public void testCacheManagerConfigUsage() { @Test public void testCacheConfigOverridesCacheManagerConfig() { final CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .add(new DefaultCacheLoaderWriterConfiguration(MyOtherLoader.class)) + .withService(new DefaultCacheLoaderWriterConfiguration(MyOtherLoader.class)) .build(); final Map> caches = new HashMap<>(); @@ -95,11 +95,11 @@ public void testAddingCacheLoaderWriterConfigurationAtCacheLevel() { CacheManager cacheManager = cacheManagerBuilder.build(true); final Cache cache = cacheManager.createCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(100)) - .add(new DefaultCacheLoaderWriterConfiguration(klazz)) + .withService(new DefaultCacheLoaderWriterConfiguration(klazz)) .build()); - Collection> serviceConfiguration = cache.getRuntimeConfiguration() + Collection> serviceConfiguration = cache.getRuntimeConfiguration() .getServiceConfigurations(); - assertThat(serviceConfiguration, IsCollectionContaining.>hasItem(instanceOf(DefaultCacheLoaderWriterConfiguration.class))); + assertThat(serviceConfiguration, IsCollectionContaining.>hasItem(instanceOf(DefaultCacheLoaderWriterConfiguration.class))); cacheManager.close(); } @@ -132,34 +132,34 @@ public String toString() { }; @Override - public Object load(final Object key) throws Exception { + public Object load(final Object key) { return object; } @Override - public Map loadAll(final Iterable keys) throws Exception { + public Map loadAll(final Iterable keys) { throw new UnsupportedOperationException("Implement me!"); } private static Object lastWritten; @Override - public void write(final Object key, final Object value) throws Exception { + public void write(final Object key, final Object value) { lastWritten = value; } @Override - public void writeAll(final Iterable> entries) throws Exception { + public void writeAll(final Iterable> entries) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void delete(final Object key) throws Exception { + public void delete(final Object key) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void deleteAll(final Iterable keys) throws Exception { + public void deleteAll(final Iterable keys) { throw new UnsupportedOperationException("Implement me!"); } } @@ -176,12 +176,12 @@ public String toString() { private static Object lastWritten; @Override - public Object load(final Object key) throws Exception { + public Object load(final Object key) { return object; } @Override - public void write(final Object key, final Object value) throws Exception { + public void write(final Object key, final Object value) { lastWritten = value; } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderFactoryTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderFactoryTest.java new file mode 100644 index 0000000000..cd6c9d118d --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderFactoryTest.java @@ -0,0 +1,60 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.spi.resilience; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyProviderConfiguration; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.ResilienceStrategyProvider; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.junit.Test; + +import static org.ehcache.test.MockitoUtil.mock; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.junit.Assert.fail; + +public class DefaultResilienceStrategyProviderFactoryTest { + + @Test + public void testNullGivesValidFactory() { + ResilienceStrategyProvider provider = new DefaultResilienceStrategyProviderFactory().create(null); + assertThat(provider.createResilienceStrategy("test", mock(CacheConfiguration.class), mock(RecoveryStore.class)), notNullValue()); + } + + @Test + public void testWrongConfigTypeFails() { + try { + new DefaultResilienceStrategyProviderFactory().create(mock(ServiceCreationConfiguration.class)); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + //expected + } + } + + @Test + public void testSpecifiedConfigIsPassed() { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.setDefaultResilienceStrategy(resilienceStrategy); + ResilienceStrategyProvider provider = new DefaultResilienceStrategyProviderFactory().create(configuration); + + assertThat(provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class)), sameInstance(resilienceStrategy)); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderTest.java new file mode 100644 index 0000000000..ec62ea981a --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/resilience/DefaultResilienceStrategyProviderTest.java @@ -0,0 +1,201 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.spi.resilience; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyProviderConfiguration; +import org.ehcache.impl.internal.resilience.RobustResilienceStrategy; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.RecoveryStore; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.junit.Test; + +import java.util.Collections; + +import static org.ehcache.test.MockitoUtil.mock; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.mockito.Mockito.when; + +public class DefaultResilienceStrategyProviderTest { + + @Test + public void testDefaultInstanceReturned() { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.setDefaultResilienceStrategy(resilienceStrategy); + + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + assertThat(provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class)), sameInstance(resilienceStrategy)); + } + + @Test + public void testDefaultLoaderWriterInstanceReturned() { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.setDefaultLoaderWriterResilienceStrategy(resilienceStrategy); + + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + assertThat(provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class), mock(CacheLoaderWriter.class)), sameInstance(resilienceStrategy)); + } + + @Test + public void testDefaultInstanceConstructed() { + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.setDefaultResilienceStrategy(TestResilienceStrategy.class, "FooBar"); + + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + ResilienceStrategy resilienceStrategy = provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class)); + assertThat(resilienceStrategy, instanceOf(TestResilienceStrategy.class)); + assertThat(((TestResilienceStrategy) resilienceStrategy).message, is("FooBar")); + } + + @Test + public void testDefaultLoaderWriterInstanceConstructed() { + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.setDefaultLoaderWriterResilienceStrategy(TestResilienceStrategy.class, "FooBar"); + + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + ResilienceStrategy resilienceStrategy = provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class), mock(CacheLoaderWriter.class)); + assertThat(resilienceStrategy, instanceOf(TestResilienceStrategy.class)); + assertThat(((TestResilienceStrategy) resilienceStrategy).message, is("FooBar")); + } + + @Test + public void testPreconfiguredInstanceReturned() { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.addResilienceStrategyFor("foo", resilienceStrategy); + + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + assertThat(provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class)), sameInstance(resilienceStrategy)); + } + + @Test + public void testPreconfiguredLoaderWriterInstanceReturned() { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.addResilienceStrategyFor("foo", resilienceStrategy); + + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + assertThat(provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class), mock(CacheLoaderWriter.class)), sameInstance(resilienceStrategy)); + } + + @Test + public void testPreconfiguredInstanceConstructed() { + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.addResilienceStrategyFor("foo", TestResilienceStrategy.class, "FooBar"); + + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + ResilienceStrategy resilienceStrategy = provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class)); + assertThat(resilienceStrategy, instanceOf(TestResilienceStrategy.class)); + assertThat(((TestResilienceStrategy) resilienceStrategy).message, is("FooBar")); + } + + @Test + public void testPreconfiguredLoaderWriterInstanceConstructed() { + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + configuration.addResilienceStrategyFor("foo", TestResilienceStrategy.class, "FooBar"); + + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + ResilienceStrategy resilienceStrategy = provider.createResilienceStrategy("foo", mock(CacheConfiguration.class), mock(RecoveryStore.class), mock(CacheLoaderWriter.class)); + assertThat(resilienceStrategy, instanceOf(TestResilienceStrategy.class)); + assertThat(((TestResilienceStrategy) resilienceStrategy).message, is("FooBar")); + } + + + @Test + public void testProvidedInstanceReturned() { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + when(cacheConfiguration.getServiceConfigurations()).thenReturn(Collections.singleton(new DefaultResilienceStrategyConfiguration(resilienceStrategy))); + + assertThat(provider.createResilienceStrategy("foo", cacheConfiguration, mock(RecoveryStore.class)), sameInstance(resilienceStrategy)); + } + + @Test + public void testProvidedLoaderWriterInstanceReturned() { + ResilienceStrategy resilienceStrategy = mock(ResilienceStrategy.class); + + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + when(cacheConfiguration.getServiceConfigurations()).thenReturn(Collections.singleton(new DefaultResilienceStrategyConfiguration(resilienceStrategy))); + + assertThat(provider.createResilienceStrategy("foo", cacheConfiguration, mock(RecoveryStore.class), mock(CacheLoaderWriter.class)), sameInstance(resilienceStrategy)); + } + + @Test + public void testProvidedInstanceConstructed() { + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + when(cacheConfiguration.getServiceConfigurations()).thenReturn(Collections.singleton(new DefaultResilienceStrategyConfiguration(TestResilienceStrategy.class, "FooBar"))); + + ResilienceStrategy resilienceStrategy = provider.createResilienceStrategy("foo", cacheConfiguration, mock(RecoveryStore.class)); + assertThat(resilienceStrategy, instanceOf(TestResilienceStrategy.class)); + assertThat(((TestResilienceStrategy) resilienceStrategy).message, is("FooBar")); + } + + @Test + public void testProvidedLoaderWriterInstanceConstructed() { + DefaultResilienceStrategyProviderConfiguration configuration = new DefaultResilienceStrategyProviderConfiguration(); + DefaultResilienceStrategyProvider provider = new DefaultResilienceStrategyProvider(configuration); + + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + when(cacheConfiguration.getServiceConfigurations()).thenReturn(Collections.singleton(new DefaultResilienceStrategyConfiguration(TestResilienceStrategy.class, "FooBar"))); + + ResilienceStrategy resilienceStrategy = provider.createResilienceStrategy("foo", cacheConfiguration, mock(RecoveryStore.class), mock(CacheLoaderWriter.class)); + assertThat(resilienceStrategy, instanceOf(TestResilienceStrategy.class)); + assertThat(((TestResilienceStrategy) resilienceStrategy).message, is("FooBar")); + } + + public static class TestResilienceStrategy extends RobustResilienceStrategy { + + public final String message; + + public TestResilienceStrategy(String message, RecoveryStore store) { + super(store); + this.message = message; + } + + public TestResilienceStrategy(String message, RecoveryStore store, CacheLoaderWriter loaderWriter) { + super(store); + this.message = message; + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java similarity index 90% rename from impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java index 88a7fe93b3..4c07aca184 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java @@ -39,11 +39,9 @@ import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.Closeable; -import java.io.File; import java.io.IOException; import java.io.Serializable; import java.nio.ByteBuffer; @@ -52,15 +50,18 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; +import static org.ehcache.test.MockitoUtil.mock; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** @@ -71,9 +72,6 @@ public class DefaultSerializationProviderTest { @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); - @Rule - public ExpectedException expectedException = ExpectedException.none(); - @Test public void testCreateSerializerNoConfig() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); @@ -95,8 +93,7 @@ public void testCreateSerializerWithConfig() throws Exception { DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); - @SuppressWarnings("unchecked") - DefaultSerializerConfiguration dspConfig = new DefaultSerializerConfiguration(getSerializerClass(), DefaultSerializerConfiguration.Type.VALUE); + DefaultSerializerConfiguration dspConfig = new DefaultSerializerConfiguration<>(getSerializerClass(), DefaultSerializerConfiguration.Type.VALUE); assertThat(dsp.createValueSerializer(String.class, ClassLoader.getSystemClassLoader(), dspConfig), instanceOf(TestSerializer.class)); assertThat(dsp.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), dspConfig), instanceOf(TestSerializer.class)); @@ -165,7 +162,7 @@ public void testRemembersCreationConfigurationAfterStopStart() throws Unsupporte @Test public void testReleaseSerializerWithProvidedCloseableSerializerDoesNotClose() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); - CloseableSerializer closeableSerializer = new CloseableSerializer(); + CloseableSerializer closeableSerializer = new CloseableSerializer<>(); provider.providedVsCount.put(closeableSerializer, new AtomicInteger(1)); provider.releaseSerializer(closeableSerializer); @@ -178,7 +175,7 @@ public void testReleaseSerializerWithInstantiatedCloseableSerializerDoesClose() Class> serializerClass = (Class) CloseableSerializer.class; DefaultSerializerConfiguration config = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.KEY); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); - Serializer serializer = provider.createKeySerializer(String.class, getSystemClassLoader(), config); + Serializer serializer = provider.createKeySerializer(String.class, getSystemClassLoader(), config); provider.releaseSerializer(serializer); assertTrue(((CloseableSerializer)serializer).closed); @@ -310,54 +307,50 @@ public void testDefaultByteArraySerializer() throws Exception { @Test public void testCreateTransientSerializerWithoutConstructor() throws Exception { - expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @SuppressWarnings("unchecked") Class> serializerClass = (Class) BaseSerializer.class; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.VALUE); - provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> provider.createValueSerializer(Object.class, getSystemClassLoader(), configuration)); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); } @Test public void testCreatePersistentSerializerWithoutConstructor() throws Exception { - expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @SuppressWarnings("unchecked") Class> serializerClass = (Class) BaseSerializer.class; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.VALUE); - provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> provider.createValueSerializer(Object.class, getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock())); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); } @Test public void testCreateTransientStatefulSerializerWithoutConstructor() throws Exception { - expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @SuppressWarnings("unchecked") Class> serializerClass = (Class) StatefulBaseSerializer.class; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.VALUE); - provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> provider.createValueSerializer(Object.class, getSystemClassLoader(), configuration)); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); } @Test public void testCreatePersistentStatefulSerializerWithoutConstructor() throws Exception { - expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @SuppressWarnings("unchecked") Class> serializerClass = (Class) StatefulBaseSerializer.class; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.VALUE); - provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> provider.createValueSerializer(Object.class, getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock())); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); } @Test @@ -422,15 +415,14 @@ public void testPersistentMinimalStatefulSerializer() throws Exception { @Test public void testTransientLegacySerializer() throws Exception { - expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @SuppressWarnings("unchecked") Class> serializerClass = (Class) LegacySerializer.class; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.VALUE); - provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> provider.createValueSerializer(Object.class, getSystemClassLoader(), configuration)); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); } @Test @@ -441,12 +433,8 @@ public void testPersistentLegacySerializer() throws Exception { @SuppressWarnings("unchecked") Class> serializerClass = (Class) LegacySerializer.class; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.VALUE); - expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); - Serializer valueSerializer = - provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); - assertThat(valueSerializer, instanceOf(LegacySerializer.class)); - assertThat(LegacySerializer.legacyConstructorInvoked, is(true)); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> provider.createValueSerializer(Object.class, getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock())); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); } @Test @@ -484,28 +472,26 @@ public void testPersistentLegacyComboSerializer() throws Exception { @Test public void testCreateTransientStatefulLegacySerializer() throws Exception { - expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @SuppressWarnings("unchecked") Class> serializerClass = (Class) StatefulLegacySerializer.class; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.VALUE); - provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> provider.createValueSerializer(Object.class, getSystemClassLoader(), configuration)); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); } @Test public void testCreatePersistentStatefulLegacySerializer() throws Exception { - expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @SuppressWarnings("unchecked") Class> serializerClass = (Class) StatefulLegacySerializer.class; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration<>(serializerClass, DefaultSerializerConfiguration.Type.VALUE); - provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> provider.createValueSerializer(Object.class, getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock())); + assertThat(thrown, hasProperty("message", endsWith("does not have a constructor that takes in a ClassLoader."))); } @Test @@ -541,8 +527,8 @@ public void testPersistentStatefulLegacyComboSerializer() throws Exception { assertThat(StatefulLegacyComboSerializer.legacyConstructorInvoked, is(false)); } - private PersistableResourceService.PersistenceSpaceIdentifier getPersistenceSpaceIdentifierMock() { - PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = mock(DiskResourceService.PersistenceSpaceIdentifier.class); + private PersistableResourceService.PersistenceSpaceIdentifier getPersistenceSpaceIdentifierMock() { + PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = mock(DiskResourceService.PersistenceSpaceIdentifier.class); when(spaceIdentifier.getServiceType()).thenReturn(DiskResourceService.class); return spaceIdentifier; } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultCacheStatisticsTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultCacheStatisticsTest.java new file mode 100644 index 0000000000..b804590eb4 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultCacheStatisticsTest.java @@ -0,0 +1,240 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.statistics; + +import org.assertj.core.api.AbstractObjectAssert; +import org.ehcache.CacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.InternalCache; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.core.statistics.ChainedOperationObserver; +import org.ehcache.core.internal.statistics.DefaultCacheStatistics; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; +import org.ehcache.event.EventType; +import org.ehcache.impl.internal.TimeSourceConfiguration; +import org.ehcache.internal.TestTimeSource; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.config.builders.ResourcePoolsBuilder.*; + +@RunWith(Parameterized.class) +public class DefaultCacheStatisticsTest { + + /** + * Statistics can be disabled on the stores. However, the cache statistics should still work nicely when it's the case. + * + * @return if store statistics are enabled or disabled + */ + @Parameterized.Parameters + public static Object[] data() { + return new Object[] { Boolean.FALSE, Boolean.TRUE }; + } + + private static final String[][] KNOWN_STATISTICS = { + { + // Disabled + "Cache:EvictionCount", + "Cache:ExpirationCount", + "Cache:HitCount", + "Cache:MissCount", + "Cache:PutCount", + "Cache:RemovalCount", + "OnHeap:EvictionCount", + "OnHeap:ExpirationCount", + "OnHeap:MappingCount" + }, + { + // Enabled + "Cache:EvictionCount", + "Cache:ExpirationCount", + "Cache:HitCount", + "Cache:MissCount", + "Cache:PutCount", + "Cache:RemovalCount", + "OnHeap:EvictionCount", + "OnHeap:ExpirationCount", + "OnHeap:HitCount", + "OnHeap:MappingCount", + "OnHeap:MissCount", + "OnHeap:PutCount", + "OnHeap:RemovalCount" + } + }; + + private static final int TIME_TO_EXPIRATION = 100; + + private final boolean enableStoreStatistics; + private DefaultCacheStatistics cacheStatistics; + private CacheManager cacheManager; + private InternalCache cache; + private final TestTimeSource timeSource = new TestTimeSource(System.currentTimeMillis()); + private final List> expirations = new ArrayList<>(); + + public DefaultCacheStatisticsTest(boolean enableStoreStatistics) { + this.enableStoreStatistics = enableStoreStatistics; + } + + @Before + public void before() { + CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder + .newEventListenerConfiguration((CacheEventListener) expirations::add, EventType.EXPIRED) + .unordered() + .synchronous(); + + CacheConfiguration cacheConfiguration = + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(TIME_TO_EXPIRATION))) + .withService(cacheEventListenerConfiguration) + .withService(new StoreStatisticsConfiguration(enableStoreStatistics)) + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(new TimeSourceConfiguration(timeSource)) + .build(true); + + cache = (InternalCache) cacheManager.getCache("aCache", Long.class, String.class); + + cacheStatistics = new DefaultCacheStatistics(cache); + } + + @After + public void after() { + if (cacheManager != null) { + cacheManager.close(); + } + } + + @Test + public void getKnownStatistics() { + assertThat(cacheStatistics.getKnownStatistics()).containsOnlyKeys(KNOWN_STATISTICS[enableStoreStatistics ? 1 : 0]); + } + + @Test + public void getCacheHits() throws Exception { + cache.put(1L, "a"); + cache.get(1L); + assertThat(cacheStatistics.getCacheHits()).isEqualTo(1L); + assertStat("Cache:HitCount").isEqualTo(1L); + } + + @Test + public void getCacheHitPercentage() throws Exception { + cache.put(1L, "a"); + cache.get(1L); + assertThat(cacheStatistics.getCacheHitPercentage()).isEqualTo(100.0f); + } + + @Test + public void getCacheMisses() throws Exception { + cache.get(1L); + assertThat(cacheStatistics.getCacheMisses()).isEqualTo(1L); + assertStat("Cache:MissCount").isEqualTo(1L); + } + + @Test + public void getCacheMissPercentage() throws Exception { + cache.get(1L); + assertThat(cacheStatistics.getCacheMissPercentage()).isEqualTo(100.0f); + } + + @Test + public void getCacheGets() throws Exception { + cache.get(1L); + assertThat(cacheStatistics.getCacheGets()).isEqualTo(1); + } + + @Test + public void getCachePuts() throws Exception { + cache.put(1L, "a"); + assertThat(cacheStatistics.getCachePuts()).isEqualTo(1); + assertStat("Cache:PutCount").isEqualTo(1L); + } + + @Test + public void getCacheRemovals() throws Exception { + cache.put(1L, "a"); + cache.remove(1L); + assertThat(cacheStatistics.getCacheRemovals()).isEqualTo(1); + assertStat("Cache:RemovalCount").isEqualTo(1L); + } + + @Test + public void getCacheEvictions() throws Exception { + for (long i = 0; i < 11; i++) { + cache.put(i, "a"); + } + assertThat(cacheStatistics.getCacheEvictions()).isEqualTo(1); + assertStat("Cache:EvictionCount").isEqualTo(1L); + } + + @Test + public void getExpirations() throws Exception { + cache.put(1L, "a"); + assertThat(expirations).isEmpty(); + timeSource.advanceTime(TIME_TO_EXPIRATION); + assertThat(cache.get(1L)).isNull(); + assertThat(expirations).hasSize(1); + assertThat(expirations.get(0).getKey()).isEqualTo(1L); + assertThat(cacheStatistics.getCacheExpirations()).isEqualTo(1L); + assertStat("Cache:ExpirationCount").isEqualTo(1L); + } + + @Test + public void registerDerivedStatistics() { + AtomicBoolean endCalled = new AtomicBoolean(); + ChainedOperationObserver derivedStatistic = new org.ehcache.core.statistics.ChainedOperationObserver() { + + @Override + public void begin(long time) { + + } + + @Override + public void end(long time, long latency, CacheOperationOutcomes.PutOutcome result) { + endCalled.set(true); + assertThat(result).isEqualTo(CacheOperationOutcomes.PutOutcome.PUT); + } + }; + + cacheStatistics.registerDerivedStatistic(CacheOperationOutcomes.PutOutcome.class, "put", derivedStatistic); + + cache.put(1L, "a"); + + assertThat(endCalled.get()).isTrue(); + } + + private AbstractObjectAssert assertStat(String key) { + return assertThat((Number) cacheStatistics.getKnownStatistics().get(key).value()); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceTest.java similarity index 85% rename from impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceTest.java index 353d465fe4..7c516027cf 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultStatisticsServiceTest.java @@ -22,22 +22,19 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; import org.ehcache.spi.test.After; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import static org.assertj.core.api.Assertions.assertThat; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.junit.Assert.assertThrows; public class DefaultStatisticsServiceTest { private static final String CACHE = "myCache"; - @Rule - public ExpectedException expectedException = ExpectedException.none(); - private final DefaultStatisticsService service = new DefaultStatisticsService(); private CacheManager cacheManager; @@ -65,20 +62,13 @@ public void after() { public void startStopStart() throws Exception { cacheManager.init(); - assertThat(service.isStarted()).isTrue(); - Cache cache = cacheManager.getCache(CACHE, Long.class, String.class); cache.get(2L); assertThat(service.getCacheStatistics(CACHE).getCacheMisses()).isEqualTo(1); cacheManager.close(); - - assertThat(service.isStarted()).isFalse(); - cacheManager.init(); - assertThat(service.isStarted()).isTrue(); - // We expect the stats to be reinitialized after a stop start assertThat(service.getCacheStatistics(CACHE).getCacheMisses()).isEqualTo(0); cache = cacheManager.getCache(CACHE, Long.class, String.class); @@ -88,8 +78,7 @@ public void startStopStart() throws Exception { @Test public void startInMaintenance() throws Exception { - expectedException.expect(IllegalStateException.class); - service.stateTransition(Status.UNINITIALIZED, Status.MAINTENANCE); + assertThrows(IllegalStateException.class, () -> service.stateTransition(Status.UNINITIALIZED, Status.MAINTENANCE)); } } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsDisabledTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsDisabledTest.java new file mode 100644 index 0000000000..a29ddd23b5 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsDisabledTest.java @@ -0,0 +1,163 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.statistics; + +import org.assertj.core.api.AbstractObjectAssert; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.internal.statistics.DefaultTierStatistics; +import org.ehcache.impl.internal.TimeSourceConfiguration; +import org.ehcache.internal.TestTimeSource; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.time.Duration; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; + +/** + * Test the behavior of statistics when they are disabled (the default with one tier) on a store. + */ +public class DefaultTierStatisticsDisabledTest { + + private static final int TIME_TO_EXPIRATION = 100; + + private DefaultTierStatistics onHeap; + private CacheManager cacheManager; + private Cache cache; + private TestTimeSource timeSource = new TestTimeSource(System.currentTimeMillis()); + + @Before + public void before() { + CacheConfiguration cacheConfiguration = + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(TIME_TO_EXPIRATION))) + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(new TimeSourceConfiguration(timeSource)) + .build(true); + + cache = cacheManager.getCache("aCache", Long.class, String.class); + + onHeap = new DefaultTierStatistics(cache, "OnHeap"); + } + + @After + public void after() { + if(cacheManager != null) { + cacheManager.close(); + } + } + + @Test + public void getKnownStatistics() { + // Passthrough are there. Special ones needed for the cache statistics are there + assertThat(onHeap.getKnownStatistics()).containsOnlyKeys("OnHeap:EvictionCount", "OnHeap:ExpirationCount", "OnHeap:MappingCount"); + } + + @Test + public void getHits() { + cache.put(1L, "a"); + cache.get(1L); + assertThat(onHeap.getHits()).isEqualTo(0L); + assertNoStat("OnHeap:HitCount"); + } + + @Test + public void getMisses() { + cache.get(1L); + assertThat(onHeap.getMisses()).isEqualTo(0L); + assertNoStat("OnHeap:MissCount"); + } + + @Test + public void getPuts() { + cache.put(1L, "a"); + assertThat(onHeap.getPuts()).isEqualTo(0L); + assertNoStat("OnHeap:PutCount"); + } + + @Test + public void getUpdates() { + cache.put(1L, "a"); + cache.put(1L, "b"); + assertThat(onHeap.getPuts()).isEqualTo(0L); + assertNoStat("OnHeap:PutCount"); + } + + @Test + public void getRemovals() { + cache.put(1L, "a"); + cache.remove(1L); + assertThat(onHeap.getRemovals()).isEqualTo(0L); + assertNoStat("OnHeap:RemovalCount"); + } + + @Test + public void getEvictions() { + for (long i = 0; i < 11; i++) { + cache.put(i, "a"); + } + assertThat(onHeap.getEvictions()).isEqualTo(1L); + assertStat("OnHeap:EvictionCount").isEqualTo(1L); + } + + @Test + public void getExpirations() { + cache.put(1L, "a"); + timeSource.advanceTime(TIME_TO_EXPIRATION); + cache.get(1L); + assertThat(onHeap.getExpirations()).isEqualTo(1L); + assertStat("OnHeap:ExpirationCount").isEqualTo(1L); + } + + @Test + public void getMappings() { + cache.put(1L, "a"); + assertThat(onHeap.getMappings()).isEqualTo(1L); + assertStat("OnHeap:MappingCount").isEqualTo(1L); + } + + @Test + public void getAllocatedByteSize() { + cache.put(1L, "a"); + assertThat(onHeap.getAllocatedByteSize()).isEqualTo(-1L); + } + + @Test + public void getOccupiedByteSize() { + cache.put(1L, "a"); + assertThat(onHeap.getOccupiedByteSize()).isEqualTo(-1L); + } + + private AbstractObjectAssert assertStat(String key) { + return assertThat((Number) onHeap.getKnownStatistics().get(key).value()); + } + + private void assertNoStat(String key) { + assertThat(onHeap.getKnownStatistics()).doesNotContainKey(key); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsTest.java similarity index 85% rename from impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsTest.java index 6cf38f2bd2..e4bb37aed7 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultTierStatisticsTest.java @@ -16,22 +16,24 @@ package org.ehcache.impl.internal.statistics; -import java.util.concurrent.TimeUnit; - import org.assertj.core.api.AbstractObjectAssert; import org.ehcache.Cache; import org.ehcache.CacheManager; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; +import org.ehcache.core.internal.statistics.DefaultTierStatistics; import org.ehcache.impl.internal.TimeSourceConfiguration; import org.ehcache.internal.TestTimeSource; import org.junit.After; import org.junit.Before; import org.junit.Test; +import java.time.Duration; + import static org.assertj.core.api.Assertions.assertThat; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; @@ -48,8 +50,9 @@ public class DefaultTierStatisticsTest { public void before() { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - newResourcePoolsBuilder().heap(10)) - .withExpiry(Expirations.timeToLiveExpiration(Duration.of(TIME_TO_EXPIRATION, TimeUnit.MILLISECONDS))) + newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(TIME_TO_EXPIRATION))) + .withService(new StoreStatisticsConfiguration(true)) // explicitly enable statistics .build(); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() @@ -71,9 +74,7 @@ public void after() { @Test public void getKnownStatistics() { - assertThat(onHeap.getKnownStatistics()).containsOnlyKeys("OnHeap:HitCount", "OnHeap:MissCount", - "OnHeap:PutCount", "OnHeap:RemovalCount", "OnHeap:EvictionCount", "OnHeap:ExpirationCount", "OnHeap:MappingCount", - "OnHeap:OccupiedByteSize"); + assertThat(onHeap.getKnownStatistics()).containsOnlyKeys("OnHeap:HitCount", "OnHeap:MissCount", "OnHeap:PutCount", "OnHeap:RemovalCount", "OnHeap:EvictionCount", "OnHeap:ExpirationCount", "OnHeap:MappingCount"); } @Test @@ -139,12 +140,6 @@ public void getMappings() throws Exception { assertStat("OnHeap:MappingCount").isEqualTo(1L); } - @Test - public void getMaxMappings() throws Exception { - cache.put(1L, "a"); - assertThat(onHeap.getAllocatedByteSize()).isEqualTo(-1L); - } - @Test public void getAllocatedByteSize() throws Exception { cache.put(1L, "a"); @@ -158,6 +153,6 @@ public void getOccupiedByteSize() throws Exception { } private AbstractObjectAssert assertStat(String key) { - return assertThat(onHeap.getKnownStatistics().get(key).value()); + return assertThat((Number) onHeap.getKnownStatistics().get(key).value()); } } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/StatsUtilsTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/StatsUtilsTest.java new file mode 100644 index 0000000000..6a356d89a6 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/statistics/StatsUtilsTest.java @@ -0,0 +1,222 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.statistics; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.core.statistics.TierOperationOutcomes; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.context.ContextManager; +import org.terracotta.context.TreeNode; +import org.terracotta.context.query.Matchers; +import org.terracotta.context.query.Query; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.StatisticsManager; +import org.terracotta.statistics.StatisticType; + +import java.util.Collections; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.internal.statistics.StatsUtils.findLowestTier; +import static org.ehcache.core.internal.statistics.StatsUtils.findOperationStatisticOnChildren; +import static org.ehcache.core.internal.statistics.StatsUtils.findStatisticOnDescendants; +import static org.ehcache.core.internal.statistics.StatsUtils.findTiers; +import static org.ehcache.core.internal.statistics.StatsUtils.hasOperationStat; +import static org.ehcache.core.internal.statistics.StatsUtils.hasProperty; +import static org.ehcache.core.internal.statistics.StatsUtils.hasTag; +import static org.junit.Assert.assertThrows; +import static org.terracotta.context.query.Matchers.attributes; +import static org.terracotta.context.query.Matchers.context; +import static org.terracotta.context.query.Matchers.hasAttribute; +import static org.terracotta.context.query.QueryBuilder.queryBuilder; +import static org.terracotta.statistics.StatisticsManager.properties; +import static org.terracotta.statistics.StatisticsManager.tags; + +public class StatsUtilsTest { + + CacheManager cacheManager; + Cache cache; + + @Before + public void before() { + CacheConfiguration cacheConfiguration = + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .withService(new StoreStatisticsConfiguration(true)) // explicitly enable statistics + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .build(true); + + cache = cacheManager.getCache("aCache", Long.class, String.class); + + StatisticsManager.createPassThroughStatistic(cache, "test", tags(), properties("myproperty=myvalue"), StatisticType.COUNTER, () -> 0); + + cache.get(1L); + } + + @After + public void after() { + if (cacheManager != null) { + cacheManager.close(); + } + } + + @Test + public void testHasTag_found() throws Exception { + Set statResult = queryProperty("cache"); + assertThat(statResult.size()).isEqualTo(1); + } + + @Test + public void testHasTag_notfound() throws Exception { + Set statResult = queryProperty("xxx"); + assertThat(statResult.size()).isZero(); + } + + private Set queryProperty(String tag) { + @SuppressWarnings("unchecked") + Query statQuery = queryBuilder() + .descendants() + .filter(context(attributes(Matchers.>allOf( + hasAttribute("name", "get"), + hasTag(tag) + )))) + .build(); + + return statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); + } + + @Test + public void testHasProperty_found() throws Exception { + Set statResult = queryProperty("myproperty", "myvalue"); + assertThat(statResult.size()).isEqualTo(1); + } + + @Test + public void testHasProperty_notfoundKey() throws Exception { + Set statResult = queryProperty("xxx"); + assertThat(statResult.size()).isZero(); + } + + @Test + public void testHasProperty_valueDoesntMatch() throws Exception { + Set statResult = queryProperty("myproperty", "xxx"); + assertThat(statResult.size()).isZero(); + } + + @SuppressWarnings("unchecked") + private Set queryProperty(String key, String value) { + Query statQuery = queryBuilder() + .descendants() + .filter(context(attributes(Matchers.>allOf( + hasAttribute("name", "test"), + hasProperty(key, value) + )))) + .build(); + + return statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); + } + + @SuppressWarnings("unchecked") + @Test + public void testFindStatisticOnDescendantsWithDiscriminator() throws Exception { + Optional> stat = findStatisticOnDescendants(cache, "OnHeap", "tier", "get"); + assertThat(stat.get().sum()).isEqualTo(1L); + + stat = findStatisticOnDescendants(cache, "OnHeap", "tier", "xxx"); + assertThat(stat.isPresent()).isFalse(); + + stat = findStatisticOnDescendants(cache, "xxx", "tier", "xxx"); + assertThat(stat.isPresent()).isFalse(); + } + + @SuppressWarnings("unchecked") + @Test + public void testFindStatisticOnDescendants() throws Exception { + Optional> stat = findStatisticOnDescendants(cache, "OnHeap", "get"); + assertThat(stat.get().sum()).isEqualTo(1L); + + stat = findStatisticOnDescendants(cache, "OnHeap", "xxx"); + assertThat(stat.isPresent()).isFalse(); + + stat = findStatisticOnDescendants(cache, "xxx", "xxx"); + assertThat(stat.isPresent()).isFalse(); + } + + @Test + public void testFindCacheStatistic() { + OperationStatistic stat = findOperationStatisticOnChildren(cache, CacheOperationOutcomes.GetOutcome.class, "get"); + assertThat(stat.sum()).isEqualTo(1L); + } + + @Test + public void testFindCacheStatistic_notExisting() { + assertThrows(RuntimeException.class, () -> findOperationStatisticOnChildren(cache, CacheOperationOutcomes.GetOutcome.class, "xxx")); + } + + @Test + public void testFindTiers() { + String[] tiers = findTiers(cache); + assertThat(tiers).containsOnly("OnHeap"); + } + + @Test + public void testFindLowerTier_one() { + String tier = findLowestTier(new String[]{"OnHeap"}); + assertThat(tier).isEqualTo("OnHeap"); + } + + @Test + public void testFindLowerTier_two() { + String tier = findLowestTier(new String[]{"OnHeap", "Offheap"}); + assertThat(tier).isEqualTo("Offheap"); + } + + @Test + public void testFindLowerTier_three() { + String tier = findLowestTier(new String[]{"OnHeap", "Offheap", "Disk"}); + assertThat(tier).isEqualTo("Disk"); + } + + @Test + public void testFindLowerTier_none() { + assertThrows(RuntimeException.class, () -> findLowestTier(new String[0])); + } + + @Test + public void testHasOperationStatistic_found() { + assertThat(hasOperationStat(cache, StoreOperationOutcomes.GetOutcome.class, "get")).isTrue(); + } + + @Test + public void testHasOperationStatistic_notFound() { + assertThat(hasOperationStat(cache, StoreOperationOutcomes.GetOutcome.class, "xxx")).isFalse(); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/basic/DelegatingValueHolder.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/basic/DelegatingValueHolder.java new file mode 100644 index 0000000000..ff3af1fc49 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/basic/DelegatingValueHolder.java @@ -0,0 +1,66 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.basic; + +import org.ehcache.core.spi.store.Store; + +import java.util.concurrent.TimeUnit; + +import javax.annotation.Nonnull; + +/** + * ValueHolder delegating everything to another ValueHolder. + */ +public class DelegatingValueHolder implements Store.ValueHolder { + + private final Store.ValueHolder valueHolder; + + public DelegatingValueHolder(Store.ValueHolder valueHolder) { + this.valueHolder = valueHolder; + } + + @Override + @Nonnull + public T get() { + return valueHolder.get(); + } + + @Override + public long creationTime() { + return valueHolder.creationTime(); + } + + @Override + public long expirationTime() { + return valueHolder.expirationTime(); + } + + @Override + public boolean isExpired(long expirationTime) { + return valueHolder.isExpired(expirationTime); + } + + @Override + public long lastAccessTime() { + return valueHolder.lastAccessTime(); + } + + @Override + public long getId() { + return valueHolder.getId(); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/basic/SimpleValueHolder.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/basic/SimpleValueHolder.java new file mode 100644 index 0000000000..78442bc9ee --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/basic/SimpleValueHolder.java @@ -0,0 +1,66 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.basic; + +import org.ehcache.core.spi.store.Store; + +import java.util.concurrent.TimeUnit; + +import javax.annotation.Nonnull; + +/** + * A really basic value holder that just holds a value. + */ +public class SimpleValueHolder implements Store.ValueHolder { + + private final T value; + + public SimpleValueHolder(T v) { + this.value = v; + } + + @Override + @Nonnull + public T get() { + return value; + } + + @Override + public long creationTime() { + return 0; + } + + @Override + public long expirationTime() { + return 0; + } + + @Override + public boolean isExpired(long expirationTime) { + return false; + } + + @Override + public long lastAccessTime() { + return 0; + } + + @Override + public long getId() { + return 0; + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java similarity index 99% rename from impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java index 34babc8d49..132ae1aa9c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.store.disk; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.impl.internal.store.disk.factories.EhcachePersistentSegmentFactory; import org.ehcache.impl.internal.store.offheap.AbstractEhcacheOffHeapBackingMapTest; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java similarity index 85% rename from impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java index 6aba43f141..5d3b36b429 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java @@ -23,15 +23,18 @@ import org.ehcache.config.ResourceType; import org.ehcache.config.ResourceUnit; import org.ehcache.config.SizedResourcePool; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.DefaultTimeSourceService; import org.ehcache.impl.serialization.LongSerializer; import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; @@ -42,14 +45,13 @@ import org.terracotta.context.query.Matchers; import org.terracotta.context.query.Query; -import java.util.Map; import java.util.Set; import static java.util.Collections.singleton; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.empty; -import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; import static org.terracotta.context.query.Matchers.attributes; import static org.terracotta.context.query.Matchers.context; @@ -65,7 +67,8 @@ public class OffHeapDiskStoreProviderTest { public void testStatisticsAssociations() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = dependencySet().with(mock(SerializationProvider.class)) + ServiceLocator serviceLocator = dependencySet().with(mock(SerializationProvider.class)).with(new DefaultStatisticsService()) + .with(mock(CacheManagerProviderService.class)) .with(new DefaultTimeSourceService(null)).with(mock(DiskResourceService.class)).build(); provider.start(serviceLocator); @@ -74,11 +77,11 @@ public void testStatisticsAssociations() throws Exception { @SuppressWarnings("unchecked") Query storeQuery = queryBuilder() .children() - .filter(context(attributes(Matchers.>allOf( + .filter(context(attributes(Matchers.allOf( hasAttribute("tags", new Matcher>() { @Override protected boolean matchesSafely(Set object) { - return object.containsAll(singleton("Disk")); + return object.contains("Disk"); } }))))) .build(); @@ -117,8 +120,8 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { - return Expirations.noExpiration(); + public ExpiryPolicy getExpiry() { + return ExpiryPolicyBuilder.noExpiration(); } @Override @@ -129,7 +132,7 @@ public ResourcePools getResourcePools() { public

                                      P getPoolForResource(ResourceType

                                      resourceType) { return (P) new SizedResourcePool() { @Override - public ResourceType getType() { + public ResourceType getType() { return ResourceType.Core.DISK; } @@ -183,5 +186,10 @@ public int getDispatcherConcurrency() { return 1; } + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }; }} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java similarity index 83% rename from impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java index 1ad20ba49e..755d7d2c51 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java @@ -19,12 +19,13 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.SizedResourcePool; import org.ehcache.config.units.MemoryUnit; import org.ehcache.CachePersistenceException; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; @@ -37,7 +38,7 @@ import org.ehcache.internal.store.StoreFactory; import org.ehcache.internal.tier.AuthoritativeTierFactory; import org.ehcache.internal.tier.AuthoritativeTierSPITest; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.spi.serialization.Serializer; @@ -47,6 +48,7 @@ import org.junit.Before; import org.junit.Rule; import org.junit.rules.TemporaryFolder; +import org.terracotta.statistics.StatisticsManager; import java.io.IOException; import java.util.Arrays; @@ -55,10 +57,10 @@ import static org.ehcache.config.ResourceType.Core.DISK; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration.DEFAULT_DISK_SEGMENTS; import static org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration.DEFAULT_WRITER_CONCURRENCY; -import static org.mockito.Mockito.mock; +import static org.ehcache.test.MockitoUtil.mock; import static org.mockito.Mockito.when; /** @@ -83,34 +85,34 @@ public void setUp() throws Exception { @Override public AuthoritativeTier newStore() { - return newStore(null, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override public AuthoritativeTier newStoreWithCapacity(long capacity) { - return newStore(capacity, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(capacity, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override - public AuthoritativeTier newStoreWithExpiry(Expiry expiry, TimeSource timeSource) { + public AuthoritativeTier newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource) { return newStore(null, null, expiry, timeSource); } @Override public AuthoritativeTier newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { - return newStore(null, evictionAdvisor, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, evictionAdvisor, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } - private AuthoritativeTier newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { + private AuthoritativeTier newStore(Long capacity, EvictionAdvisor evictionAdvisor, ExpiryPolicy expiry, TimeSource timeSource) { Serializer keySerializer = new JavaSerializer<>(getClass().getClassLoader()); Serializer valueSerializer = new JavaSerializer<>(getClass().getClassLoader()); try { - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); String spaceName = "OffheapDiskStore-" + index.getAndIncrement(); - PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); ResourcePools resourcePools = getDiskResourcePool(capacity); SizedResourcePool diskPool = resourcePools.getPoolForResource(DISK); MemoryUnit unit = (MemoryUnit)diskPool.getUnit(); @@ -122,7 +124,7 @@ private AuthoritativeTier newStore(Long capacity, EvictionAdviso new OnDemandExecutionService(), null, DEFAULT_WRITER_CONCURRENCY, DEFAULT_DISK_SEGMENTS, config, timeSource, new TestStoreEventDispatcher<>(), - unit.toBytes(diskPool.getSize())); + unit.toBytes(diskPool.getSize()), new DefaultStatisticsService()); OffHeapDiskStore.Provider.init(store); createdStores.put(store, spaceName); return store; @@ -154,13 +156,13 @@ public Class getValueType() { } @Override - public ServiceConfiguration[] getServiceConfigurations() { + public ServiceConfiguration[] getServiceConfigurations() { try { - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); String spaceName = "OffheapDiskStore-" + index.getAndIncrement(); - PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); - return new ServiceConfiguration[] {space}; + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + return new ServiceConfiguration[] {space}; } catch (CachePersistenceException e) { throw new RuntimeException(e); } @@ -194,6 +196,7 @@ public void close(final Store store) { String spaceName = createdStores.get(store); try { OffHeapDiskStore.Provider.close((OffHeapDiskStore)store); + StatisticsManager.nodeFor(store).clean(); } catch (IOException ex) { throw new RuntimeException(ex); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java similarity index 79% rename from impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java index 0c283df278..5cc5fd54b0 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java @@ -22,14 +22,19 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.spi.service.CacheManagerProviderService; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome; import org.ehcache.CachePersistenceException; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.ehcache.impl.internal.store.offheap.portability.AssertingOffHeapValueHolderPortability; +import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; import org.ehcache.impl.internal.persistence.TestDiskResourceService; @@ -38,21 +43,20 @@ import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.util.UnmatchedResourceType; -import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; -import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.test.MockitoUtil; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; +import org.mockito.Answers; import org.terracotta.context.query.Matcher; import org.terracotta.context.query.Query; import org.terracotta.context.query.QueryBuilder; @@ -76,18 +80,18 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.persistence; +import static org.ehcache.config.builders.ExpiryPolicyBuilder.noExpiration; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; -import static org.ehcache.expiry.Expirations.noExpiration; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration.DEFAULT_DISK_SEGMENTS; import static org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration.DEFAULT_WRITER_CONCURRENCY; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -123,16 +127,17 @@ public void testRecovery() throws StoreAccessException, IOException { @Test public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider).build(); + ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider) + .with(mock(CacheManagerProviderService.class, Answers.RETURNS_DEEP_STUBS)).build(); serviceLocator.startAllServices(); - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = MockitoUtil.mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); - PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); { @SuppressWarnings("unchecked") - Store.Configuration storeConfig1 = mock(Store.Configuration.class); + Store.Configuration storeConfig1 = MockitoUtil.mock(Store.Configuration.class); when(storeConfig1.getKeyType()).thenReturn(Long.class); when(storeConfig1.getValueType()).thenReturn(String.class); when(storeConfig1.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -148,7 +153,7 @@ public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws { @SuppressWarnings("unchecked") - Store.Configuration storeConfig2 = mock(Store.Configuration.class); + Store.Configuration storeConfig2 = MockitoUtil.mock(Store.Configuration.class); when(storeConfig2.getKeyType()).thenReturn(Long.class); when(storeConfig2.getValueType()).thenReturn(Serializable.class); when(storeConfig2.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -173,16 +178,17 @@ public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws @Test public void testRecoveryWithArrayType() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider).build(); + ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider) + .with(mock(CacheManagerProviderService.class, Answers.RETURNS_DEEP_STUBS)).build(); serviceLocator.startAllServices(); - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = MockitoUtil.mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); - PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); { @SuppressWarnings("unchecked") - Store.Configuration storeConfig1 = mock(Store.Configuration.class); + Store.Configuration storeConfig1 = MockitoUtil.mock(Store.Configuration.class); when(storeConfig1.getKeyType()).thenReturn(Long.class); when(storeConfig1.getValueType()).thenReturn(Object[].class); when(storeConfig1.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -198,7 +204,7 @@ public void testRecoveryWithArrayType() throws Exception { { @SuppressWarnings("unchecked") - Store.Configuration storeConfig2 = mock(Store.Configuration.class); + Store.Configuration storeConfig2 = MockitoUtil.mock(Store.Configuration.class); when(storeConfig2.getKeyType()).thenReturn(Long.class); when(storeConfig2.getValueType()).thenReturn(Object[].class); when(storeConfig2.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -218,15 +224,16 @@ public void testRecoveryWithArrayType() throws Exception { @Test public void testProvidingOffHeapDiskStoreConfiguration() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider).build(); + ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider) + .with(mock(CacheManagerProviderService.class, Answers.RETURNS_DEEP_STUBS)).build(); serviceLocator.startAllServices(); - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = MockitoUtil.mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); - PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); @SuppressWarnings("unchecked") - Store.Configuration storeConfig1 = mock(Store.Configuration.class); + Store.Configuration storeConfig1 = MockitoUtil.mock(Store.Configuration.class); when(storeConfig1.getKeyType()).thenReturn(Long.class); when(storeConfig1.getValueType()).thenReturn(Object[].class); when(storeConfig1.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder() @@ -234,15 +241,15 @@ public void testProvidingOffHeapDiskStoreConfiguration() throws Exception { .build()); when(storeConfig1.getDispatcherConcurrency()).thenReturn(1); - OffHeapDiskStore offHeapDiskStore1 = provider.createStore(storeConfig1, space, - new OffHeapDiskStoreConfiguration("pool", 2, 4)); + OffHeapDiskStore offHeapDiskStore1 = provider.createStore( + storeConfig1, space, new OffHeapDiskStoreConfiguration("pool", 2, 4)); assertThat(offHeapDiskStore1.getThreadPoolAlias(), is("pool")); assertThat(offHeapDiskStore1.getWriterConcurrency(), is(2)); assertThat(offHeapDiskStore1.getDiskSegments(), is(4)); } @Override - protected OffHeapDiskStore createAndInitStore(final TimeSource timeSource, final Expiry expiry) { + protected OffHeapDiskStore createAndInitStore(final TimeSource timeSource, final ExpiryPolicy expiry) { try { SerializationProvider serializationProvider = new DefaultSerializationProvider(null); serializationProvider.start(providerContaining(diskResourceService)); @@ -250,13 +257,18 @@ protected OffHeapDiskStore createAndInitStore(final TimeSource t Serializer keySerializer = serializationProvider.createKeySerializer(String.class, classLoader); Serializer valueSerializer = serializationProvider.createValueSerializer(String.class, classLoader); StoreConfigurationImpl storeConfiguration = new StoreConfigurationImpl<>(String.class, String.class, - null, classLoader, expiry, null, 0, keySerializer, valueSerializer); - OffHeapDiskStore offHeapStore = new OffHeapDiskStore<>( + null, classLoader, expiry, null, 0, true, keySerializer, valueSerializer, null, false); + OffHeapDiskStore offHeapStore = new OffHeapDiskStore( getPersistenceContext(), new OnDemandExecutionService(), null, DEFAULT_WRITER_CONCURRENCY, DEFAULT_DISK_SEGMENTS, storeConfiguration, timeSource, new TestStoreEventDispatcher<>(), - MB.toBytes(1)); + MB.toBytes(1), new DefaultStatisticsService()) { + @Override + protected OffHeapValueHolderPortability createValuePortability(Serializer serializer) { + return new AssertingOffHeapValueHolderPortability<>(serializer); + } + }; OffHeapDiskStore.Provider.init(offHeapStore); return offHeapStore; } catch (UnsupportedTypeException e) { @@ -265,7 +277,7 @@ protected OffHeapDiskStore createAndInitStore(final TimeSource t } @Override - protected OffHeapDiskStore createAndInitStore(TimeSource timeSource, Expiry expiry, EvictionAdvisor evictionAdvisor) { + protected OffHeapDiskStore createAndInitStore(TimeSource timeSource, ExpiryPolicy expiry, EvictionAdvisor evictionAdvisor) { try { SerializationProvider serializationProvider = new DefaultSerializationProvider(null); serializationProvider.start(providerContaining(diskResourceService)); @@ -273,13 +285,18 @@ protected OffHeapDiskStore createAndInitStore(TimeSource timeSou Serializer keySerializer = serializationProvider.createKeySerializer(String.class, classLoader); Serializer valueSerializer = serializationProvider.createValueSerializer(byte[].class, classLoader); StoreConfigurationImpl storeConfiguration = new StoreConfigurationImpl<>(String.class, byte[].class, - evictionAdvisor, getClass().getClassLoader(), expiry, null, 0, keySerializer, valueSerializer); - OffHeapDiskStore offHeapStore = new OffHeapDiskStore<>( + evictionAdvisor, getClass().getClassLoader(), expiry, null, 0, true, keySerializer, valueSerializer, null, false); + OffHeapDiskStore offHeapStore = new OffHeapDiskStore( getPersistenceContext(), new OnDemandExecutionService(), null, DEFAULT_WRITER_CONCURRENCY, DEFAULT_DISK_SEGMENTS, storeConfiguration, timeSource, new TestStoreEventDispatcher<>(), - MB.toBytes(1)); + MB.toBytes(1), new DefaultStatisticsService()) { + @Override + protected OffHeapValueHolderPortability createValuePortability(Serializer serializer) { + return new AssertingOffHeapValueHolderPortability<>(serializer); + } + }; OffHeapDiskStore.Provider.init(offHeapStore); return offHeapStore; } catch (UnsupportedTypeException e) { @@ -300,7 +317,7 @@ protected void destroyStore(AbstractOffHeapStore store) { public void testStoreInitFailsWithoutLocalPersistenceService() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); try { - ServiceLocator serviceLocator = dependencySet().with(provider).build(); + dependencySet().with(provider).build(); fail("IllegalStateException expected"); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString("Failed to find provider with satisfied dependency set for interface" + @@ -335,15 +352,15 @@ public void testRank() throws Exception { private void assertRank(final Store.Provider provider, final int expectedRank, final ResourceType... resources) { assertThat(provider.rank( new HashSet<>(Arrays.asList(resources)), - Collections.>emptyList()), + Collections.emptyList()), is(expectedRank)); } private FileBasedPersistenceContext getPersistenceContext() { try { - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = MockitoUtil.mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MB, false).build()); - PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); return diskResourceService.createPersistenceContextWithin(space, "store"); } catch (CachePersistenceException e) { throw new AssertionError(e); @@ -356,35 +373,38 @@ public void diskStoreShrinkingTest() throws Exception { try (CacheManager manager = newCacheManagerBuilder() .with(persistence(temporaryFolder.newFolder("disk-stores").getAbsolutePath())) .build(true)) { - final Cache cache = manager.createCache("test", newCacheConfigurationBuilder(Long.class, CacheValue.class, - heap(1000).offheap(20, MB).disk(30, MB)) + + CacheConfigurationBuilder cacheConfigurationBuilder = newCacheConfigurationBuilder(Long.class, CacheValue.class, + heap(1000).offheap(10, MB).disk(20, MB)) .withLoaderWriter(new CacheLoaderWriter() { @Override - public CacheValue load(Long key) throws Exception { + public CacheValue load(Long key) { return null; } @Override - public Map loadAll(Iterable keys) throws BulkCacheLoadingException, Exception { + public Map loadAll(Iterable keys) { return Collections.emptyMap(); } @Override - public void write(Long key, CacheValue value) throws Exception { + public void write(Long key, CacheValue value) { } @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { + public void writeAll(Iterable> entries) { } @Override - public void delete(Long key) throws Exception { + public void delete(Long key) { } @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { + public void deleteAll(Iterable keys) { } - })); + }); + + Cache cache = manager.createCache("test", cacheConfigurationBuilder); for (long i = 0; i < 100000; i++) { cache.put(i, new CacheValue((int) i)); @@ -469,6 +489,8 @@ private CacheValue value(Random rndm) { public static class CacheValue implements Serializable { + private static final long serialVersionUID = 1L; + private final int value; private final byte[] padding; @@ -477,6 +499,11 @@ public CacheValue(int value) { this.padding = new byte[800]; } + @Override + public int hashCode() { + return value; + } + public boolean equals(Object o) { if (o instanceof CacheValue) { return value == ((CacheValue) o).value; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java similarity index 86% rename from impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java index e554944567..611c846a25 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java @@ -41,7 +41,7 @@ import static org.ehcache.impl.internal.store.disk.OffHeapDiskStore.persistent; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; @@ -52,20 +52,20 @@ public class EhcachePersistentSegmentTest { public final TemporaryFolder folder = new TemporaryFolder(); @SuppressWarnings("unchecked") - private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment() throws IOException { - return createTestSegment(noAdvice(), mock(EvictionListener.class)); + private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegmentWithAdvisorAndListener() throws IOException { + return createTestSegmentWithAdvisorAndListener(noAdvice(), mock(EvictionListener.class)); } @SuppressWarnings("unchecked") - private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(EvictionAdvisor evictionPredicate) throws IOException { - return createTestSegment(evictionPredicate, mock(EvictionListener.class)); + private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegmentWithAdvisor(EvictionAdvisor evictionPredicate) throws IOException { + return createTestSegmentWithAdvisorAndListener(evictionPredicate, mock(EvictionListener.class)); } - private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(EvictionListener evictionListener) throws IOException { - return createTestSegment(noAdvice(), evictionListener); + private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegmentWithListener(EvictionListener evictionListener) throws IOException { + return createTestSegmentWithAdvisorAndListener(noAdvice(), evictionListener); } - private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(final EvictionAdvisor evictionPredicate, EvictionListener evictionListener) throws IOException { + private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegmentWithAdvisorAndListener(final EvictionAdvisor evictionPredicate, EvictionListener evictionListener) throws IOException { try { HeuristicConfiguration configuration = new HeuristicConfiguration(1024 * 1024); SerializationProvider serializationProvider = new DefaultSerializationProvider(null); @@ -103,7 +103,7 @@ public void setSwitchedOn(boolean switchedOn) { @Test public void testPutAdvisedAgainstEvictionComputesMetadata() throws IOException { - EhcachePersistentSegment segment = createTestSegment((key, value) -> { + EhcachePersistentSegment segment = createTestSegmentWithAdvisor((key, value) -> { return "please-do-not-evict-me".equals(key); }); try { @@ -116,7 +116,7 @@ public void testPutAdvisedAgainstEvictionComputesMetadata() throws IOException { @Test public void testPutPinnedAdvisedAgainstEvictionComputesMetadata() throws IOException { - EhcachePersistentSegment segment = createTestSegment((key, value) -> { + EhcachePersistentSegment segment = createTestSegmentWithAdvisor((key, value) -> { return "please-do-not-evict-me".equals(key); }); try { @@ -129,7 +129,7 @@ public void testPutPinnedAdvisedAgainstEvictionComputesMetadata() throws IOExcep @Test public void testAdviceAgainstEvictionPreventsEviction() throws IOException { - EhcachePersistentSegment segment = createTestSegment(); + EhcachePersistentSegment segment = createTestSegmentWithAdvisorAndListener(); try { assertThat(segment.evictable(1), is(true)); assertThat(segment.evictable(EhcacheSegmentFactory.EhcacheSegment.ADVISED_AGAINST_EVICTION | 1), is(false)); @@ -142,7 +142,7 @@ public void testAdviceAgainstEvictionPreventsEviction() throws IOException { public void testEvictionFiresEvent() throws IOException { @SuppressWarnings("unchecked") EvictionListener evictionListener = mock(EvictionListener.class); - EhcachePersistentSegment segment = createTestSegment(evictionListener); + EhcachePersistentSegment segment = createTestSegmentWithListener(evictionListener); try { segment.put("key", "value"); segment.evict(segment.getEvictionIndex(), false); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java similarity index 84% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java index 3358880e92..26b538675f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java @@ -16,15 +16,13 @@ package org.ehcache.impl.internal.store.heap; import org.ehcache.Cache.Entry; -import org.ehcache.ValueSupplier; import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.core.events.StoreEventSink; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; import org.ehcache.core.spi.time.SystemTimeSource; @@ -46,6 +44,7 @@ import org.junit.runner.Description; import org.mockito.InOrder; +import java.time.Duration; import java.util.EnumSet; import java.util.HashMap; import java.util.Map; @@ -54,10 +53,9 @@ import java.util.concurrent.Exchanger; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; import java.util.function.Supplier; +import static org.ehcache.config.builders.ExpiryPolicyBuilder.expiry; import static org.ehcache.impl.internal.util.Matchers.holding; import static org.ehcache.impl.internal.util.Matchers.valueHeld; import static org.hamcrest.MatcherAssert.assertThat; @@ -80,8 +78,8 @@ public abstract class BaseOnHeapStoreTest { private static final RuntimeException RUNTIME_EXCEPTION = new RuntimeException(); - protected StoreEventDispatcher eventDispatcher; - protected StoreEventSink eventSink; + protected StoreEventDispatcher eventDispatcher; + protected StoreEventSink eventSink; @Rule public TestRule watchman = new TestWatcher() { @@ -179,7 +177,7 @@ public void testEvictWithBrokenEvictionAdvisorDoesEvict() throws Exception { public void testGet() throws Exception { OnHeapStore store = newStore(); store.put("key", "value"); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.HIT)); } @@ -196,9 +194,9 @@ public void testGetExpired() throws Exception { StoreEventSink eventSink = getStoreEventSink(); StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key", "value"); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); timeSource.advanceTime(1); assertThat(store.get("key"), nullValue()); @@ -212,11 +210,11 @@ public void testGetExpired() throws Exception { public void testGetNoExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(2, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(2))); StoreEventSink eventSink = getStoreEventSink(); store.put("key", "value"); timeSource.advanceTime(1); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); verify(eventSink, never()).expired(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.HIT)); } @@ -224,14 +222,14 @@ public void testGetNoExpired() throws Exception { @Test public void testAccessTime() throws Exception { TestTimeSource timeSource = new TestTimeSource(); - OnHeapStore store = newStore(timeSource, Expirations.noExpiration()); + OnHeapStore store = newStore(timeSource, ExpiryPolicyBuilder.noExpiration()); store.put("key", "value"); - long first = store.get("key").lastAccessTime(TimeUnit.MILLISECONDS); + long first = store.get("key").lastAccessTime(); assertThat(first, equalTo(timeSource.getTimeMillis())); final long advance = 5; timeSource.advanceTime(advance); - long next = store.get("key").lastAccessTime(TimeUnit.MILLISECONDS); + long next = store.get("key").lastAccessTime(); assertThat(next, equalTo(first + advance)); } @@ -254,7 +252,7 @@ public void testContainsKeyExpired() throws Exception { StoreEventSink eventSink = getStoreEventSink(); StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key", "value"); timeSource.advanceTime(1); @@ -275,7 +273,7 @@ public void testPut() throws Exception { verify(eventSink).created(eq("key"), eq("value")); verifyListenerReleaseEventsInOrder(eventDispatcher); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.PutOutcome.PUT)); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); } @Test @@ -289,17 +287,17 @@ public void testPutOverwrite() throws Exception { store.put("key", "value2"); verify(eventSink).updated(eq("key"), argThat(holding("value")), eq("value2")); verifyListenerReleaseEventsInOrder(eventDispatcher); - assertThat(store.get("key").value(), equalTo("value2")); + assertThat(store.get("key").get(), equalTo("value2")); } @Test public void testCreateTime() throws Exception { TestTimeSource timeSource = new TestTimeSource(); - OnHeapStore store = newStore(timeSource, Expirations.noExpiration()); + OnHeapStore store = newStore(timeSource, ExpiryPolicyBuilder.noExpiration()); assertThat(store.containsKey("key"), is(false)); store.put("key", "value"); ValueHolder valueHolder = store.get("key"); - assertThat(timeSource.getTimeMillis(), equalTo(valueHolder.creationTime(TimeUnit.MILLISECONDS))); + assertThat(timeSource.getTimeMillis(), equalTo(valueHolder.creationTime())); } @Test @@ -317,33 +315,33 @@ public void testPutIfAbsentNoValue() throws Exception { StoreEventSink eventSink = getStoreEventSink(); StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); - ValueHolder prev = store.putIfAbsent("key", "value"); + ValueHolder prev = store.putIfAbsent("key", "value", b -> {}); assertThat(prev, nullValue()); verify(eventSink).created(eq("key"), eq("value")); verifyListenerReleaseEventsInOrder(eventDispatcher); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); } @Test public void testPutIfAbsentValuePresent() throws Exception { OnHeapStore store = newStore(); store.put("key", "value"); - ValueHolder prev = store.putIfAbsent("key", "value2"); - assertThat(prev.value(), equalTo("value")); + ValueHolder prev = store.putIfAbsent("key", "value2", b -> {}); + assertThat(prev.get(), equalTo("value")); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.HIT)); } @Test public void testPutIfAbsentUpdatesAccessTime() throws Exception { TestTimeSource timeSource = new TestTimeSource(); - OnHeapStore store = newStore(timeSource, Expirations.noExpiration()); + OnHeapStore store = newStore(timeSource, ExpiryPolicyBuilder.noExpiration()); assertThat(store.get("key"), nullValue()); - store.putIfAbsent("key", "value"); - long first = store.get("key").lastAccessTime(TimeUnit.MILLISECONDS); + store.putIfAbsent("key", "value", b -> {}); + long first = store.get("key").lastAccessTime(); timeSource.advanceTime(1); - long next = store.putIfAbsent("key", "value2").lastAccessTime(TimeUnit.MILLISECONDS); + long next = store.putIfAbsent("key", "value2", b -> {}).lastAccessTime(); assertThat(next - first, equalTo(1L)); } @@ -351,12 +349,12 @@ public void testPutIfAbsentUpdatesAccessTime() throws Exception { public void testPutIfAbsentExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key", "value"); timeSource.advanceTime(1); - ValueHolder prev = store.putIfAbsent("key", "value2"); + ValueHolder prev = store.putIfAbsent("key", "value2", b -> {}); assertThat(prev, nullValue()); - assertThat(store.get("key").value(), equalTo("value2")); + assertThat(store.get("key").get(), equalTo("value2")); checkExpiryEvent(getStoreEventSink(), "key", "value"); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ExpirationOutcome.SUCCESS)); } @@ -397,7 +395,7 @@ public void testRemoveTwoArgNoMatch() throws Exception { RemoveStatus removed = store.remove("key", "not value"); assertThat(removed, equalTo(RemoveStatus.KEY_PRESENT)); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS)); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); } @Test @@ -405,10 +403,10 @@ public void testRemoveTwoArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key", "value"); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); timeSource.advanceTime(1); RemoveStatus removed = store.remove("key", "value"); assertThat(removed, equalTo(RemoveStatus.KEY_MISSING)); @@ -425,8 +423,8 @@ public void testReplaceTwoArgPresent() throws Exception { store.put("key", "value"); ValueHolder existing = store.replace("key", "value2"); - assertThat(existing.value(), equalTo("value")); - assertThat(store.get("key").value(), equalTo("value2")); + assertThat(existing.get(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value2")); verify(eventSink).updated(eq("key"), argThat(holding("value")), eq("value2")); verifyListenerReleaseEventsInOrder(eventDispatcher); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.REPLACED)); @@ -447,7 +445,7 @@ public void testReplaceTwoArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key", "value"); timeSource.advanceTime(1); @@ -468,7 +466,7 @@ public void testReplaceThreeArgMatch() throws Exception { ReplaceStatus replaced = store.replace("key", "value", "value2"); assertThat(replaced, equalTo(ReplaceStatus.HIT)); - assertThat(store.get("key").value(), equalTo("value2")); + assertThat(store.get("key").get(), equalTo("value2")); verify(eventSink).updated(eq("key"), argThat(holding("value")), eq("value2")); verifyListenerReleaseEventsInOrder(eventDispatcher); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED)); @@ -494,7 +492,7 @@ public void testReplaceThreeArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key", "value"); timeSource.advanceTime(1); @@ -535,25 +533,22 @@ public void testIterator() throws Exception { public void testIteratorExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key1", "value1"); store.put("key2", "value2"); store.put("key3", "value3"); timeSource.advanceTime(1); Map observed = observe(store.iterator()); - assertThat(3, equalTo(observed.size())); - assertThat(observed.get("key1"), equalTo("value1")); - assertThat(observed.get("key2"), equalTo("value2")); - assertThat(observed.get("key3"), equalTo("value3")); + assertThat(0, equalTo(observed.size())); - StatisticsTestUtils.validateStat(store, StoreOperationOutcomes.ExpirationOutcome.SUCCESS, 0L); + StatisticsTestUtils.validateStat(store, StoreOperationOutcomes.ExpirationOutcome.SUCCESS, 3L); } @Test public void testIteratorDoesNotUpdateAccessTime() throws Exception { TestTimeSource timeSource = new TestTimeSource(); - OnHeapStore store = newStore(timeSource, Expirations.noExpiration()); + OnHeapStore store = newStore(timeSource, ExpiryPolicyBuilder.noExpiration()); store.put("key1", "value1"); store.put("key2", "value2"); @@ -569,21 +564,21 @@ public void testIteratorDoesNotUpdateAccessTime() throws Exception { @Test public void testComputeReplaceTrue() throws Exception { TestTimeSource timeSource = new TestTimeSource(); - OnHeapStore store = newStore(timeSource, Expirations.noExpiration()); + OnHeapStore store = newStore(timeSource, ExpiryPolicyBuilder.noExpiration()); StoreEventSink eventSink = getStoreEventSink(); StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); ValueHolder installedHolder = store.get("key"); - long createTime = installedHolder.creationTime(TimeUnit.MILLISECONDS); - long accessTime = installedHolder.lastAccessTime(TimeUnit.MILLISECONDS); + long createTime = installedHolder.creationTime(); + long accessTime = installedHolder.lastAccessTime(); timeSource.advanceTime(1); - ValueHolder newValue = store.compute("key", (mappedKey, mappedValue) -> mappedValue, () -> true); + ValueHolder newValue = store.computeAndGet("key", (mappedKey, mappedValue) -> mappedValue, () -> true, () -> false); - assertThat(newValue.value(), equalTo("value")); - assertThat(createTime + 1, equalTo(newValue.creationTime(TimeUnit.MILLISECONDS))); - assertThat(accessTime + 1, equalTo(newValue.lastAccessTime(TimeUnit.MILLISECONDS))); + assertThat(newValue.get(), equalTo("value")); + assertThat(createTime + 1, equalTo(newValue.creationTime())); + assertThat(accessTime + 1, equalTo(newValue.lastAccessTime())); verify(eventSink).updated(eq("key"), argThat(holding("value")), eq("value")); verifyListenerReleaseEventsInOrder(eventDispatcher); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ComputeOutcome.PUT)); @@ -592,58 +587,58 @@ public void testComputeReplaceTrue() throws Exception { @Test public void testComputeReplaceFalse() throws Exception { TestTimeSource timeSource = new TestTimeSource(); - OnHeapStore store = newStore(timeSource, Expirations.noExpiration()); + OnHeapStore store = newStore(timeSource, ExpiryPolicyBuilder.noExpiration()); store.put("key", "value"); ValueHolder installedHolder = store.get("key"); - long createTime = installedHolder.creationTime(TimeUnit.MILLISECONDS); - long accessTime = installedHolder.lastAccessTime(TimeUnit.MILLISECONDS); + long createTime = installedHolder.creationTime(); + long accessTime = installedHolder.lastAccessTime(); timeSource.advanceTime(1); - ValueHolder newValue = store.compute("key", (mappedKey, mappedValue) -> mappedValue, () -> false); + ValueHolder newValue = store.computeAndGet("key", (mappedKey, mappedValue) -> mappedValue, () -> false, () -> false); - assertThat(newValue.value(), equalTo("value")); - assertThat(createTime, equalTo(newValue.creationTime(TimeUnit.MILLISECONDS))); - assertThat(accessTime + 1, equalTo(newValue.lastAccessTime(TimeUnit.MILLISECONDS))); + assertThat(newValue.get(), equalTo("value")); + assertThat(createTime, equalTo(newValue.creationTime())); + assertThat(accessTime + 1, equalTo(newValue.lastAccessTime())); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ComputeOutcome.HIT)); } @Test - public void testCompute() throws Exception { + public void testGetAndCompute() throws Exception { OnHeapStore store = newStore(); StoreEventSink eventSink = getStoreEventSink(); StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); - ValueHolder newValue = store.compute("key", (mappedKey, mappedValue) -> { + ValueHolder oldValue = store.getAndCompute("key", (mappedKey, mappedValue) -> { assertThat(mappedKey, equalTo("key")); assertThat(mappedValue, nullValue()); return "value"; }); - assertThat(newValue.value(), equalTo("value")); + assertThat(oldValue, nullValue()); verify(eventSink).created(eq("key"), eq("value")); verifyListenerReleaseEventsInOrder(eventDispatcher); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ComputeOutcome.PUT)); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); } @Test - public void testComputeNull() throws Exception { + public void testGetAndComputeNull() throws Exception { OnHeapStore store = newStore(); StoreEventSink eventSink = getStoreEventSink(); StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); - ValueHolder newValue = store.compute("key", (mappedKey, mappedValue) -> null); + ValueHolder oldValue = store.getAndCompute("key", (mappedKey, mappedValue) -> null); - assertThat(newValue, nullValue()); + assertThat(oldValue, nullValue()); assertThat(store.get("key"), nullValue()); StatisticsTestUtils.validateStat(store, StoreOperationOutcomes.ComputeOutcome.MISS, 1L); store.put("key", "value"); - newValue = store.compute("key", (mappedKey, mappedValue) -> null); + oldValue = store.getAndCompute("key", (mappedKey, mappedValue) -> null); - assertThat(newValue, nullValue()); + assertThat(oldValue.get(), equalTo("value")); assertThat(store.get("key"), nullValue()); verify(eventSink).removed(eq("key"), argThat(holding("value"))); verifyListenerReleaseEventsInOrder(eventDispatcher); @@ -656,53 +651,53 @@ public void testComputeException() throws Exception { store.put("key", "value"); try { - store.compute("key", (mappedKey, mappedValue) -> { + store.getAndCompute("key", (mappedKey, mappedValue) -> { throw RUNTIME_EXCEPTION; }); fail("RuntimeException expected"); } catch (StoreAccessException cae) { assertThat(cae.getCause(), is((Throwable)RUNTIME_EXCEPTION)); } - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); } @Test - public void testComputeExistingValue() throws Exception { + public void testGetAndComputeExistingValue() throws Exception { OnHeapStore store = newStore(); StoreEventSink eventSink = getStoreEventSink(); StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); - ValueHolder newValue = store.compute("key", (mappedKey, mappedValue) -> { + ValueHolder oldValue = store.getAndCompute("key", (mappedKey, mappedValue) -> { assertThat(mappedKey, equalTo("key")); assertThat(mappedValue, equalTo("value")); return "value2"; }); - assertThat(newValue.value(), equalTo("value2")); + assertThat(oldValue.get(), equalTo("value")); verify(eventSink).updated(eq("key"), argThat(holding("value")), eq("value2")); verifyListenerReleaseEventsInOrder(eventDispatcher); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ComputeOutcome.PUT)); - assertThat(store.get("key").value(), equalTo("value2")); + assertThat(store.get("key").get(), equalTo("value2")); } @Test - public void testComputeExpired() throws Exception { + public void testGetAndComputeExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key", "value"); timeSource.advanceTime(1); - ValueHolder newValue = store.compute("key", (mappedKey, mappedValue) -> { + ValueHolder oldValue = store.getAndCompute("key", (mappedKey, mappedValue) -> { assertThat(mappedKey, equalTo("key")); assertThat(mappedValue, nullValue()); return "value2"; }); - assertThat(newValue.value(), equalTo("value2")); - assertThat(store.get("key").value(), equalTo("value2")); + assertThat(oldValue, nullValue()); + assertThat(store.get("key").get(), equalTo("value2")); checkExpiryEvent(eventSink, "key", "value"); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ExpirationOutcome.SUCCESS)); } @@ -711,9 +706,9 @@ public void testComputeExpired() throws Exception { public void testComputeWhenExpireOnCreate() throws Exception { TestTimeSource timeSource = new TestTimeSource(); timeSource.advanceTime(1000L); - OnHeapStore store = newStore(timeSource, Expirations.builder().setCreate(Duration.ZERO).build()); + OnHeapStore store = newStore(timeSource, expiry().create(Duration.ZERO).build()); - ValueHolder result = store.compute("key", (key, value) -> "value", () -> false); + ValueHolder result = store.computeAndGet("key", (key, value) -> "value", () -> false, () -> false); assertThat(result, nullValue()); } @@ -721,10 +716,10 @@ public void testComputeWhenExpireOnCreate() throws Exception { public void testComputeWhenExpireOnUpdate() throws Exception { TestTimeSource timeSource = new TestTimeSource(); timeSource.advanceTime(1000L); - OnHeapStore store = newStore(timeSource, Expirations.builder().setUpdate(Duration.ZERO).build()); + OnHeapStore store = newStore(timeSource, expiry().update(Duration.ZERO).build()); store.put("key", "value"); - ValueHolder result = store.compute("key", (key, value) -> "newValue", () -> false); + ValueHolder result = store.computeAndGet("key", (key, value) -> "newValue", () -> false, () -> false); assertThat(result, valueHeld("newValue")); } @@ -732,10 +727,10 @@ public void testComputeWhenExpireOnUpdate() throws Exception { public void testComputeWhenExpireOnAccess() throws Exception { TestTimeSource timeSource = new TestTimeSource(); timeSource.advanceTime(1000L); - OnHeapStore store = newStore(timeSource, Expirations.builder().setAccess(Duration.ZERO).build()); + OnHeapStore store = newStore(timeSource, expiry().access(Duration.ZERO).build()); store.put("key", "value"); - ValueHolder result = store.compute("key", (key, value) -> value, () -> false); + ValueHolder result = store.computeAndGet("key", (key, value) -> value, () -> false, () -> false); assertThat(result, valueHeld("value")); } @@ -750,11 +745,11 @@ public void testComputeIfAbsent() throws Exception { return "value"; }); - assertThat(newValue.value(), equalTo("value")); + assertThat(newValue.get(), equalTo("value")); verify(eventSink).created(eq("key"), eq("value")); verifyListenerReleaseEventsInOrder(eventDispatcher); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.PUT)); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); } @Test @@ -767,9 +762,9 @@ public void testComputeIfAbsentExisting() throws Exception { return null; }); - assertThat(newValue.value(), equalTo("value")); + assertThat(newValue.get(), equalTo("value")); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.HIT)); - assertThat(store.get("key").value(), equalTo("value")); + assertThat(store.get("key").get(), equalTo("value")); } @Test @@ -804,7 +799,7 @@ public void testComputeIfAbsentException() throws Exception { public void testComputeIfAbsentExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put("key", "value"); timeSource.advanceTime(1); @@ -814,8 +809,8 @@ public void testComputeIfAbsentExpired() throws Exception { return "value2"; }); - assertThat(newValue.value(), equalTo("value2")); - assertThat(store.get("key").value(), equalTo("value2")); + assertThat(newValue.get(), equalTo("value2")); + assertThat(store.get("key").get(), equalTo("value2")); final String value = "value"; verify(eventSink).expired(eq("key"), argThat(holding(value))); verify(eventSink).created(eq("key"), eq("value2")); @@ -826,7 +821,7 @@ public void testComputeIfAbsentExpired() throws Exception { @Test public void testExpiryCreateException() throws Exception { TestTimeSource timeSource = new TestTimeSource(); - OnHeapStore store = newStore(timeSource, new Expiry() { + OnHeapStore store = newStore(timeSource, new ExpiryPolicy() { @Override public Duration getExpiryForCreation(String key, String value) { @@ -834,12 +829,12 @@ public Duration getExpiryForCreation(String key, String value) { } @Override - public Duration getExpiryForAccess(String key, ValueSupplier value) { + public Duration getExpiryForAccess(String key, Supplier value) { throw new AssertionError(); } @Override - public Duration getExpiryForUpdate(String key, ValueSupplier oldValue, String newValue) { + public Duration getExpiryForUpdate(String key, Supplier oldValue, String newValue) { throw new AssertionError(); } }); @@ -852,20 +847,20 @@ public Duration getExpiryForUpdate(String key, ValueSupplier o public void testExpiryAccessExceptionReturnsValueAndExpiresIt() throws Exception { TestTimeSource timeSource = new TestTimeSource(); timeSource.advanceTime(5); - OnHeapStore store = newStore(timeSource, new Expiry() { + OnHeapStore store = newStore(timeSource, new ExpiryPolicy() { @Override public Duration getExpiryForCreation(String key, String value) { - return Duration.INFINITE; + return ExpiryPolicy.INFINITE; } @Override - public Duration getExpiryForAccess(String key, ValueSupplier value) { + public Duration getExpiryForAccess(String key, Supplier value) { throw RUNTIME_EXCEPTION; } @Override - public Duration getExpiryForUpdate(String key, ValueSupplier oldValue, String newValue) { + public Duration getExpiryForUpdate(String key, Supplier oldValue, String newValue) { return null; } }); @@ -878,23 +873,23 @@ public Duration getExpiryForUpdate(String key, ValueSupplier o @Test public void testExpiryUpdateException() throws Exception{ final TestTimeSource timeSource = new TestTimeSource(); - OnHeapStore store = newStore(timeSource, new Expiry() { + OnHeapStore store = newStore(timeSource, new ExpiryPolicy() { @Override public Duration getExpiryForCreation(String key, String value) { - return Duration.INFINITE; + return ExpiryPolicy.INFINITE; } @Override - public Duration getExpiryForAccess(String key, ValueSupplier value) { - return Duration.INFINITE; + public Duration getExpiryForAccess(String key, Supplier value) { + return ExpiryPolicy.INFINITE; } @Override - public Duration getExpiryForUpdate(String key, ValueSupplier oldValue, String newValue) { + public Duration getExpiryForUpdate(String key, Supplier oldValue, String newValue) { if (timeSource.getTimeMillis() > 0) { throw new RuntimeException(); } - return Duration.INFINITE; + return ExpiryPolicy.INFINITE; } }); @@ -910,7 +905,7 @@ public Duration getExpiryForUpdate(String key, ValueSupplier o public void testGetOrComputeIfAbsentExpiresOnHit() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); @SuppressWarnings("unchecked") CachingTier.InvalidationListener invalidationListener = mock(CachingTier.InvalidationListener.class); store.setInvalidationListener(invalidationListener); @@ -932,7 +927,7 @@ public void testGetOrComputeIfAbsentExpiresOnHit() throws Exception { public void testGetOfComputeIfAbsentExpiresWithLoaderWriter() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, - Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); @SuppressWarnings("unchecked") CachingTier.InvalidationListener invalidationListener = mock(CachingTier.InvalidationListener.class); store.setInvalidationListener(invalidationListener); @@ -946,8 +941,8 @@ public void testGetOfComputeIfAbsentExpiresWithLoaderWriter() throws Exception { @SuppressWarnings("unchecked") final ValueHolder vh = mock(ValueHolder.class); - when(vh.value()).thenReturn("newvalue"); - when(vh.expirationTime(TimeUnit.MILLISECONDS)).thenReturn(2L); + when(vh.get()).thenReturn("newvalue"); + when(vh.expirationTime()).thenReturn(2L); ValueHolder newValue = store.getOrComputeIfAbsent("key", s -> vh); @@ -1058,7 +1053,7 @@ public void testGetOrComputeIfAbsentContention() throws InterruptedException { try { ValueHolder result = store.getOrComputeIfAbsent("42", key -> new CopiedOnHeapValueHolder<>("theAnswer!", System .currentTimeMillis(), -1, false, new IdentityCopier<>())); - assertThat(result.value(), is("theAnswer!")); + assertThat(result.get(), is("theAnswer!")); endLatch.countDown(); } catch (Exception e) { e.printStackTrace(); @@ -1217,7 +1212,7 @@ public void testEvictionDoneUnderEvictedKeyLockScope() throws Exception { @Test(timeout = 2000L) public void testIteratorExpiryHappensUnderExpiredKeyLockScope() throws Exception { TestTimeSource testTimeSource = new TestTimeSource(); - final OnHeapStore store = newStore(testTimeSource, Expirations.timeToLiveExpiration(new Duration(10, TimeUnit.MILLISECONDS))); + final OnHeapStore store = newStore(testTimeSource, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(10))); store.put("key", "value"); @@ -1247,12 +1242,12 @@ public void testIteratorExpiryHappensUnderExpiredKeyLockScope() throws Exception } @SuppressWarnings("unchecked") - private ValueSupplier anyValueSupplier() { - return any(ValueSupplier.class); + private Supplier anyValueSupplier() { + return any(Supplier.class); } - private void verifyListenerReleaseEventsInOrder(StoreEventDispatcher listener) { - StoreEventSink eventSink = getStoreEventSink(); + private void verifyListenerReleaseEventsInOrder(StoreEventDispatcher listener) { + StoreEventSink eventSink = getStoreEventSink(); InOrder inOrder = inOrder(listener); inOrder.verify(listener).eventSink(); @@ -1290,7 +1285,7 @@ private static Map observeAccessTimes(Iterator map = new HashMap<>(); while (iter.hasNext()) { Entry> entry = iter.next(); - map.put(entry.getKey(), entry.getValue().lastAccessTime(TimeUnit.MILLISECONDS)); + map.put(entry.getKey(), entry.getValue().lastAccessTime()); } return map; } @@ -1300,14 +1295,14 @@ private static Map observe(Iterator map = new HashMap<>(); while (iter.hasNext()) { Entry> entry = iter.next(); - map.put(entry.getKey(), entry.getValue().value()); + map.put(entry.getKey(), entry.getValue().get()); } return map; } private static void assertEntry(Entry> entry, String key, String value) { assertThat(entry.getKey(), equalTo(key)); - assertThat(entry.getValue().value(), equalTo(value)); + assertThat(entry.getValue().get(), equalTo(value)); } private static class TestTimeSource implements TimeSource { @@ -1326,28 +1321,28 @@ private void advanceTime(long delta) { @SuppressWarnings("unchecked") protected StoreEventSink getStoreEventSink() { - return eventSink; + return (StoreEventSink) eventSink; } @SuppressWarnings("unchecked") protected StoreEventDispatcher getStoreEventDispatcher() { - return eventDispatcher; + return (StoreEventDispatcher) eventDispatcher; } protected OnHeapStore newStore() { - return newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), Eviction.noAdvice()); + return newStore(SystemTimeSource.INSTANCE, ExpiryPolicyBuilder.noExpiration(), Eviction.noAdvice()); } protected OnHeapStore newStore(EvictionAdvisor evictionAdvisor) { - return newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), evictionAdvisor); + return newStore(SystemTimeSource.INSTANCE, ExpiryPolicyBuilder.noExpiration(), evictionAdvisor); } - protected OnHeapStore newStore(TimeSource timeSource, Expiry expiry) { + protected OnHeapStore newStore(TimeSource timeSource, ExpiryPolicy expiry) { return newStore(timeSource, expiry, Eviction.noAdvice()); } protected abstract void updateStoreCapacity(OnHeapStore store, int newCapacity); protected abstract OnHeapStore newStore(final TimeSource timeSource, - final Expiry expiry, final EvictionAdvisor evictionAdvisor); + final ExpiryPolicy expiry, final EvictionAdvisor evictionAdvisor); } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java new file mode 100644 index 0000000000..14af067cb2 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java @@ -0,0 +1,143 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.impl.internal.events.TestStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; +import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.internal.store.StoreFactory; +import org.ehcache.internal.store.StoreSPITest; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.service.ServiceConfiguration; +import org.junit.Before; +import org.terracotta.statistics.StatisticsManager; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; + +public class ByteSizedOnHeapStoreByRefSPITest extends StoreSPITest { + + private StoreFactory storeFactory; + private static final int MAGIC_NUM = 500; + + @Override + protected StoreFactory getStoreFactory() { + return storeFactory; + } + + @Before + public void setUp() { + storeFactory = new StoreFactory() { + + @Override + public Store newStore() { + return newStore(null, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); + } + + @Override + public Store newStoreWithCapacity(long capacity) { + return newStore(capacity, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); + } + + @Override + public Store newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource) { + return newStore(null, null, expiry, timeSource); + } + + @Override + public Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { + return newStore(null, evictionAdvisor, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); + } + + @SuppressWarnings("unchecked") + private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, ExpiryPolicy expiry, TimeSource timeSource) { + ResourcePools resourcePools = buildResourcePools(capacity); + Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), + evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, null, null); + return new OnHeapStore<>(config, timeSource, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), new TestStoreEventDispatcher<>(), new DefaultStatisticsService()); + } + + @Override + public Store.ValueHolder newValueHolder(final String value) { + return new CopiedOnHeapValueHolder<>(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, IdentityCopier.identityCopier()); + } + + private ResourcePools buildResourcePools(Comparable capacityConstraint) { + if (capacityConstraint == null) { + return newResourcePoolsBuilder().heap(10, MemoryUnit.KB).build(); + } else { + return newResourcePoolsBuilder().heap((Long)capacityConstraint * MAGIC_NUM, MemoryUnit.B).build(); + } + } + + @Override + public Class getKeyType() { + return String.class; + } + + @Override + public Class getValueType() { + return String.class; + } + + @Override + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; + } + + @Override + public String createKey(long seed) { + return "" + seed; + } + + @Override + public String createValue(long seed) { + return "" + seed; + } + + @Override + public void close(final Store store) { + OnHeapStore.Provider.close((OnHeapStore)store); + StatisticsManager.nodeFor(store).clean(); + } + + @Override + public ServiceLocator getServiceProvider() { + ServiceLocator locator = dependencySet().build(); + try { + locator.startAllServices(); + } catch (Exception e) { + throw new RuntimeException(e); + } + return locator; + } + }; + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java similarity index 78% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java index 6002a12b5d..772e2ce7e4 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java @@ -18,10 +18,11 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; @@ -31,16 +32,17 @@ import org.ehcache.impl.serialization.JavaSerializer; import org.ehcache.internal.store.StoreFactory; import org.ehcache.internal.store.StoreSPITest; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Before; +import org.terracotta.statistics.StatisticsManager; import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; public class ByteSizedOnHeapStoreByValueSPITest extends StoreSPITest { @@ -62,31 +64,31 @@ public void setUp() { @Override public Store newStore() { - return newStore(null, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override public Store newStoreWithCapacity(long capacity) { - return newStore(capacity, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(capacity, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override - public Store newStoreWithExpiry(Expiry expiry, TimeSource timeSource) { + public Store newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource) { return newStore(null, null, expiry, timeSource); } @Override public Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { - return newStore(null, evictionAdvisor, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, evictionAdvisor, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } - private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { + private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, ExpiryPolicy expiry, TimeSource timeSource) { ResourcePools resourcePools = buildResourcePools(capacity); Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, new JavaSerializer<>(getSystemClassLoader()), new JavaSerializer<>(getSystemClassLoader())); return new OnHeapStore<>(config, timeSource, defaultCopier, defaultCopier, - new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), new TestStoreEventDispatcher<>()); + new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), new TestStoreEventDispatcher<>(), new DefaultStatisticsService()); } @Override @@ -113,8 +115,8 @@ public Class getValueType() { } @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; } @Override @@ -130,6 +132,7 @@ public String createValue(long seed) { @Override public void close(final Store store) { OnHeapStore.Provider.close((OnHeapStore)store); + StatisticsManager.nodeFor(store).clean(); } @Override @@ -145,8 +148,9 @@ public ServiceLocator getServiceProvider() { }; } - public static void closeStore(OnHeapStore store) { + public static void closeStore(OnHeapStore store) { OnHeapStore.Provider.close(store); + StatisticsManager.nodeFor(store).clean(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java similarity index 81% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java index d58618eb4e..4a97fddd52 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java @@ -21,20 +21,20 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.units.EntryUnit; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; public class CountSizedOnHeapStoreByRefTest extends OnHeapStoreByRefTest { - private static final Copier DEFAULT_COPIER = new IdentityCopier(); - @Override protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { CacheConfigurationChangeListener listener = store.getConfigurationChangeListeners().get(0); @@ -46,17 +46,15 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { @Override @SuppressWarnings("unchecked") protected OnHeapStore newStore(final TimeSource timeSource, - final Expiry expiry, + final ExpiryPolicy expiry, final EvictionAdvisor evictionAdvisor, final int capacity) { - return new OnHeapStore(new Store.Configuration() { - @SuppressWarnings("unchecked") + return new OnHeapStore<>(new Store.Configuration() { @Override public Class getKeyType() { return (Class) String.class; } - @SuppressWarnings("unchecked") @Override public Class getValueType() { return (Class) String.class; @@ -73,7 +71,7 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { + public ExpiryPolicy getExpiry() { return expiry; } @@ -96,7 +94,12 @@ public Serializer getValueSerializer() { public int getDispatcherConcurrency() { return 0; } - }, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), eventDispatcher); + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }, timeSource, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), new NoopSizeOfEngine(), (StoreEventDispatcher) eventDispatcher, new DefaultStatisticsService()); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java similarity index 88% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java index 43e2542eb4..b3cfaca03f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java @@ -22,12 +22,14 @@ import org.ehcache.config.ResourcePools; import org.ehcache.config.units.EntryUnit; import org.ehcache.core.events.StoreEventDispatcher; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.store.Store; import org.ehcache.impl.serialization.JavaSerializer; import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; import java.io.Serializable; @@ -46,7 +48,7 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { @Override protected OnHeapStore newStore(final TimeSource timeSource, - final Expiry expiry, + final ExpiryPolicy expiry, final EvictionAdvisor evictionAdvisor, final Copier keyCopier, final Copier valueCopier, final int capacity) { StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); return new OnHeapStore<>(new Store.Configuration() { @@ -74,7 +76,7 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { + public ExpiryPolicy getExpiry() { return expiry; } @@ -97,7 +99,12 @@ public Serializer getValueSerializer() { public int getDispatcherConcurrency() { return 0; } - }, timeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }, timeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java new file mode 100644 index 0000000000..b065a01694 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java @@ -0,0 +1,332 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.store.Store; +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * @author Ludovic Orban + */ +public class OnHeapStoreBulkMethodsTest { + + @SuppressWarnings("unchecked") + protected Store.Configuration mockStoreConfig() { + @SuppressWarnings("rawtypes") + Store.Configuration config = mock(Store.Configuration.class); + when(config.getExpiry()).thenReturn(ExpiryPolicyBuilder.noExpiration()); + when(config.getKeyType()).thenReturn(Number.class); + when(config.getValueType()).thenReturn(CharSequence.class); + when(config.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build()); + return config; + } + + @SuppressWarnings("unchecked") + protected OnHeapStore newStore() { + Store.Configuration configuration = mockStoreConfig(); + return new OnHeapStore<>(configuration, SystemTimeSource.INSTANCE, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + @Test + @SuppressWarnings("unchecked") + public void testBulkComputeFunctionGetsValuesOfEntries() throws Exception { + @SuppressWarnings("rawtypes") + Store.Configuration config = mock(Store.Configuration.class); + when(config.getExpiry()).thenReturn(ExpiryPolicyBuilder.noExpiration()); + when(config.getKeyType()).thenReturn(Number.class); + when(config.getValueType()).thenReturn(Number.class); + when(config.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build()); + + OnHeapStore store = new OnHeapStore<>(config, SystemTimeSource.INSTANCE, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + store.put(1, 2); + store.put(2, 3); + store.put(3, 4); + + Map> result = store.bulkCompute(new HashSet(Arrays.asList(1, 2, 3, 4, 5, 6)), entries -> { + Map newValues = new HashMap<>(); + for (Map.Entry entry : entries) { + final Number currentValue = entry.getValue(); + if(currentValue == null) { + if(entry.getKey().equals(4)) { + newValues.put(entry.getKey(), null); + } else { + newValues.put(entry.getKey(), 0); + } + } else { + newValues.put(entry.getKey(), currentValue.intValue() * 2); + } + + } + return newValues.entrySet(); + }); + + ConcurrentMap check = new ConcurrentHashMap<>(); + check.put(1, 4); + check.put(2, 6); + check.put(3, 8); + check.put(4, 0); + check.put(5, 0); + check.put(6, 0); + + assertThat(result.get(1).get(), Matchers.is(check.get(1))); + assertThat(result.get(2).get(), Matchers.is(check.get(2))); + assertThat(result.get(3).get(), Matchers.is(check.get(3))); + assertThat(result.get(4), nullValue()); + assertThat(result.get(5).get(), Matchers.is(check.get(5))); + assertThat(result.get(6).get(), Matchers.is(check.get(6))); + + for (Number key : check.keySet()) { + final Store.ValueHolder holder = store.get(key); + if(holder != null) { + check.remove(key, holder.get()); + } + } + assertThat(check.size(), is(1)); + assertThat(check.containsKey(4), is(true)); + + } + + @Test + public void testBulkComputeHappyPath() throws Exception { + OnHeapStore store = newStore(); + store.put(1, "one"); + + Map> result = store.bulkCompute(new HashSet(Arrays.asList(1, 2)), entries -> { + Map newValues = new HashMap<>(); + for (Map.Entry entry : entries) { + if(entry.getKey().intValue() == 1) { + newValues.put(entry.getKey(), "un"); + } else if (entry.getKey().intValue() == 2) { + newValues.put(entry.getKey(), "deux"); + } + } + return newValues.entrySet(); + }); + + assertThat(result.size(), is(2)); + assertThat(result.get(1).get(), Matchers.equalTo("un")); + assertThat(result.get(2).get(), Matchers.equalTo("deux")); + + assertThat(store.get(1).get(), Matchers.equalTo("un")); + assertThat(store.get(2).get(), Matchers.equalTo("deux")); + } + + @Test + public void testBulkComputeStoreRemovesValueWhenFunctionReturnsNullMappings() throws Exception { + Store.Configuration configuration = mockStoreConfig(); + + @SuppressWarnings("unchecked") + OnHeapStore store = new OnHeapStore<>(configuration, SystemTimeSource.INSTANCE, IdentityCopier.identityCopier(), + IdentityCopier.identityCopier(), new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + store.put(1, "one"); + store.put(2, "two"); + store.put(3, "three"); + + Map> result = store.bulkCompute(new HashSet(Arrays.asList(2, 1, 5)), entries -> { + Map newValues = new HashMap<>(); + for (Map.Entry entry : entries) { + newValues.put(entry.getKey(), null); + } + return newValues.entrySet(); + }); + + assertThat(result.size(), is(3)); + + assertThat(store.get(1), is(nullValue())); + assertThat(store.get(2), is(nullValue())); + assertThat(store.get(3).get(), Matchers.equalTo("three")); + assertThat(store.get(5), is(nullValue())); + } + + @Test + public void testBulkComputeRemoveNullValueEntriesFromFunctionReturn() throws Exception { + + OnHeapStore store = newStore(); + store.put(1, "one"); + store.put(2, "two"); + store.put(3, "three"); + + Map> result = store.bulkCompute(new HashSet(Arrays.asList(1, 2, 3)), entries -> { + Map result1 = new HashMap<>(); + for (Map.Entry entry : entries) { + if (entry.getKey().equals(1)) { + result1.put(entry.getKey(), null); + } else if (entry.getKey().equals(3)) { + result1.put(entry.getKey(), null); + } else { + result1.put(entry.getKey(), entry.getValue()); + } + } + return result1.entrySet(); + }); + + assertThat(result.size(), is(3)); + assertThat(result.get(1), is(nullValue())); + assertThat(result.get(2).get(), Matchers.equalTo("two")); + assertThat(result.get(3), is(nullValue())); + + assertThat(store.get(1),is(nullValue())); + assertThat(store.get(2).get(), Matchers.equalTo("two")); + assertThat(store.get(3),is(nullValue())); + + } + + @Test + public void testBulkComputeIfAbsentFunctionDoesNotGetPresentKeys() throws Exception { + + OnHeapStore store = newStore(); + store.put(1, "one"); + store.put(2, "two"); + store.put(3, "three"); + + Map> result = store.bulkComputeIfAbsent(new HashSet(Arrays.asList(1, 2, 3, 4, 5, 6)), keys -> { + Map result1 = new HashMap<>(); + + for (Number key : keys) { + if (key.equals(1)) { + fail(); + } else if (key.equals(2)) { + fail(); + } else if (key.equals(3)) { + fail(); + } else { + result1.put(key, null); + } + } + return result1.entrySet(); + }); + + assertThat(result.size(), is(6)); + assertThat(result.get(1).get(), Matchers.equalTo("one")); + assertThat(result.get(2).get(), Matchers.equalTo("two")); + assertThat(result.get(3).get(), Matchers.equalTo("three")); + assertThat(result.get(4), is(nullValue())); + assertThat(result.get(5), is(nullValue())); + assertThat(result.get(6), is(nullValue())); + + assertThat(store.get(1).get(), Matchers.equalTo("one")); + assertThat(store.get(2).get(), Matchers.equalTo("two")); + assertThat(store.get(3).get(), Matchers.equalTo("three")); + assertThat(store.get(4), is(nullValue())); + assertThat(store.get(5), is(nullValue())); + assertThat(store.get(6), is(nullValue())); + + + } + + @Test + public void testBulkComputeIfAbsentDoesNotOverridePresentKeys() throws Exception { + + OnHeapStore store = newStore(); + store.put(1, "one"); + store.put(2, "two"); + store.put(3, "three"); + + Map> result = store.bulkComputeIfAbsent(new HashSet(Arrays.asList(1, 2, 3, 4, 5, 6)), numbers -> { + Map result1 = new HashMap<>(); + for (Number key : numbers) { + if(key.equals(4)) { + result1.put(key, "quatre"); + } else if(key.equals(5)) { + result1.put(key, "cinq"); + } else if(key.equals(6)) { + result1.put(key, "six"); + } + } + return result1.entrySet(); + }); + + assertThat(result.size(), is(6)); + assertThat(result.get(1).get(), Matchers.equalTo("one")); + assertThat(result.get(2).get(), Matchers.equalTo("two")); + assertThat(result.get(3).get(), Matchers.equalTo("three")); + assertThat(result.get(4).get(), Matchers.equalTo("quatre")); + assertThat(result.get(5).get(), Matchers.equalTo("cinq")); + assertThat(result.get(6).get(), Matchers.equalTo("six")); + + assertThat(store.get(1).get(), Matchers.equalTo("one")); + assertThat(store.get(2).get(), Matchers.equalTo("two")); + assertThat(store.get(3).get(), Matchers.equalTo("three")); + assertThat(store.get(4).get(), Matchers.equalTo("quatre")); + assertThat(store.get(5).get(), Matchers.equalTo("cinq")); + assertThat(store.get(6).get(), Matchers.equalTo("six")); + } + + @Test + public void testBulkComputeIfAbsentDoNothingOnNullValues() throws Exception { + + OnHeapStore store = newStore(); + store.put(1, "one"); + store.put(2, "two"); + store.put(3, "three"); + + Map> result = store.bulkComputeIfAbsent(new HashSet(Arrays.asList(2, 1, 5)), numbers -> { + Map result1 = new HashMap<>(); + for (Number key : numbers) { + // 5 is a missing key, so it's the only key that is going passed to the function + if(key.equals(5)) { + result1.put(key, null); + } + } + Set numbersSet = new HashSet<>(); + for (Number number : numbers) { + numbersSet.add(number); + } + assertThat(numbersSet.size(), is(1)); + assertThat(numbersSet.iterator().next(), Matchers.equalTo(5)); + + return result1.entrySet(); + }); + + assertThat(result.size(), is(3)); + assertThat(result.get(2).get(), Matchers.equalTo("two")); + assertThat(result.get(1).get(), Matchers.equalTo("one")); + assertThat(result.get(5), is(nullValue())); + + assertThat(store.get(1).get(), Matchers.equalTo("one")); + assertThat(store.get(2).get(), Matchers.equalTo("two")); + assertThat(store.get(3).get(), Matchers.equalTo("three")); + assertThat(store.get(5), is(nullValue())); + } + +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java new file mode 100755 index 0000000000..dc7f61bea3 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java @@ -0,0 +1,150 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.impl.internal.events.TestStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.internal.store.StoreFactory; +import org.ehcache.internal.store.StoreSPITest; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.service.ServiceConfiguration; +import org.junit.Before; +import org.terracotta.statistics.StatisticsManager; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; + +/** + * Test the {@link org.ehcache.internal.store.heap.OnHeapStore} compliance to the + * {@link Store} contract. + * + * @author Aurelien Broszniowski + */ + +public class OnHeapStoreByRefSPITest extends StoreSPITest { + + private StoreFactory storeFactory; + + @Override + protected StoreFactory getStoreFactory() { + return storeFactory; + } + + @Before + public void setUp() { + storeFactory = new StoreFactory() { + + @Override + public Store newStore() { + return newStore(null, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); + } + + @Override + public Store newStoreWithCapacity(long capacity) { + return newStore(capacity, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); + } + + @Override + public Store newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource) { + return newStore(null, null, expiry, timeSource); + } + + @Override + public Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { + return newStore(null, evictionAdvisor, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); + } + + @SuppressWarnings("unchecked") + private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, ExpiryPolicy expiry, TimeSource timeSource) { + ResourcePools resourcePools = buildResourcePools(capacity); + Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), + evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, null, null); + return new OnHeapStore<>(config, timeSource, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new NoopSizeOfEngine(), new TestStoreEventDispatcher<>(), new DefaultStatisticsService()); + } + + @Override + @SuppressWarnings("unchecked") + public Store.ValueHolder newValueHolder(final String value) { + return new CopiedOnHeapValueHolder<>(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, IdentityCopier.identityCopier()); + } + + private ResourcePools buildResourcePools(Comparable capacityConstraint) { + if (capacityConstraint == null) { + return newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build(); + } else { + return newResourcePoolsBuilder().heap((Long)capacityConstraint, EntryUnit.ENTRIES).build(); + } + } + + @Override + public Class getKeyType() { + return String.class; + } + + @Override + public Class getValueType() { + return String.class; + } + + @Override + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; + } + + @Override + public String createKey(long seed) { + return "" + seed; + } + + @Override + public String createValue(long seed) { + return "" + seed; + } + + @Override + public void close(final Store store) { + OnHeapStore.Provider.close((OnHeapStore)store); + StatisticsManager.nodeFor(store).clean(); + } + + @Override + public ServiceLocator getServiceProvider() { + ServiceLocator locator = dependencySet().build(); + try { + locator.startAllServices(); + } catch (Exception e) { + throw new RuntimeException(e); + } + return locator; + } + }; + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefTest.java similarity index 89% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefTest.java index 02e43f6be2..abe0f0960f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefTest.java @@ -16,20 +16,20 @@ package org.ehcache.impl.internal.store.heap; import org.ehcache.config.EvictionAdvisor; -import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.expiry.ExpiryPolicy; public abstract class OnHeapStoreByRefTest extends BaseOnHeapStoreTest { @Override protected OnHeapStore newStore(final TimeSource timeSource, - final Expiry expiry, + final ExpiryPolicy expiry, final EvictionAdvisor evictionAdvisor) { return newStore(timeSource, expiry, evictionAdvisor, 100); } protected abstract OnHeapStore newStore(TimeSource timeSource, - Expiry expiry, + ExpiryPolicy expiry, EvictionAdvisor evictionAdvisor, int capacity); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java similarity index 78% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java index 5b7a6bb4b6..96b8443734 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java @@ -18,10 +18,11 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.EntryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; @@ -31,16 +32,17 @@ import org.ehcache.impl.serialization.JavaSerializer; import org.ehcache.internal.store.StoreFactory; import org.ehcache.internal.store.StoreSPITest; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Before; +import org.terracotta.statistics.StatisticsManager; import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; /** * Test the {@link OnHeapStore} compliance to the @@ -68,30 +70,30 @@ public void setUp() { @Override public Store newStore() { - return newStore(null, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override public Store newStoreWithCapacity(long capacity) { - return newStore(capacity, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(capacity, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override - public Store newStoreWithExpiry(Expiry expiry, TimeSource timeSource) { + public Store newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource) { return newStore(null, null, expiry, timeSource); } @Override public Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { - return newStore(null, evictionAdvisor, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, evictionAdvisor, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } - private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { + private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, ExpiryPolicy expiry, TimeSource timeSource) { ResourcePools resourcePools = buildResourcePools(capacity); Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, new JavaSerializer<>(getSystemClassLoader()), new JavaSerializer<>(getSystemClassLoader())); - return new OnHeapStore<>(config, timeSource, defaultCopier, defaultCopier, new NoopSizeOfEngine(), new TestStoreEventDispatcher<>()); + return new OnHeapStore<>(config, timeSource, defaultCopier, defaultCopier, new NoopSizeOfEngine(), new TestStoreEventDispatcher<>(), new DefaultStatisticsService()); } @Override @@ -118,8 +120,8 @@ public Class getValueType() { } @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; } @Override @@ -135,6 +137,7 @@ public String createValue(long seed) { @Override public void close(final Store store) { OnHeapStore.Provider.close((OnHeapStore)store); + StatisticsManager.nodeFor(store).clean(); } @Override @@ -150,8 +153,9 @@ public ServiceLocator getServiceProvider() { }; } - public static void closeStore(OnHeapStore store) { + public static void closeStore(OnHeapStore store) { OnHeapStore.Provider.close(store); + StatisticsManager.nodeFor(store).clean(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java similarity index 89% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java index b41bc112b4..73f12cc31f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java @@ -21,11 +21,11 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; @@ -39,8 +39,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.hamcrest.CoreMatchers.instanceOf; @@ -71,21 +69,16 @@ public Long copyForWrite(Long obj) { @Test public void testKeyCopierCalledOnGetOrComputeIfAbsent() throws Exception { LongCopier keyCopier = new LongCopier(); - OnHeapStore store = newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), Eviction.noAdvice(), + OnHeapStore store = newStore(SystemTimeSource.INSTANCE, ExpiryPolicyBuilder.noExpiration(), Eviction.noAdvice(), keyCopier, new SerializingCopier<>(new JavaSerializer<>(ClassLoader.getSystemClassLoader())), 100); ValueHolder computed = store.getOrComputeIfAbsent(1L, key -> new AbstractValueHolder(-1, -1) { @Override - public Long value() { + public Long get() { return key * 1000L; } - - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } }); - assertThat(computed.value(), is(1000L)); + assertThat(computed.get(), is(1000L)); assertThat(keyCopier.copyForWriteCount, is(1)); assertThat(keyCopier.copyForReadCount, is(0)); } @@ -126,7 +119,7 @@ public void testValueUniqueObject() throws Exception { value.clear(); ValueHolder valueHolder = store.get(key); - if (valueHolder.value() == value || ! valueHolder.value().equals(Collections.singletonList("value"))) { + if (valueHolder.get() == value || ! valueHolder.get().equals(Collections.singletonList("value"))) { throw new AssertionError(); } } @@ -164,7 +157,7 @@ public void testStoreByValue() { final Cache cache2 = cacheManager.createCache("cache2", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(1)) - .add(copierConfiguration) + .withService(copierConfiguration) .build()); performAssertions(cache2, false); @@ -177,7 +170,7 @@ public void testStoreByValue() { } @Override - protected OnHeapStore newStore(TimeSource timeSource, Expiry expiry, + protected OnHeapStore newStore(TimeSource timeSource, ExpiryPolicy expiry, EvictionAdvisor evictionAdvisor) { Copier keyCopier = new SerializingCopier<>(new JavaSerializer<>(getClass().getClassLoader())); Copier valueCopier = new SerializingCopier<>(new JavaSerializer<>(getClass().getClassLoader())); @@ -185,7 +178,7 @@ protected OnHeapStore newStore(TimeSource timeSource, Expiry OnHeapStore newStore(TimeSource timeSource, - Expiry expiry, EvictionAdvisor evictionAdvisor, + ExpiryPolicy expiry, EvictionAdvisor evictionAdvisor, Copier keyCopier, Copier valueCopier, int capacity); private void performAssertions(Cache cache, boolean same) { diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java new file mode 100644 index 0000000000..765cd2291d --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java @@ -0,0 +1,133 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.internal.tier.CachingTierFactory; +import org.ehcache.internal.tier.CachingTierSPITest; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.junit.Before; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; + +/** + * This factory instantiates a CachingTier + * + * @author Aurelien Broszniowski + */ +public class OnHeapStoreCachingTierByRefSPITest extends CachingTierSPITest { + + private CachingTierFactory cachingTierFactory; + + @Override + protected CachingTierFactory getCachingTierFactory() { + return cachingTierFactory; + } + + @Before + @SuppressWarnings("unchecked") + public void setUp() { + cachingTierFactory = new CachingTierFactory() { + + @Override + public CachingTier newCachingTier() { + return newCachingTier(null); + } + + @Override + public CachingTier newCachingTier(long capacity) { + return newCachingTier((Long) capacity); + } + + private CachingTier newCachingTier(Long capacity) { + Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, + ClassLoader.getSystemClassLoader(), ExpiryPolicyBuilder.noExpiration(), buildResourcePools(capacity), 0, null, null); + + return new OnHeapStore<>(config, SystemTimeSource.INSTANCE, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + @Override + public Store.ValueHolder newValueHolder(final String value) { + return new CopiedOnHeapValueHolder<>(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, IdentityCopier.identityCopier()); + } + + @Override + public Store.Provider newProvider() { + return new OnHeapStore.Provider(); + } + + private ResourcePools buildResourcePools(Comparable capacityConstraint) { + if (capacityConstraint == null) { + return newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build(); + } else { + return newResourcePoolsBuilder().heap((Long)capacityConstraint, EntryUnit.ENTRIES).build(); + } + } + + @Override + public Class getKeyType() { + return String.class; + } + + @Override + public Class getValueType() { + return String.class; + } + + @Override + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; + } + + @Override + public String createKey(long seed) { + return "" + seed; + } + + @Override + public String createValue(long seed) { + return "" + seed; + } + + @Override + public void disposeOf(CachingTier tier) { + } + + @Override + public ServiceProvider getServiceProvider() { + return dependencySet().build(); + } + + }; + } + +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java new file mode 100644 index 0000000000..fd1dba84e4 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java @@ -0,0 +1,142 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.impl.internal.store.heap.holders.SerializedOnHeapValueHolder; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.internal.tier.CachingTierFactory; +import org.ehcache.internal.tier.CachingTierSPITest; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.junit.Before; + +import static java.lang.ClassLoader.getSystemClassLoader; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; + +/** + * This factory instantiates a CachingTier + * + * @author Aurelien Broszniowski + */ +public class OnHeapStoreCachingTierByValueSPITest extends CachingTierSPITest { + + private CachingTierFactory cachingTierFactory; + + @Override + protected CachingTierFactory getCachingTierFactory() { + return cachingTierFactory; + } + + @Before + public void setUp() { + cachingTierFactory = new CachingTierFactory() { + + final Serializer defaultSerializer = new JavaSerializer<>(getClass().getClassLoader()); + final Copier defaultCopier = new SerializingCopier<>(defaultSerializer); + + @Override + public CachingTier newCachingTier() { + return newCachingTier(null); + } + + @Override + public CachingTier newCachingTier(long capacity) { + return newCachingTier((Long) capacity); + } + + private CachingTier newCachingTier(Long capacity) { + Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, + ClassLoader.getSystemClassLoader(), ExpiryPolicyBuilder.noExpiration(), buildResourcePools(capacity), 0, + new JavaSerializer<>(getSystemClassLoader()), new JavaSerializer<>(getSystemClassLoader())); + + return new OnHeapStore<>(config, SystemTimeSource.INSTANCE, defaultCopier, defaultCopier, new NoopSizeOfEngine(), + NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + @Override + public Store.ValueHolder newValueHolder(final String value) { + return new SerializedOnHeapValueHolder<>(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, defaultSerializer); + } + + @Override + public Store.Provider newProvider() { + Store.Provider service = new OnHeapStore.Provider(); + service.start(dependencySet().build()); + return service; + } + + private ResourcePools buildResourcePools(Comparable capacityConstraint) { + if (capacityConstraint == null) { + return newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build(); + } else { + return newResourcePoolsBuilder().heap((Long)capacityConstraint, EntryUnit.ENTRIES).build(); + } + } + + @Override + public Class getKeyType() { + return String.class; + } + + @Override + public Class getValueType() { + return String.class; + } + + @Override + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; + } + + @Override + public String createKey(long seed) { + return new String("" + seed); + } + + @Override + public String createValue(long seed) { + return new String("" + seed); + } + + @Override + public void disposeOf(CachingTier tier) { + } + + @Override + public ServiceProvider getServiceProvider() { + return dependencySet().build(); + } + + }; + } + +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java new file mode 100644 index 0000000000..3e951670ba --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java @@ -0,0 +1,209 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.event.EventType; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.events.TestStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.impl.internal.store.heap.holders.OnHeapValueHolder; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.spi.store.Store; +import org.ehcache.internal.TestTimeSource; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.core.spi.store.heap.SizeOfEngine; +import org.junit.Test; + +import java.io.Serializable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Semaphore; + +import static org.ehcache.config.Eviction.noAdvice; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class OnHeapStoreEvictionTest { + + protected OnHeapStoreForTests newStore() { + return newStore(SystemTimeSource.INSTANCE, null); + } + + /** eviction tests : asserting the evict method is called **/ + + @Test + public void testComputeCalledEnforceCapacity() throws Exception { + OnHeapStoreForTests store = newStore(); + + store.put("key", "value"); + store.getAndCompute("key", (mappedKey, mappedValue) -> "value2"); + + assertThat(store.enforceCapacityWasCalled(), is(true)); + } + + @Test + public void testComputeIfAbsentCalledEnforceCapacity() throws Exception { + OnHeapStoreForTests store = newStore(); + + store.computeIfAbsent("key", mappedKey -> "value2"); + + assertThat(store.enforceCapacityWasCalled(), is(true)); + } + + @Test + public void testFaultsDoNotGetToEvictionAdvisor() throws StoreAccessException { + final Semaphore semaphore = new Semaphore(0); + + final OnHeapStoreForTests store = newStore(SystemTimeSource.INSTANCE, noAdvice()); + + ExecutorService executor = Executors.newCachedThreadPool(); + try { + executor.submit(() -> store.getOrComputeIfAbsent("prime", key -> { + semaphore.acquireUninterruptibly(); + return new OnHeapValueHolder(0, 0, false) { + @Override + public String get() { + return key; + } + }; + })); + + while (!semaphore.hasQueuedThreads()); + store.put("boom", "boom"); + } finally { + semaphore.release(1); + executor.shutdown(); + } + } + + @Test + public void testEvictionCandidateLimits() throws Exception { + TestTimeSource timeSource = new TestTimeSource(); + StoreConfigurationImpl configuration = new StoreConfigurationImpl<>( + String.class, String.class, noAdvice(), + getClass().getClassLoader(), ExpiryPolicyBuilder.noExpiration(), heap(1).build(), 1, null, null); + TestStoreEventDispatcher eventDispatcher = new TestStoreEventDispatcher<>(); + final String firstKey = "daFirst"; + eventDispatcher.addEventListener(event -> { + if (event.getType().equals(EventType.EVICTED)) { + assertThat(event.getKey(), is(firstKey)); + } + }); + OnHeapStore store = new OnHeapStore<>(configuration, timeSource, + new IdentityCopier<>(), new IdentityCopier<>(), new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + timeSource.advanceTime(10000L); + store.put(firstKey, "daValue"); + timeSource.advanceTime(10000L); + store.put("other", "otherValue"); + } + + protected OnHeapStoreForTests newStore(final TimeSource timeSource, + final EvictionAdvisor evictionAdvisor) { + return new OnHeapStoreForTests<>(new Store.Configuration() { + @SuppressWarnings("unchecked") + @Override + public Class getKeyType() { + return (Class) String.class; + } + + @SuppressWarnings("unchecked") + @Override + public Class getValueType() { + return (Class) Serializable.class; + } + + @Override + public EvictionAdvisor getEvictionAdvisor() { + return evictionAdvisor; + } + + @Override + public ClassLoader getClassLoader() { + return getClass().getClassLoader(); + } + + @Override + public ExpiryPolicy getExpiry() { + return ExpiryPolicyBuilder.noExpiration(); + } + + @Override + public ResourcePools getResourcePools() { + return newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).build(); + } + + @Override + public Serializer getKeySerializer() { + throw new AssertionError(); + } + + @Override + public Serializer getValueSerializer() { + throw new AssertionError(); + } + + @Override + public int getDispatcherConcurrency() { + return 1; + } + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }, timeSource); + } + + public static class OnHeapStoreForTests extends OnHeapStore { + + public OnHeapStoreForTests(final Configuration config, final TimeSource timeSource) { + super(config, timeSource, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), new NoopSizeOfEngine(), + NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + public OnHeapStoreForTests(final Configuration config, final TimeSource timeSource, final SizeOfEngine engine) { + super(config, timeSource, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), engine, + NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + private boolean enforceCapacityWasCalled = false; + + @Override + protected void enforceCapacity() { + enforceCapacityWasCalled = true; + super.enforceCapacity(); + } + + boolean enforceCapacityWasCalled() { + return enforceCapacityWasCalled; + } + + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java similarity index 86% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java index 57ff5c329b..fbe6a7b7da 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java @@ -17,9 +17,11 @@ package org.ehcache.impl.internal.store.heap; import org.ehcache.Cache; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.EntryUnit; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Expirations; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.core.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; @@ -33,20 +35,17 @@ import java.util.Arrays; import java.util.Collection; -import java.util.Map; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; import java.util.function.Supplier; import static java.util.Collections.singleton; import static java.util.Collections.singletonMap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -76,15 +75,14 @@ public static Collection config() { private OnHeapStore store; + @SuppressWarnings({"unchecked", "rawtypes"}) @Before public void setUp() { - Store.Configuration configuration = mock(Store.Configuration.class); + Store.Configuration configuration = mock(Store.Configuration.class); when(configuration.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build()); when(configuration.getKeyType()).thenReturn(Key.class); when(configuration.getValueType()).thenReturn(String.class); - when(configuration.getExpiry()).thenReturn(Expirations.noExpiration()); - @SuppressWarnings("unchecked") - Store.Configuration config = configuration; + when(configuration.getExpiry()).thenReturn((ExpiryPolicy) ExpiryPolicyBuilder.noExpiration()); Copier keyCopier = new Copier() { @Override @@ -104,7 +102,8 @@ public Key copyForWrite(Key obj) { } }; - store = new OnHeapStore<>(config, SystemTimeSource.INSTANCE, keyCopier, new IdentityCopier<>(), new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); + store = new OnHeapStore<>(configuration, SystemTimeSource.INSTANCE, keyCopier, IdentityCopier.identityCopier(), + new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); } @Test @@ -117,23 +116,23 @@ public void testPutAndGet() throws StoreAccessException { Store.ValueHolder firstStoreValue = store.get(KEY); Store.ValueHolder secondStoreValue = store.get(copyKey); if (copyForWrite) { - assertThat(firstStoreValue.value(), is(VALUE)); + assertThat(firstStoreValue.get(), is(VALUE)); assertThat(secondStoreValue, nullValue()); } else { assertThat(firstStoreValue, nullValue()); - assertThat(secondStoreValue.value(), is(VALUE)); + assertThat(secondStoreValue.get(), is(VALUE)); } } @Test public void testCompute() throws StoreAccessException { final Key copyKey = new Key(KEY); - store.compute(copyKey, (key, value) -> { + store.getAndCompute(copyKey, (key, value) -> { assertThat(key, is(copyKey)); return VALUE; }); copyKey.state = "Different!"; - store.compute(copyKey, (key, value) -> { + store.getAndCompute(copyKey, (key, value) -> { if (copyForWrite) { assertThat(value, nullValue()); } else { @@ -154,12 +153,12 @@ public void testCompute() throws StoreAccessException { @Test public void testComputeWithoutReplaceEqual() throws StoreAccessException { final Key copyKey = new Key(KEY); - store.compute(copyKey, (key, value) -> { + store.computeAndGet(copyKey, (key, value) -> { assertThat(key, is(copyKey)); return VALUE; - }, NOT_REPLACE_EQUAL); + }, NOT_REPLACE_EQUAL, () -> false); copyKey.state = "Different!"; - store.compute(copyKey, (key, value) -> { + store.computeAndGet(copyKey, (key, value) -> { if (copyForWrite) { assertThat(value, nullValue()); } else { @@ -170,7 +169,7 @@ public void testComputeWithoutReplaceEqual() throws StoreAccessException { } } return value; - }, NOT_REPLACE_EQUAL); + }, NOT_REPLACE_EQUAL, () -> false); if (copyForRead) { assertThat(copyKey.state, is("Different!")); @@ -180,12 +179,12 @@ public void testComputeWithoutReplaceEqual() throws StoreAccessException { @Test public void testComputeWithReplaceEqual() throws StoreAccessException { final Key copyKey = new Key(KEY); - store.compute(copyKey, (key, value) -> { + store.computeAndGet(copyKey, (key, value) -> { assertThat(key, is(copyKey)); return VALUE; - }, REPLACE_EQUAL); + }, REPLACE_EQUAL, () -> false); copyKey.state = "Different!"; - store.compute(copyKey, (key, value) -> { + store.computeAndGet(copyKey, (key, value) -> { if (copyForWrite) { assertThat(value, nullValue()); } else { @@ -196,7 +195,7 @@ public void testComputeWithReplaceEqual() throws StoreAccessException { } } return value; - }, REPLACE_EQUAL); + }, REPLACE_EQUAL, () -> false); if (copyForRead) { assertThat(copyKey.state, is("Different!")); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java similarity index 92% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java index 30ed69b897..ebfae715fc 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java @@ -18,17 +18,21 @@ import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.util.UnmatchedResourceType; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Test; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import static java.util.Collections.EMPTY_LIST; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; /** @@ -83,7 +87,7 @@ public void testRankCachingTier() throws Exception { private void assertRank(final Store.Provider provider, final int expectedRank, final ResourceType... resources) { assertThat(provider.rank( new HashSet<>(Arrays.asList(resources)), - Collections.>emptyList()), + Collections.>emptyList()), is(expectedRank)); } } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java new file mode 100644 index 0000000000..28566f5bdb --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java @@ -0,0 +1,257 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.Cache; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.copy.Copier; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Map; +import java.util.function.Supplier; + +import static java.util.Collections.singleton; +import static java.util.Collections.singletonMap; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.test.MockitoUtil.mock; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.when; + +/** + * OnHeapStoreValueCopierTest + */ +@RunWith(Parameterized.class) +public class OnHeapStoreValueCopierTest { + + private static final Long KEY = 42L; + public static final Value VALUE = new Value("TheAnswer"); + public static final Supplier NOT_REPLACE_EQUAL = () -> false; + public static final Supplier REPLACE_EQUAL = () -> true; + + @Parameterized.Parameters(name = "copyForRead: {0} - copyForWrite: {1}") + public static Collection config() { + return Arrays.asList(new Object[][] { + {false, false}, {false, true}, {true, false}, {true, true} + }); + } + + @Parameterized.Parameter(value = 0) + public boolean copyForRead; + + @Parameterized.Parameter(value = 1) + public boolean copyForWrite; + + private OnHeapStore store; + + @SuppressWarnings({"unchecked", "rawtypes"}) + @Before + public void setUp() { + Store.Configuration configuration = mock(Store.Configuration.class); + when(configuration.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build()); + when(configuration.getKeyType()).thenReturn(Long.class); + when(configuration.getValueType()).thenReturn(Value.class); + + ExpiryPolicy expiryPolicy = ExpiryPolicyBuilder.noExpiration(); + when(configuration.getExpiry()).thenReturn(expiryPolicy); + + Copier valueCopier = new Copier() { + @Override + public Value copyForRead(Value obj) { + if (copyForRead) { + return new Value(obj.state); + } + return obj; + } + + @Override + public Value copyForWrite(Value obj) { + if (copyForWrite) { + return new Value(obj.state); + } + return obj; + } + }; + + store = new OnHeapStore<>(configuration, SystemTimeSource.INSTANCE, new IdentityCopier<>(), valueCopier, new NoopSizeOfEngine(), + NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + @Test + public void testPutAndGet() throws StoreAccessException { + store.put(KEY, VALUE); + + Store.ValueHolder firstStoreValue = store.get(KEY); + Store.ValueHolder secondStoreValue = store.get(KEY); + compareValues(VALUE, firstStoreValue.get()); + compareValues(VALUE, secondStoreValue.get()); + compareReadValues(firstStoreValue.get(), secondStoreValue.get()); + } + + @Test + public void testGetAndCompute() throws StoreAccessException { + store.put(KEY, VALUE); + Store.ValueHolder computedVal = store.getAndCompute(KEY, (aLong, value) -> VALUE); + Store.ValueHolder oldValue = store.get(KEY); + store.getAndCompute(KEY, (aLong, value) -> { + compareReadValues(value, oldValue.get()); + return value; + }); + + compareValues(VALUE, computedVal.get()); + } + + @Test + public void testComputeWithoutReplaceEqual() throws StoreAccessException { + final Store.ValueHolder firstValue = store.computeAndGet(KEY, (aLong, value) -> VALUE, NOT_REPLACE_EQUAL, () -> false); + store.computeAndGet(KEY, (aLong, value) -> { + compareReadValues(value, firstValue.get()); + return value; + }, NOT_REPLACE_EQUAL, () -> false); + + compareValues(VALUE, firstValue.get()); + } + + @Test + public void testComputeWithReplaceEqual() throws StoreAccessException { + final Store.ValueHolder firstValue = store.computeAndGet(KEY, (aLong, value) -> VALUE, REPLACE_EQUAL, () -> false); + store.computeAndGet(KEY, (aLong, value) -> { + compareReadValues(value, firstValue.get()); + return value; + }, REPLACE_EQUAL, () -> false); + + compareValues(VALUE, firstValue.get()); + } + + @Test + public void testComputeIfAbsent() throws StoreAccessException { + Store.ValueHolder computedValue = store.computeIfAbsent(KEY, aLong -> VALUE); + Store.ValueHolder secondComputedValue = store.computeIfAbsent(KEY, aLong -> { + fail("There should have been a mapping"); + return null; + }); + compareValues(VALUE, computedValue.get()); + compareReadValues(computedValue.get(), secondComputedValue.get()); + } + + @Test + public void testBulkCompute() throws StoreAccessException { + final Map> results = store.bulkCompute(singleton(KEY), entries -> singletonMap(KEY, VALUE).entrySet()); + store.bulkCompute(singleton(KEY), entries -> { + compareReadValues(results.get(KEY).get(), entries.iterator().next().getValue()); + return entries; + }); + compareValues(VALUE, results.get(KEY).get()); + } + + @Test + public void testBulkComputeWithoutReplaceEqual() throws StoreAccessException { + final Map> results = store.bulkCompute(singleton(KEY), entries -> singletonMap(KEY, VALUE).entrySet(), NOT_REPLACE_EQUAL); + store.bulkCompute(singleton(KEY), entries -> { + compareReadValues(results.get(KEY).get(), entries.iterator().next().getValue()); + return entries; + }, NOT_REPLACE_EQUAL); + compareValues(VALUE, results.get(KEY).get()); + } + + @Test + public void testBulkComputeWithReplaceEqual() throws StoreAccessException { + final Map> results = store.bulkCompute(singleton(KEY), entries -> singletonMap(KEY, VALUE).entrySet(), REPLACE_EQUAL); + store.bulkCompute(singleton(KEY), entries -> { + compareReadValues(results.get(KEY).get(), entries.iterator().next().getValue()); + return entries; + }, REPLACE_EQUAL); + compareValues(VALUE, results.get(KEY).get()); + } + + @Test + public void testBulkComputeIfAbsent() throws StoreAccessException { + Map> results = store.bulkComputeIfAbsent(singleton(KEY), longs -> singletonMap(KEY, VALUE).entrySet()); + Map> secondResults = store.bulkComputeIfAbsent(singleton(KEY), longs -> { + fail("There should have been a mapping!"); + return null; + }); + compareValues(VALUE, results.get(KEY).get()); + compareReadValues(results.get(KEY).get(), secondResults.get(KEY).get()); + } + + @Test + public void testIterator() throws StoreAccessException { + store.put(KEY, VALUE); + Store.Iterator>> iterator = store.iterator(); + assertThat(iterator.hasNext(), is(true)); + while (iterator.hasNext()) { + Cache.Entry> entry = iterator.next(); + compareValues(entry.getValue().get(), VALUE); + } + } + + private void compareValues(Value first, Value second) { + if (copyForRead || copyForWrite) { + assertThat(first, not(sameInstance(second))); + } else { + assertThat(first, sameInstance(second)); + } + } + + private void compareReadValues(Value first, Value second) { + if (copyForRead) { + assertThat(first, not(sameInstance(second))); + } else { + assertThat(first, sameInstance(second)); + } + } + + public static final class Value { + String state; + + public Value(String state) { + this.state = state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Value value = (Value) o; + return state.equals(value.state); + } + + @Override + public int hashCode() { + return state.hashCode(); + } + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStrategyTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStrategyTest.java new file mode 100644 index 0000000000..a8fec575a1 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStrategyTest.java @@ -0,0 +1,170 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap; + +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.internal.store.heap.holders.OnHeapValueHolder; +import org.ehcache.internal.TestTimeSource; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +import java.time.Duration; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +/** + * @author Henri Tremblay + */ +public class OnHeapStrategyTest { + + @Rule + public MockitoRule rule = MockitoJUnit.rule(); + + @Mock + private OnHeapStore store; + + @Mock + private ExpiryPolicy policy; + + private OnHeapStrategy strategy; + + private static class TestOnHeapValueHolder extends OnHeapValueHolder { + + long now; + Duration expiration; + + protected TestOnHeapValueHolder(long expirationTime) { + super(1, 0, expirationTime, true); + } + + @Override + public String get() { + return "test"; + } + + @Override + public void accessed(long now, Duration expiration) { + this.now = now; + this.expiration = expiration; + super.accessed(now, expiration); + } + } + + private TestTimeSource timeSource = new TestTimeSource(); + + @Test + public void isExpired_10seconds() { + strategy = OnHeapStrategy.strategy(store, policy, timeSource); + + TestOnHeapValueHolder mapping = new TestOnHeapValueHolder(10); + assertThat(strategy.isExpired(mapping)).isFalse(); + timeSource.advanceTime(10); + assertThat(strategy.isExpired(mapping)).isTrue(); + } + + @Test + public void isExpired_TTL10seconds() { + strategy = OnHeapStrategy.strategy(store, policy, timeSource); + + TestOnHeapValueHolder mapping = new TestOnHeapValueHolder(10); + assertThat(strategy.isExpired(mapping)).isFalse(); + timeSource.advanceTime(10); + assertThat(strategy.isExpired(mapping)).isTrue(); + } + + @Test + public void isExpired_neverExpires() { + strategy = OnHeapStrategy.strategy(store, ExpiryPolicy.NO_EXPIRY, timeSource); + + TestOnHeapValueHolder mapping = new TestOnHeapValueHolder(10); + assertThat(strategy.isExpired(mapping)).isFalse(); + timeSource.advanceTime(10); + assertThat(strategy.isExpired(mapping)).isFalse(); + } + + @Test + public void setAccessTimeAndExpiryThenReturnMappingOutsideLock_nullExpiryForAccess() { + strategy = OnHeapStrategy.strategy(store, ExpiryPolicy.NO_EXPIRY, timeSource); + + TestOnHeapValueHolder mapping = new TestOnHeapValueHolder(10); + when(policy.getExpiryForAccess(1, mapping)).thenReturn(null); + + strategy.setAccessAndExpiryTimeWhenCallerOutsideLock(1, mapping, timeSource.getTimeMillis()); + + assertThat(mapping.expiration).isNull(); + assertThat(mapping.now).isEqualTo(timeSource.getTimeMillis()); + + verifyZeroInteractions(store); + } + + @Test + public void setAccessTimeAndExpiryThenReturnMappingOutsideLock_zeroExpiryOnAccess() { + strategy = OnHeapStrategy.strategy(store, policy, timeSource); + + TestOnHeapValueHolder mapping = new TestOnHeapValueHolder(10); + when(policy.getExpiryForAccess(1, mapping)).thenReturn(Duration.ZERO); + + strategy.setAccessAndExpiryTimeWhenCallerOutsideLock(1, mapping, timeSource.getTimeMillis()); + + verify(store).expireMappingUnderLock(1, mapping); + } + + @Test + public void setAccessTimeAndExpiryThenReturnMappingOutsideLock_infiniteExpiryOnAccess() { + strategy = OnHeapStrategy.strategy(store, policy, timeSource); + + TestOnHeapValueHolder mapping = new TestOnHeapValueHolder(10); + when(policy.getExpiryForAccess(1, mapping)).thenReturn(ExpiryPolicy.INFINITE); + + strategy.setAccessAndExpiryTimeWhenCallerOutsideLock(1, mapping, timeSource.getTimeMillis()); + + assertThat(mapping.expiration).isEqualTo(ExpiryPolicy.INFINITE); + assertThat(mapping.now).isEqualTo(timeSource.getTimeMillis()); + + verifyZeroInteractions(store); + } + + @Test + public void setAccessTimeAndExpiryThenReturnMappingOutsideLock_movingTime() { + strategy = OnHeapStrategy.strategy(store, policy, timeSource); + + TestOnHeapValueHolder mapping = new TestOnHeapValueHolder(10); + when(policy.getExpiryForAccess(1, mapping)).thenReturn(Duration.ofMillis(20)); + + strategy.setAccessAndExpiryTimeWhenCallerOutsideLock(1, mapping, timeSource.getTimeMillis()); + + assertThat(mapping.expiration).isEqualTo(Duration.ofMillis(20)); + assertThat(mapping.now).isEqualTo(timeSource.getTimeMillis()); + + verifyZeroInteractions(store); + + timeSource.advanceTime(30); + + strategy.setAccessAndExpiryTimeWhenCallerOutsideLock(1, mapping, timeSource.getTimeMillis()); + + assertThat(mapping.expiration).isEqualTo(Duration.ofMillis(20)); + assertThat(mapping.now).isEqualTo(timeSource.getTimeMillis()); + + verifyZeroInteractions(store); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java similarity index 86% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java index e4e99de1f3..88f678b4ed 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java @@ -18,14 +18,15 @@ import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; import org.ehcache.event.EventType; import org.ehcache.core.events.StoreEventDispatcher; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.heap.LimitExceededException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; @@ -39,18 +40,15 @@ import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventListener; -import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.store.heap.SizeOfEngine; import org.hamcrest.Matcher; import org.junit.Test; +import java.time.Duration; import java.util.Arrays; -import java.util.concurrent.TimeUnit; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; +import static org.ehcache.config.builders.ExpiryPolicyBuilder.expiry; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.internal.store.StoreCreationEventListenerTest.eventType; import static org.hamcrest.MatcherAssert.assertThat; @@ -68,7 +66,6 @@ */ public class ByteAccountingTest { - private static final Copier DEFAULT_COPIER = new IdentityCopier(); private static final SizeOfEngine SIZE_OF_ENGINE = new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE); private static final String KEY = "key"; @@ -80,22 +77,22 @@ public class ByteAccountingTest { OnHeapStoreForTests newStore() { - return newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), Eviction.noAdvice()); + return newStore(SystemTimeSource.INSTANCE, ExpiryPolicyBuilder.noExpiration(), Eviction.noAdvice()); } OnHeapStoreForTests newStore(int capacity) { - return newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), Eviction.noAdvice(), capacity); + return newStore(SystemTimeSource.INSTANCE, ExpiryPolicyBuilder.noExpiration(), Eviction.noAdvice(), capacity); } - OnHeapStoreForTests newStore(TimeSource timeSource, Expiry expiry) { + OnHeapStoreForTests newStore(TimeSource timeSource, ExpiryPolicy expiry) { return newStore(timeSource, expiry, Eviction.noAdvice()); } - OnHeapStoreForTests newStore(TimeSource timeSource, Expiry expiry, EvictionAdvisor evictionAdvisor) { + OnHeapStoreForTests newStore(TimeSource timeSource, ExpiryPolicy expiry, EvictionAdvisor evictionAdvisor) { return newStore(timeSource, expiry, evictionAdvisor, 100); } - private OnHeapStoreForTests newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, + private OnHeapStoreForTests newStore(final TimeSource timeSource, final ExpiryPolicy expiry, final EvictionAdvisor evictionAdvisor, final int capacity) { return new OnHeapStoreForTests<>(new Store.Configuration() { @@ -122,7 +119,7 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { + public ExpiryPolicy getExpiry() { return expiry; } @@ -145,6 +142,11 @@ public Serializer getValueSerializer() { public int getDispatcherConcurrency() { return 0; } + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } }, timeSource, new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), new TestStoreEventDispatcher<>()); } @@ -173,7 +175,7 @@ public void testPutUpdate() throws StoreAccessException { @Test public void testPutExpiryOnCreate() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(1000L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setCreate(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().create(Duration.ZERO).build()); store.put(KEY, VALUE); @@ -183,7 +185,7 @@ public void testPutExpiryOnCreate() throws StoreAccessException { @Test public void testPutExpiryOnUpdate() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(1000L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setUpdate(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().update(Duration.ZERO).build()); store.put(KEY, VALUE); store.put(KEY, "otherValue"); @@ -240,7 +242,7 @@ public void testRemoveTwoArgExpired() throws StoreAccessException { @Test public void testRemoveTwoArgExpiresOnAccess() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(1000L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setAccess(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().access(Duration.ZERO).build()); store.put(KEY, VALUE); store.remove(KEY, "whatever value, it expires on access"); @@ -282,7 +284,7 @@ public void testReplaceTwoArgExpired() throws StoreAccessException { @Test public void testReplaceTwoArgExpiresOnUpdate() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(1000L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setUpdate(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().update(Duration.ZERO).build()); store.put(KEY, VALUE); store.replace(KEY, "whatever value, it expires on update"); @@ -324,7 +326,7 @@ public void testReplaceThreeArgExpired() throws StoreAccessException { @Test public void testReplaceThreeArgExpiresOnUpdate() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(1000L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setUpdate(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().update(Duration.ZERO).build()); store.put(KEY, VALUE); store.replace(KEY, VALUE, "whatever value, it expires on update"); @@ -335,11 +337,11 @@ public void testReplaceThreeArgExpiresOnUpdate() throws StoreAccessException { public void testPutIfAbsent() throws StoreAccessException { OnHeapStoreForTests store = newStore(); - store.putIfAbsent(KEY, VALUE); + store.putIfAbsent(KEY, VALUE, b -> {}); long current = store.getCurrentUsageInBytes(); assertThat(current, is(SIZE_OF_KEY_VALUE_PAIR)); - store.putIfAbsent(KEY, "New Value to Put"); + store.putIfAbsent(KEY, "New Value to Put", b -> {}); assertThat(store.getCurrentUsageInBytes(), is(current)); } @@ -350,17 +352,17 @@ public void testPutIfAbsentOverExpired() throws StoreAccessException { store.put(KEY, "an expired value"); timeSource.advanceTime(1000L); - store.putIfAbsent(KEY, VALUE); + store.putIfAbsent(KEY, VALUE, b -> {}); assertThat(store.getCurrentUsageInBytes(), is(SIZE_OF_KEY_VALUE_PAIR)); } @Test public void testPutIfAbsentExpiresOnAccess() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(1000L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setAccess(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().access(Duration.ZERO).build()); store.put(KEY, VALUE); - store.putIfAbsent(KEY, "another value ... whatever"); + store.putIfAbsent(KEY, "another value ... whatever", b -> {}); assertThat(store.getCurrentUsageInBytes(), is(0L)); } @@ -392,11 +394,11 @@ public void testComputeRemove() throws StoreAccessException { store.put(KEY, VALUE); assertThat(store.getCurrentUsageInBytes(), is(SIZE_OF_KEY_VALUE_PAIR)); - store.compute("another", (a, b) -> null); + store.getAndCompute("another", (a, b) -> null); assertThat(store.getCurrentUsageInBytes(), is(SIZE_OF_KEY_VALUE_PAIR)); - store.compute(KEY, (a, b) -> null); + store.getAndCompute(KEY, (a, b) -> null); assertThat(store.getCurrentUsageInBytes(), is(0L)); } @@ -405,14 +407,14 @@ public void testComputeRemove() throws StoreAccessException { public void testCompute() throws StoreAccessException { OnHeapStoreForTests store = newStore(); - store.compute(KEY, (a, b) -> VALUE); + store.getAndCompute(KEY, (a, b) -> VALUE); assertThat(store.getCurrentUsageInBytes(), is(SIZE_OF_KEY_VALUE_PAIR)); final String replace = "Replace the original value"; long delta = SIZEOF.deepSizeOf(replace) - SIZEOF.deepSizeOf(VALUE); - store.compute(KEY, (a, b) -> replace); + store.getAndCompute(KEY, (a, b) -> replace); assertThat(store.getCurrentUsageInBytes(), is(SIZE_OF_KEY_VALUE_PAIR + delta)); } @@ -420,21 +422,21 @@ public void testCompute() throws StoreAccessException { @Test public void testComputeExpiryOnAccess() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(100L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setAccess(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().access(Duration.ZERO).build()); store.put(KEY, VALUE); - store.compute(KEY, (s, s2) -> s2, () -> false); + store.computeAndGet(KEY, (s, s2) -> s2, () -> false, () -> false); assertThat(store.getCurrentUsageInBytes(), is(0L)); } @Test - public void testComputeExpiryOnUpdate() throws StoreAccessException { + public void testGetAndComputeExpiryOnUpdate() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(100L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setUpdate(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().update(Duration.ZERO).build()); store.put(KEY, VALUE); - store.compute(KEY, (s, s2) -> s2); + store.getAndCompute(KEY, (s, s2) -> s2); assertThat(store.getCurrentUsageInBytes(), is(0L)); } @@ -455,7 +457,7 @@ public void testComputeIfAbsent() throws StoreAccessException { @Test public void testComputeIfAbsentExpireOnCreate() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(100L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setCreate(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().create(Duration.ZERO).build()); store.computeIfAbsent(KEY, s -> VALUE); @@ -465,7 +467,7 @@ public void testComputeIfAbsentExpireOnCreate() throws StoreAccessException { @Test public void testComputeIfAbsentExpiryOnAccess() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(100L); - OnHeapStoreForTests store = newStore(timeSource, Expirations.builder().setAccess(Duration.ZERO).build()); + OnHeapStoreForTests store = newStore(timeSource, expiry().access(Duration.ZERO).build()); store.put(KEY, VALUE); store.computeIfAbsent(KEY, s -> { @@ -479,14 +481,14 @@ public void testComputeIfAbsentExpiryOnAccess() throws StoreAccessException { @Test public void testExpiry() throws StoreAccessException { TestTimeSource timeSource = new TestTimeSource(); - OnHeapStoreForTests store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + OnHeapStoreForTests store = newStore(timeSource, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(1))); store.put(KEY, VALUE); assertThat(store.getCurrentUsageInBytes(), is(SIZE_OF_KEY_VALUE_PAIR)); timeSource.advanceTime(1); assertThat(store.getCurrentUsageInBytes(), is(SIZE_OF_KEY_VALUE_PAIR)); assertThat(store.get(KEY), nullValue()); - assertThat(store.getCurrentUsageInBytes(), is(0l)); + assertThat(store.getCurrentUsageInBytes(), is(0L)); } @Test @@ -517,13 +519,12 @@ public void testEviction() throws StoreAccessException { } - private Expiry ttlCreation600ms() { - return Expirations.builder().setCreate(new Duration(600L, TimeUnit.MILLISECONDS)).build(); + private ExpiryPolicy ttlCreation600ms() { + return expiry().create(Duration.ofMillis(600L)).build(); } static long getSize(String key, String value) { - @SuppressWarnings("unchecked") - CopiedOnHeapValueHolder valueHolder = new CopiedOnHeapValueHolder(value, 0L, 0L, true, DEFAULT_COPIER); + CopiedOnHeapValueHolder valueHolder = new CopiedOnHeapValueHolder<>(value, 0L, 0L, true, IdentityCopier.identityCopier()); long size = 0L; try { size = SIZE_OF_ENGINE.sizeof(key, valueHolder); @@ -535,12 +536,10 @@ static long getSize(String key, String value) { static class OnHeapStoreForTests extends OnHeapStore { - private static final Copier DEFAULT_COPIER = new IdentityCopier(); - @SuppressWarnings("unchecked") OnHeapStoreForTests(final Configuration config, final TimeSource timeSource, final SizeOfEngine engine, StoreEventDispatcher eventDispatcher) { - super(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, engine, eventDispatcher); + super(config, timeSource, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), engine, eventDispatcher, new DefaultStatisticsService()); } long getCurrentUsageInBytes() { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java similarity index 82% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java index 1c102303ad..7f5bcf62c8 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java @@ -22,14 +22,16 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.heap.OnHeapStoreByRefTest; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; @@ -37,7 +39,6 @@ public class ByteSizedOnHeapStoreByRefTest extends OnHeapStoreByRefTest { private static final int MAGIC_NUM = 500; - private static final Copier DEFAULT_COPIER = new IdentityCopier(); @Override protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { @@ -50,17 +51,15 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { @Override @SuppressWarnings("unchecked") protected OnHeapStore newStore(final TimeSource timeSource, - final Expiry expiry, + final ExpiryPolicy expiry, final EvictionAdvisor evictionAdvisor, final int capacity) { return new OnHeapStore(new Store.Configuration() { - @SuppressWarnings("unchecked") @Override public Class getKeyType() { return (Class) String.class; } - @SuppressWarnings("unchecked") @Override public Class getValueType() { return (Class) String.class; @@ -77,7 +76,7 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { + public ExpiryPolicy getExpiry() { return expiry; } @@ -100,7 +99,13 @@ public Serializer getValueSerializer() { public int getDispatcherConcurrency() { return 0; } - }, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), eventDispatcher); + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }, timeSource, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), (StoreEventDispatcher) eventDispatcher, new DefaultStatisticsService()); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java similarity index 88% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java index 7d45e0683e..04e8218465 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java @@ -22,7 +22,8 @@ import org.ehcache.config.ResourcePools; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.heap.OnHeapStoreByValueTest; @@ -30,6 +31,7 @@ import org.ehcache.core.spi.store.Store; import org.ehcache.impl.serialization.JavaSerializer; import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; import java.io.Serializable; @@ -50,7 +52,7 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { @Override protected OnHeapStore newStore(final TimeSource timeSource, - final Expiry expiry, + final ExpiryPolicy expiry, final EvictionAdvisor evictionAdvisor, final Copier keyCopier, final Copier valueCopier, final int capacity) { StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); @@ -79,7 +81,7 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { + public ExpiryPolicy getExpiry() { return expiry; } @@ -102,7 +104,12 @@ public Serializer getValueSerializer() { public int getDispatcherConcurrency() { return 0; } - }, timeSource, keyCopier, valueCopier, new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), eventDispatcher); + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }, timeSource, keyCopier, valueCopier, new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), eventDispatcher, new DefaultStatisticsService()); } } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java new file mode 100644 index 0000000000..e0421375a0 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java @@ -0,0 +1,126 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap.bytesized; + +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.store.Store; +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OnHeapStoreBulkMethodsTest extends org.ehcache.impl.internal.store.heap.OnHeapStoreBulkMethodsTest { + + @SuppressWarnings("unchecked") + protected Store.Configuration mockStoreConfig() { + @SuppressWarnings("rawtypes") + Store.Configuration config = mock(Store.Configuration.class); + when(config.getExpiry()).thenReturn(ExpiryPolicyBuilder.noExpiration()); + when(config.getKeyType()).thenReturn(Number.class); + when(config.getValueType()).thenReturn(CharSequence.class); + when(config.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(100, MemoryUnit.KB).build()); + return config; + } + + @SuppressWarnings("unchecked") + protected OnHeapStore newStore() { + Store.Configuration configuration = mockStoreConfig(); + return new OnHeapStore<>(configuration, SystemTimeSource.INSTANCE, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + @SuppressWarnings("unchecked") + @Test + public void testBulkComputeFunctionGetsValuesOfEntries() throws Exception { + @SuppressWarnings("rawtypes") + Store.Configuration config = mock(Store.Configuration.class); + when(config.getExpiry()).thenReturn(ExpiryPolicyBuilder.noExpiration()); + when(config.getKeyType()).thenReturn(Number.class); + when(config.getValueType()).thenReturn(Number.class); + when(config.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(100, MemoryUnit.KB).build()); + Store.Configuration configuration = config; + + OnHeapStore store = new OnHeapStore<>(configuration, SystemTimeSource.INSTANCE, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + store.put(1, 2); + store.put(2, 3); + store.put(3, 4); + + Map> result = store.bulkCompute(new HashSet(Arrays.asList(1, 2, 3, 4, 5, 6)), entries -> { + Map newValues = new HashMap<>(); + for (Map.Entry entry : entries) { + final Number currentValue = entry.getValue(); + if(currentValue == null) { + if(entry.getKey().equals(4)) { + newValues.put(entry.getKey(), null); + } else { + newValues.put(entry.getKey(), 0); + } + } else { + newValues.put(entry.getKey(), currentValue.intValue() * 2); + } + + } + return newValues.entrySet(); + }); + + ConcurrentMap check = new ConcurrentHashMap<>(); + check.put(1, 4); + check.put(2, 6); + check.put(3, 8); + check.put(4, 0); + check.put(5, 0); + check.put(6, 0); + + assertThat(result.get(1).get(), Matchers.is(check.get(1))); + assertThat(result.get(2).get(), Matchers.is(check.get(2))); + assertThat(result.get(3).get(), Matchers.is(check.get(3))); + assertThat(result.get(4), nullValue()); + assertThat(result.get(5).get(), Matchers.is(check.get(5))); + assertThat(result.get(6).get(), Matchers.is(check.get(6))); + + for (Number key : check.keySet()) { + final Store.ValueHolder holder = store.get(key); + if(holder != null) { + check.remove(key, holder.get()); + } + } + assertThat(check.size(), is(1)); + assertThat(check.containsKey(4), is(true)); + + } + +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java new file mode 100644 index 0000000000..70d2b5beea --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java @@ -0,0 +1,132 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap.bytesized; + +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.impl.copy.IdentityCopier; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.internal.tier.CachingTierFactory; +import org.ehcache.internal.tier.CachingTierSPITest; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.junit.Before; + +import java.util.Arrays; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; + +public class OnHeapStoreCachingTierByRefSPITest extends CachingTierSPITest { + + private CachingTierFactory cachingTierFactory; + + @Override + protected CachingTierFactory getCachingTierFactory() { + return cachingTierFactory; + } + + @Before + @SuppressWarnings("unchecked") + public void setUp() { + cachingTierFactory = new CachingTierFactory() { + + @Override + public CachingTier newCachingTier() { + return newCachingTier(null); + } + + @Override + public CachingTier newCachingTier(long capacity) { + return newCachingTier((Long) capacity); + } + + private CachingTier newCachingTier(Long capacity) { + Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, + ClassLoader.getSystemClassLoader(), ExpiryPolicyBuilder.noExpiration(), buildResourcePools(capacity), 0, null, null); + + return new OnHeapStore<>(config, SystemTimeSource.INSTANCE, IdentityCopier.identityCopier(), IdentityCopier.identityCopier(), + new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + @Override + public Store.ValueHolder newValueHolder(final String value) { + return new CopiedOnHeapValueHolder<>(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, IdentityCopier.identityCopier()); + } + + @Override + public Store.Provider newProvider() { + return new OnHeapStore.Provider(); + } + + private ResourcePools buildResourcePools(Comparable capacityConstraint) { + if (capacityConstraint == null) { + capacityConstraint = 10L; + } + return newResourcePoolsBuilder().heap((Long)capacityConstraint, MemoryUnit.MB).build(); + } + + @Override + public Class getKeyType() { + return String.class; + } + + @Override + public Class getValueType() { + return String.class; + } + + @Override + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; + } + + @Override + public String createKey(long seed) { + return Long.toString(seed); + } + + @Override + public String createValue(long seed) { + char[] chars = new char[600 * 1024]; + Arrays.fill(chars, (char) (0x1 + (seed & 0x7e))); + return new String(chars); + } + + @Override + public void disposeOf(CachingTier tier) { + } + + @Override + public ServiceProvider getServiceProvider() { + return dependencySet().build(); + } + + }; + } + +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java new file mode 100644 index 0000000000..7b9caa6ec6 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java @@ -0,0 +1,140 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.heap.bytesized; + +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.heap.holders.SerializedOnHeapValueHolder; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.internal.tier.CachingTierFactory; +import org.ehcache.internal.tier.CachingTierSPITest; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.junit.Before; + +import java.util.Arrays; + +import static java.lang.ClassLoader.getSystemClassLoader; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; + +public class OnHeapStoreCachingTierByValueSPITest extends CachingTierSPITest { + + private CachingTierFactory cachingTierFactory; + + @Override + protected CachingTierFactory getCachingTierFactory() { + return cachingTierFactory; + } + + @Before + public void setUp() { + cachingTierFactory = new CachingTierFactory() { + + final Serializer defaultSerializer = new JavaSerializer<>(getClass().getClassLoader()); + final Copier defaultCopier = new SerializingCopier<>(defaultSerializer); + + @Override + public CachingTier newCachingTier() { + return newCachingTier(null); + } + + @Override + public CachingTier newCachingTier(long capacity) { + return newCachingTier((Long) capacity); + } + + private CachingTier newCachingTier(Long capacity) { + Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, + ClassLoader.getSystemClassLoader(), ExpiryPolicyBuilder.noExpiration(), buildResourcePools(capacity), 0, + new JavaSerializer<>(getSystemClassLoader()), new JavaSerializer<>(getSystemClassLoader())); + return new OnHeapStore<>(config, SystemTimeSource.INSTANCE, defaultCopier, defaultCopier, + new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); + } + + @Override + public Store.ValueHolder newValueHolder(final String value) { + return new SerializedOnHeapValueHolder<>(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, defaultSerializer); + } + + @Override + public Store.Provider newProvider() { + Store.Provider service = new OnHeapStore.Provider(); + service.start(dependencySet().build()); + return service; + } + + private ResourcePools buildResourcePools(Comparable capacityConstraint) { + if (capacityConstraint == null) { + capacityConstraint = 10L; + } + return newResourcePoolsBuilder().heap((Long)capacityConstraint, MemoryUnit.MB).build(); + } + + @Override + public Class getKeyType() { + return String.class; + } + + @Override + public Class getValueType() { + return String.class; + } + + @Override + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; + } + + @Override + public String createKey(long seed) { + return Long.toString(seed); + } + + @Override + public String createValue(long seed) { + char[] chars = new char[600 * 1024]; + Arrays.fill(chars, (char) (0x1 + (seed & 0x7e))); + return new String(chars); + } + + @Override + public void disposeOf(CachingTier tier) { + } + + @Override + public ServiceProvider getServiceProvider() { + return dependencySet().build(); + } + + }; + } + +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreEvictionTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreEvictionTest.java new file mode 100644 index 0000000000..b2574f1a58 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreEvictionTest.java @@ -0,0 +1,92 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.heap.bytesized; + +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.serialization.Serializer; + +import java.io.Serializable; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; + +public class OnHeapStoreEvictionTest extends org.ehcache.impl.internal.store.heap.OnHeapStoreEvictionTest { + + protected OnHeapStoreForTests newStore(final TimeSource timeSource, + final EvictionAdvisor evictionAdvisor) { + return new OnHeapStoreForTests<>(new Store.Configuration() { + @SuppressWarnings("unchecked") + @Override + public Class getKeyType() { + return (Class) String.class; + } + + @SuppressWarnings("unchecked") + @Override + public Class getValueType() { + return (Class) Serializable.class; + } + + @Override + public EvictionAdvisor getEvictionAdvisor() { + return evictionAdvisor; + } + + @Override + public ClassLoader getClassLoader() { + return getClass().getClassLoader(); + } + + @Override + public ExpiryPolicy getExpiry() { + return ExpiryPolicyBuilder.noExpiration(); + } + + @Override + public ResourcePools getResourcePools() { + return newResourcePoolsBuilder().heap(500, MemoryUnit.B).build(); + } + + @Override + public Serializer getKeySerializer() { + throw new AssertionError(); + } + + @Override + public Serializer getValueSerializer() { + throw new AssertionError(); + } + + @Override + public int getDispatcherConcurrency() { + return 0; + } + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } + }, timeSource, new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE)); + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OversizeMappingTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OversizeMappingTest.java similarity index 84% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OversizeMappingTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OversizeMappingTest.java index cf6e480bc7..7b93d815b7 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OversizeMappingTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OversizeMappingTest.java @@ -19,9 +19,9 @@ import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; import org.ehcache.impl.internal.store.heap.OnHeapStore; @@ -29,12 +29,10 @@ import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; import org.junit.Test; -import java.util.function.BiFunction; -import java.util.function.Function; - import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @@ -51,11 +49,11 @@ public class OversizeMappingTest { private static final String OVER_SIZED_VALUE = new String(new byte[1000]); OnHeapStoreForTests newStore() { - return newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), Eviction.noAdvice(), 100); + return newStore(SystemTimeSource.INSTANCE, ExpiryPolicyBuilder.noExpiration(), Eviction.noAdvice(), 100); } - private OnHeapStoreForTests newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, - final int capacity) { + private OnHeapStoreForTests newStore(final TimeSource timeSource, final ExpiryPolicy expiry, final EvictionAdvisor evictionAdvisor, + final int capacity) { return new OnHeapStoreForTests<>(new Store.Configuration() { @SuppressWarnings("unchecked") @@ -81,7 +79,7 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { + public ExpiryPolicy getExpiry() { return expiry; } @@ -104,6 +102,11 @@ public Serializer getValueSerializer() { public int getDispatcherConcurrency() { return 0; } + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } }, timeSource, new DefaultSizeOfEngine(Long.MAX_VALUE, 1000), new TestStoreEventDispatcher<>()); } @@ -112,7 +115,7 @@ private static void assertNullMapping(OnHeapStore store) throws } private static void assertNotNullMapping(OnHeapStore store) throws Exception { - assertThat(store.get(KEY).value(), equalTo(VALUE)); + assertThat(store.get(KEY).get(), equalTo(VALUE)); } @Test @@ -129,7 +132,7 @@ public void testPut() throws Exception { public void testPutIfAbsent() throws Exception { OnHeapStore store = newStore(); - store.putIfAbsent(KEY, OVER_SIZED_VALUE); + store.putIfAbsent(KEY, OVER_SIZED_VALUE, b -> {}); assertNullMapping(store); } @@ -155,15 +158,15 @@ public void testThreeArgReplace() throws Exception { public void testCompute() throws Exception { OnHeapStore store = newStore(); - store.compute(KEY, (a, b) -> OVER_SIZED_VALUE); + store.getAndCompute(KEY, (a, b) -> OVER_SIZED_VALUE); assertNullMapping(store); - store.compute(KEY, (a, b) -> VALUE); + store.getAndCompute(KEY, (a, b) -> VALUE); assertNotNullMapping(store); - store.compute(KEY, (a, b) -> OVER_SIZED_VALUE); + store.getAndCompute(KEY, (a, b) -> OVER_SIZED_VALUE); assertNullMapping(store); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKeyTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKeyTest.java similarity index 97% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKeyTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKeyTest.java index c720b66969..8e0840f94a 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKeyTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapKeyTest.java @@ -19,8 +19,8 @@ import org.ehcache.impl.copy.ReadWriteCopier; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * Created by alsu on 20/08/15. diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolderTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolderTest.java index a2bc0085fb..99fe7fe8fb 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/CopiedOnHeapValueHolderTest.java @@ -35,7 +35,7 @@ public void testValueByValue() throws Exception { CopiedOnHeapValueHolder valueHolder = new CopiedOnHeapValueHolder<>(person, -1, false, copier); person.age = 25; - assertNotSame(person, valueHolder.value()); + assertNotSame(person, valueHolder.get()); } @Test @@ -44,7 +44,7 @@ public void testValueByRef() throws Exception { CopiedOnHeapValueHolder valueHolder = new CopiedOnHeapValueHolder<>(person, -1, false, new IdentityCopier<>()); person.age = 25; - assertSame(person, valueHolder.value()); + assertSame(person, valueHolder.get()); } private static class Person { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolderTest.java similarity index 93% rename from impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolderTest.java index a908f15ccd..fe4c6d68ee 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolderTest.java @@ -26,13 +26,13 @@ import java.nio.ByteBuffer; import java.util.concurrent.Exchanger; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** @@ -46,9 +46,9 @@ public void testValue() { String o = "foo"; ValueHolder vh1 = newValueHolder(o); ValueHolder vh2 = newValueHolder(o); - assertFalse(vh1.value() == vh2.value()); - assertEquals(vh1.value(), vh2.value()); - assertNotSame(vh1.value(), vh1.value()); + assertFalse(vh1.get() == vh2.get()); + assertEquals(vh1.get(), vh2.get()); + assertNotSame(vh1.get(), vh1.get()); } @Test @@ -56,10 +56,10 @@ public void testHashCode() { ValueHolder vh1 = newValueHolder(10); ValueHolder vh2 = newValueHolder(10); // make sure reading the value multiple times doesn't change the hashcode - vh1.value(); - vh1.value(); - vh2.value(); - vh2.value(); + vh1.get(); + vh1.get(); + vh2.get(); + vh2.get(); assertThat(vh1.hashCode(), is(vh2.hashCode())); } @@ -87,9 +87,9 @@ public void testSerializerGetsDifferentByteBufferOnRead() { final SerializedOnHeapValueHolder valueHolder = new SerializedOnHeapValueHolder<>("test it!", System .currentTimeMillis(), false, serializer); - new Thread(valueHolder::value).start(); + new Thread(valueHolder::get).start(); - valueHolder.value(); + valueHolder.get(); } private static class ReadExchangeSerializer implements Serializer { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractEhcacheOffHeapBackingMapTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractEhcacheOffHeapBackingMapTest.java similarity index 99% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractEhcacheOffHeapBackingMapTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractEhcacheOffHeapBackingMapTest.java index 62a7c935d4..89f8f3ffb3 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractEhcacheOffHeapBackingMapTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractEhcacheOffHeapBackingMapTest.java @@ -26,7 +26,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.fail; /** diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java new file mode 100644 index 0000000000..ad9bdad37c --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java @@ -0,0 +1,648 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap; + +import org.ehcache.Cache; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.event.EventType; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.spi.store.AbstractValueHolder; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.events.StoreEvent; +import org.ehcache.core.spi.store.events.StoreEventListener; +import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.expiry.ExpiryPolicy; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; +import org.hamcrest.TypeSafeMatcher; +import org.junit.After; +import org.junit.Test; +import org.terracotta.context.TreeNode; +import org.terracotta.context.query.QueryBuilder; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.StatisticsManager; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.List; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; + +import static org.ehcache.config.builders.ExpiryPolicyBuilder.expiry; +import static org.ehcache.impl.internal.util.Matchers.valueHeld; +import static org.ehcache.impl.internal.util.StatisticsTestUtils.validateStats; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.lessThan; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.hamcrest.MockitoHamcrest.argThat; + +/** + * + * @author cdennis + */ +public abstract class AbstractOffHeapStoreTest { + + private TestTimeSource timeSource = new TestTimeSource(); + private AbstractOffHeapStore offHeapStore; + + @After + public void after() { + if(offHeapStore != null) { + destroyStore(offHeapStore); + } + } + + @Test + public void testGetAndRemoveNoValue() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.noExpiration()); + + assertThat(offHeapStore.getAndRemove("1"), is(nullValue())); + validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS)); + } + + @Test + public void testGetAndRemoveValue() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.noExpiration()); + + offHeapStore.put("1", "one"); + assertThat(offHeapStore.getAndRemove("1").get(), equalTo("one")); + validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.HIT_REMOVED)); + assertThat(offHeapStore.get("1"), is(nullValue())); + } + + @Test + public void testGetAndRemoveExpiredElementReturnsNull() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L))); + + assertThat(offHeapStore.getAndRemove("1"), is(nullValue())); + + offHeapStore.put("1", "one"); + + final AtomicReference> invalidated = new AtomicReference<>(); + offHeapStore.setInvalidationListener((key, valueHolder) -> { + valueHolder.get(); + invalidated.set(valueHolder); + }); + + timeSource.advanceTime(20); + assertThat(offHeapStore.getAndRemove("1"), is(nullValue())); + assertThat(invalidated.get().get(), equalTo("one")); + assertThat(invalidated.get().isExpired(timeSource.getTimeMillis()), is(true)); + assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(1L)); + } + + @Test + public void testInstallMapping() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L))); + + assertThat(offHeapStore.installMapping("1", key -> new SimpleValueHolder<>("one", timeSource.getTimeMillis(), 15)).get(), equalTo("one")); + + validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.InstallMappingOutcome.PUT)); + + timeSource.advanceTime(20); + + try { + offHeapStore.installMapping("1", key -> new SimpleValueHolder<>("un", timeSource.getTimeMillis(), 15)); + fail("expected AssertionError"); + } catch (AssertionError ae) { + // expected + } + } + + @Test + public void testInvalidateKeyAbsent() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L))); + + final AtomicReference> invalidated = new AtomicReference<>(); + offHeapStore.setInvalidationListener((key, valueHolder) -> invalidated.set(valueHolder)); + + offHeapStore.invalidate("1"); + assertThat(invalidated.get(), is(nullValue())); + validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.InvalidateOutcome.MISS)); + } + + @Test + public void testInvalidateKeyPresent() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L))); + + offHeapStore.put("1", "one"); + + final AtomicReference> invalidated = new AtomicReference<>(); + offHeapStore.setInvalidationListener((key, valueHolder) -> { + valueHolder.get(); + invalidated.set(valueHolder); + }); + + offHeapStore.invalidate("1"); + assertThat(invalidated.get().get(), equalTo("one")); + validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.InvalidateOutcome.REMOVED)); + + assertThat(offHeapStore.get("1"), is(nullValue())); + } + + @Test + public void testClear() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L))); + + offHeapStore.put("1", "one"); + offHeapStore.put("2", "two"); + offHeapStore.put("3", "three"); + offHeapStore.clear(); + + assertThat(offHeapStore.get("1"), is(nullValue())); + assertThat(offHeapStore.get("2"), is(nullValue())); + assertThat(offHeapStore.get("3"), is(nullValue())); + } + + @Test + public void testWriteBackOfValueHolder() throws StoreAccessException { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L))); + + offHeapStore.put("key1", "value1"); + timeSource.advanceTime(10); + OffHeapValueHolder valueHolder = (OffHeapValueHolder)offHeapStore.get("key1"); + assertThat(valueHolder.lastAccessTime(), is(10L)); + timeSource.advanceTime(10); + assertThat(offHeapStore.get("key1"), notNullValue()); + timeSource.advanceTime(16); + assertThat(offHeapStore.get("key1"), nullValue()); + } + + @Test + public void testEvictionAdvisor() throws StoreAccessException { + ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L)); + EvictionAdvisor evictionAdvisor = (key, value) -> true; + + performEvictionTest(timeSource, expiry, evictionAdvisor); + } + + @Test + public void testBrokenEvictionAdvisor() throws StoreAccessException { + ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L)); + EvictionAdvisor evictionAdvisor = (key, value) -> { + throw new UnsupportedOperationException("Broken advisor!"); + }; + + performEvictionTest(timeSource, expiry, evictionAdvisor); + } + + @Test + public void testFlushUpdatesAccessStats() throws StoreAccessException { + ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(15L)); + offHeapStore = createAndInitStore(timeSource, expiry); + try { + final String key = "foo"; + final String value = "bar"; + offHeapStore.put(key, value); + final Store.ValueHolder firstValueHolder = offHeapStore.getAndFault(key); + offHeapStore.put(key, value); + final Store.ValueHolder secondValueHolder = offHeapStore.getAndFault(key); + timeSource.advanceTime(10); + ((AbstractValueHolder) firstValueHolder).accessed(timeSource.getTimeMillis(), expiry.getExpiryForAccess(key, () -> value)); + timeSource.advanceTime(10); + ((AbstractValueHolder) secondValueHolder).accessed(timeSource.getTimeMillis(), expiry.getExpiryForAccess(key, () -> value)); + assertThat(offHeapStore.flush(key, new DelegatingValueHolder<>(firstValueHolder)), is(false)); + assertThat(offHeapStore.flush(key, new DelegatingValueHolder<>(secondValueHolder)), is(true)); + timeSource.advanceTime(10); // this should NOT affect + assertThat(offHeapStore.getAndFault(key).lastAccessTime(), is(secondValueHolder.creationTime() + 20)); + } finally { + destroyStore(offHeapStore); + } + } + + @Test + public void testExpiryEventFiredOnExpiredCachedEntry() throws StoreAccessException { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(10L))); + + final List expiredKeys = new ArrayList<>(); + offHeapStore.getStoreEventSource().addEventListener(event -> { + if (event.getType() == EventType.EXPIRED) { + expiredKeys.add(event.getKey()); + } + }); + + offHeapStore.put("key1", "value1"); + offHeapStore.put("key2", "value2"); + + offHeapStore.get("key1"); // Bring the entry to the caching tier + + timeSource.advanceTime(11); // Expire the elements + + offHeapStore.get("key1"); + offHeapStore.get("key2"); + assertThat(expiredKeys, containsInAnyOrder("key1", "key2")); + assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(2L)); + } + + @Test + public void testGetWithExpiryOnAccess() throws Exception { + offHeapStore = createAndInitStore(timeSource, expiry().access(Duration.ZERO).build()); + offHeapStore.put("key", "value"); + final AtomicReference expired = new AtomicReference<>(); + offHeapStore.getStoreEventSource().addEventListener(event -> { + if (event.getType() == EventType.EXPIRED) { + expired.set(event.getKey()); + } + }); + assertThat(offHeapStore.get("key"), valueHeld("value")); + assertThat(expired.get(), is("key")); + } + + @Test + public void testExpiryCreateException() throws Exception { + offHeapStore = createAndInitStore(timeSource, new ExpiryPolicy() { + @Override + public Duration getExpiryForCreation(String key, String value) { + throw new RuntimeException(); + } + + @Override + public Duration getExpiryForAccess(String key, Supplier value) { + throw new AssertionError(); + } + + @Override + public Duration getExpiryForUpdate(String key, Supplier oldValue, String newValue) { + throw new AssertionError(); + } + }); + offHeapStore.put("key", "value"); + assertNull(offHeapStore.get("key")); + } + + @Test + public void testExpiryAccessException() throws Exception { + offHeapStore = createAndInitStore(timeSource, new ExpiryPolicy() { + @Override + public Duration getExpiryForCreation(String key, String value) { + return ExpiryPolicy.INFINITE; + } + + @Override + public Duration getExpiryForAccess(String key, Supplier value) { + throw new RuntimeException(); + } + + @Override + public Duration getExpiryForUpdate(String key, Supplier oldValue, String newValue) { + return null; + } + }); + + offHeapStore.put("key", "value"); + assertThat(offHeapStore.get("key"), valueHeld("value")); + assertNull(offHeapStore.get("key")); + } + + @Test + public void testExpiryUpdateException() throws Exception { + offHeapStore = createAndInitStore(timeSource, new ExpiryPolicy() { + @Override + public Duration getExpiryForCreation(String key, String value) { + return ExpiryPolicy.INFINITE; + } + + @Override + public Duration getExpiryForAccess(String key, Supplier value) { + return ExpiryPolicy.INFINITE; + } + + @Override + public Duration getExpiryForUpdate(String key, Supplier oldValue, String newValue) { + if (timeSource.getTimeMillis() > 0) { + throw new RuntimeException(); + } + return ExpiryPolicy.INFINITE; + } + }); + + offHeapStore.put("key", "value"); + assertThat(offHeapStore.get("key").get(), is("value")); + timeSource.advanceTime(1000); + offHeapStore.put("key", "newValue"); + assertNull(offHeapStore.get("key")); + } + + @Test + public void testGetAndFaultOnExpiredEntry() throws StoreAccessException { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(10L))); + try { + offHeapStore.put("key", "value"); + timeSource.advanceTime(20L); + + Store.ValueHolder valueHolder = offHeapStore.getAndFault("key"); + assertThat(valueHolder, nullValue()); + assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(1L)); + } finally { + destroyStore(offHeapStore); + } + } + + @Test + public void testComputeExpiresOnAccess() throws StoreAccessException { + timeSource.advanceTime(1000L); + offHeapStore = createAndInitStore(timeSource, + expiry().access(Duration.ZERO).update(Duration.ZERO).build()); + + offHeapStore.put("key", "value"); + Store.ValueHolder result = offHeapStore.computeAndGet("key", (s, s2) -> s2, () -> false, () -> false); + + assertThat(result, valueHeld("value")); + } + + @Test + public void testComputeExpiresOnUpdate() throws StoreAccessException { + timeSource.advanceTime(1000L); + + offHeapStore = createAndInitStore(timeSource, + expiry().access(Duration.ZERO).update(Duration.ZERO).build()); + + offHeapStore.put("key", "value"); + Store.ValueHolder result = offHeapStore.computeAndGet("key", (s, s2) -> "newValue", () -> false, () -> false); + + assertThat(result, valueHeld("newValue")); + } + + @Test + public void testComputeOnExpiredEntry() throws StoreAccessException { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(10L))); + + offHeapStore.put("key", "value"); + timeSource.advanceTime(20L); + + offHeapStore.getAndCompute("key", (mappedKey, mappedValue) -> { + assertThat(mappedKey, is("key")); + assertThat(mappedValue, Matchers.nullValue()); + return "value2"; + }); + assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(1L)); + } + + @Test + public void testComputeIfAbsentOnExpiredEntry() throws StoreAccessException { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(10L))); + + offHeapStore.put("key", "value"); + timeSource.advanceTime(20L); + + offHeapStore.computeIfAbsent("key", mappedKey -> { + assertThat(mappedKey, is("key")); + return "value2"; + }); + assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(1L)); + } + + @Test + public void testIteratorDoesNotSkipOrExpiresEntries() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(10L))); + + offHeapStore.put("key1", "value1"); + offHeapStore.put("key2", "value2"); + + timeSource.advanceTime(11L); + + offHeapStore.put("key3", "value3"); + offHeapStore.put("key4", "value4"); + + final List expiredKeys = new ArrayList<>(); + offHeapStore.getStoreEventSource().addEventListener(event -> { + if (event.getType() == EventType.EXPIRED) { + expiredKeys.add(event.getKey()); + } + }); + + List iteratedKeys = new ArrayList<>(); + Store.Iterator>> iterator = offHeapStore.iterator(); + while(iterator.hasNext()) { + iteratedKeys.add(iterator.next().getKey()); + } + + assertThat(iteratedKeys, containsInAnyOrder("key1", "key2", "key3", "key4")); + assertThat(expiredKeys.isEmpty(), is(true)); + } + + @Test + public void testIteratorWithSingleExpiredEntry() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(10L))); + + offHeapStore.put("key1", "value1"); + + timeSource.advanceTime(11L); + + Store.Iterator>> iterator = offHeapStore.iterator(); + assertTrue(iterator.hasNext()); + assertThat(iterator.next().getKey(), equalTo("key1")); + assertFalse(iterator.hasNext()); + } + + @Test + public void testIteratorWithSingleNonExpiredEntry() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(10L))); + + offHeapStore.put("key1", "value1"); + + timeSource.advanceTime(5L); + + Store.Iterator>> iterator = offHeapStore.iterator(); + assertTrue(iterator.hasNext()); + assertThat(iterator.next().getKey(), is("key1")); + } + + @Test + public void testIteratorOnEmptyStore() throws Exception { + offHeapStore = createAndInitStore(timeSource, ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(10L))); + + Store.Iterator>> iterator = offHeapStore.iterator(); + assertFalse(iterator.hasNext()); + } + + protected abstract AbstractOffHeapStore createAndInitStore(final TimeSource timeSource, final ExpiryPolicy expiry); + + protected abstract AbstractOffHeapStore createAndInitStore(final TimeSource timeSource, final ExpiryPolicy expiry, EvictionAdvisor evictionAdvisor); + + protected abstract void destroyStore(AbstractOffHeapStore store); + + private void performEvictionTest(TestTimeSource timeSource, ExpiryPolicy expiry, EvictionAdvisor evictionAdvisor) throws StoreAccessException { + AbstractOffHeapStore offHeapStore = createAndInitStore(timeSource, expiry, evictionAdvisor); + try { + @SuppressWarnings("unchecked") + StoreEventListener listener = mock(StoreEventListener.class); + offHeapStore.getStoreEventSource().addEventListener(listener); + + byte[] value = getBytes(MemoryUnit.KB.toBytes(200)); + offHeapStore.put("key1", value); + offHeapStore.put("key2", value); + offHeapStore.put("key3", value); + offHeapStore.put("key4", value); + offHeapStore.put("key5", value); + offHeapStore.put("key6", value); + + Matcher> matcher = eventType(EventType.EVICTED); + verify(listener, atLeast(1)).onEvent(argThat(matcher)); + } finally { + destroyStore(offHeapStore); + } + } + + public static Matcher> eventType(final EventType type) { + return new TypeSafeMatcher>() { + @Override + protected boolean matchesSafely(StoreEvent item) { + return item.getType().equals(type); + } + + @Override + public void describeTo(Description description) { + description.appendText("store event of type '").appendValue(type).appendText("'"); + } + }; + } + + @SuppressWarnings("unchecked") + private OperationStatistic getExpirationStatistic(Store store) { + StatisticsManager statisticsManager = new StatisticsManager(); + statisticsManager.root(store); + TreeNode treeNode = statisticsManager.queryForSingleton(QueryBuilder.queryBuilder() + .descendants() + .filter(org.terracotta.context.query.Matchers.context( + org.terracotta.context.query.Matchers.allOf(org.terracotta.context.query.Matchers.identifier(org.terracotta.context.query.Matchers + .subclassOf(OperationStatistic.class)), + org.terracotta.context.query.Matchers.attributes(org.terracotta.context.query.Matchers.hasAttribute("name", "expiration"))))) + .build()); + return (OperationStatistic) treeNode.getContext().attributes().get("this"); + } + + private byte[] getBytes(long valueLength) { + assertThat(valueLength, lessThan((long) Integer.MAX_VALUE)); + int valueLengthInt = (int) valueLength; + byte[] value = new byte[valueLengthInt]; + new Random().nextBytes(value); + return value; + } + + private static class TestTimeSource implements TimeSource { + + private long time = 0; + + @Override + public long getTimeMillis() { + return time; + } + + public void advanceTime(long step) { + time += step; + } + } + + public static class DelegatingValueHolder implements Store.ValueHolder { + + private final Store.ValueHolder valueHolder; + + public DelegatingValueHolder(final Store.ValueHolder valueHolder) { + this.valueHolder = valueHolder; + } + + @Override + public T get() { + return valueHolder.get(); + } + + @Override + public long creationTime() { + return valueHolder.creationTime(); + } + + @Override + public long expirationTime() { + return valueHolder.expirationTime(); + } + + @Override + public boolean isExpired(long expirationTime) { + return valueHolder.isExpired(expirationTime); + } + + @Override + public long lastAccessTime() { + return valueHolder.lastAccessTime(); + } + + @Override + public long getId() { + return valueHolder.getId(); + } + } + + static class SimpleValueHolder extends AbstractValueHolder { + + private final T value; + + public SimpleValueHolder(T v, long creationTime, long expirationTime) { + super(-1, creationTime, expirationTime); + this.value = v; + } + + @Override + public T get() { + return value; + } + + @Override + public long creationTime() { + return 0; + } + + @Override + public long expirationTime() { + return 0; + } + + @Override + public boolean isExpired(long expirationTime) { + return false; + } + + @Override + public long lastAccessTime() { + return 0; + } + + @Override + public long getId() { + return 0; + } + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AssertingOffHeapValueHolder.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AssertingOffHeapValueHolder.java new file mode 100644 index 0000000000..35a9e6df03 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AssertingOffHeapValueHolder.java @@ -0,0 +1,187 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap; + +import org.ehcache.spi.serialization.Serializer; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.Label; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Type; +import org.objectweb.asm.commons.InstructionAdapter; +import org.objectweb.asm.commons.Method; +import org.terracotta.offheapstore.storage.portability.WriteContext; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.NavigableMap; +import java.util.TreeMap; +import java.util.concurrent.locks.Lock; +import java.util.function.Predicate; + +import static java.util.Arrays.asList; +import static java.util.Arrays.stream; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.objectweb.asm.Opcodes.ASM6; +import static org.objectweb.asm.Type.getObjectType; +import static org.objectweb.asm.Type.getType; +import static org.objectweb.asm.commons.Method.getMethod; + +public class AssertingOffHeapValueHolder extends LazyOffHeapValueHolder { + + /** + * This is a set of 'patterns' that capture a subset of the lock scopes that the `OffHeapValueHolder` can be + * called from. You might end up looking at these patterns for one of three reasons: + * + * 1. (Most Likely) You introduced new code or new testing that access a pre-existing lock scope that is not listed + * here. Find the lock scope in the stack trace of the thrown exception and add an appropriate stanza here. + * 2. (Less Likely) You introduced a new call path that leaks a still serialized, attached value holder that someone + * else tried to access later (outside lock scope). In this case you must locate the source of the value holder + * and either force deserialization or detach the value under the pre-existing lock scope. + * 3. (Least Likely) You introduced a new method to OffHeapStore that needs locking properly. + */ + private static final Collection> LOCK_SCOPES = asList( + className("org.terracotta.offheapstore.AbstractLockedOffHeapHashMap").methodName("shrink"), + className("org.terracotta.offheapstore.AbstractLockedOffHeapHashMap").methodName("computeWithMetadata"), + className("org.terracotta.offheapstore.AbstractLockedOffHeapHashMap").methodName("computeIfPresentWithMetadata"), + className("org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory$EhcacheSegment$EntrySet").methodName("iterator"), + className("org.ehcache.impl.internal.store.disk.factories.EhcachePersistentSegmentFactory$EhcachePersistentSegment$EntrySet").methodName("iterator"), + className("org.terracotta.offheapstore.AbstractLockedOffHeapHashMap$LockedEntryIterator").methodName("next") + ); + + private static void assertStackTraceContainsLockScope() { + assertThat(stream(Thread.currentThread().getStackTrace()).filter(ste -> LOCK_SCOPES.stream().anyMatch(p -> p.test(ste))).anyMatch(ste -> isLockedInFrame(ste)), is(true)); + } + + private static StePredicateBuilderOne className(String className) { + return new StePredicateBuilderOne() { + @Override + public Predicate methodName(String methodName) { + StePredicateBuilderOne outer = this; + return ste -> outer.test(ste) && methodName.equals(ste.getMethodName()); + } + + @Override + public boolean test(StackTraceElement ste) { + return className.equals(ste.getClassName()); + } + }; + } + + interface StePredicateBuilderOne extends Predicate { + + Predicate methodName(String computeIfPresentWithMetadata); + } + + public AssertingOffHeapValueHolder(long id, ByteBuffer binaryValue, Serializer serializer, long creationTime, long expireTime, long lastAccessTime, WriteContext writeContext) { + super(id, binaryValue, serializer, creationTime, expireTime, lastAccessTime, writeContext); + } + + @Override + void writeBack() { + assertStackTraceContainsLockScope(); + super.writeBack(); + } + + @Override + V deserialize() { + if (!isBinaryValueAvailable()) { + assertStackTraceContainsLockScope(); + } + return super.deserialize(); + } + + @Override + void detach() { + assertStackTraceContainsLockScope(); + super.detach(); + } + + private static final Type LOCK_CLASS; + private static final Method LOCK_METHOD; + private static final Method UNLOCK_METHOD; + + static { + try { + LOCK_CLASS = getType(Lock.class); + LOCK_METHOD = getMethod(Lock.class.getMethod("lock")); + UNLOCK_METHOD = getMethod(Lock.class.getMethod("unlock")); + } catch (NoSuchMethodException e) { + throw new AssertionError(e); + } + } + + private static boolean isLockedInFrame(StackTraceElement ste) { + try { + ClassReader reader = new ClassReader(ste.getClassName()); + + NavigableMap lockLevels = new TreeMap<>(); + + reader.accept(new ClassVisitor(ASM6) { + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + if (ste.getMethodName().equals(name)) { + return new InstructionAdapter(ASM6, new MethodVisitor(ASM6) {}) { + + private final Map levels = new HashMap<>(); + + private int lockLevel; + + @Override + public void invokeinterface(String owner, String name, String descriptor) { + if (LOCK_CLASS.equals(getObjectType(owner))) { + if (LOCK_METHOD.equals(new Method(name, descriptor))) { + lockLevel++; + } else if (UNLOCK_METHOD.equals(new Method(name, descriptor))) { + lockLevel--; + } + } + } + + @Override + public void visitJumpInsn(int opcode, Label label) { + levels.merge(label, lockLevel, Integer::max); + } + + @Override + public void visitLabel(Label label) { + lockLevel = levels.merge(label, lockLevel, Integer::max); + } + + @Override + public void visitLineNumber(int line, Label start) { + lockLevels.merge(line, levels.get(start), Integer::max); + } + }; + } else { + return null; + } + } + }, 0); + + Map.Entry entry = lockLevels.floorEntry(ste.getLineNumber()); + + return entry.getValue() > 0; + } catch (IOException e) { + throw new AssertionError(e); + } + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AssertingOffHeapValueHolderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AssertingOffHeapValueHolderTest.java new file mode 100644 index 0000000000..f51bd82b3e --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/AssertingOffHeapValueHolderTest.java @@ -0,0 +1,62 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap; + +import org.ehcache.spi.serialization.Serializer; +import org.junit.Test; +import org.terracotta.offheapstore.storage.portability.WriteContext; + +import java.nio.ByteBuffer; + +import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; + +public class AssertingOffHeapValueHolderTest { + + @Test @SuppressWarnings("unchecked") + public void testLockingAssertionsOnDetach() { + OffHeapValueHolder valueHolder = new AssertingOffHeapValueHolder<>(1L, ByteBuffer.allocate(1), mock(Serializer.class), 10L, 20L, 15L, mock(WriteContext.class)); + try { + valueHolder.detach(); + fail("Expected AssertionError"); + } catch (AssertionError e) { + //expected + } + } + + @Test @SuppressWarnings("unchecked") + public void testLockingAssertionsOnForceDeserialize() { + OffHeapValueHolder valueHolder = new AssertingOffHeapValueHolder<>(1L, ByteBuffer.allocate(1), mock(Serializer.class), 10L, 20L, 15L, mock(WriteContext.class)); + try { + valueHolder.forceDeserialization(); + fail("Expected AssertionError"); + } catch (AssertionError e) { + //expected + } + } + + @Test @SuppressWarnings("unchecked") + public void testLockingAssertionsOnWriteBack() { + OffHeapValueHolder valueHolder = new AssertingOffHeapValueHolder<>(1L, ByteBuffer.allocate(1), mock(Serializer.class), 10L, 20L, 15L, mock(WriteContext.class)); + try { + valueHolder.writeBack(); + fail("Expected AssertionError"); + } catch (AssertionError e) { + //expected + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolderTest.java similarity index 94% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolderTest.java index b25a893be4..5bbdff41da 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/BasicOffHeapValueHolderTest.java @@ -19,8 +19,8 @@ import org.junit.Before; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * BasicOffHeapValueHolderTest @@ -38,7 +38,7 @@ public void setUp() { @Test public void testCanAccessValue() { - assertThat(valueHolder.value(), is(value)); + assertThat(valueHolder.get(), is(value)); } @Test(expected = UnsupportedOperationException.class) diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolderTest.java similarity index 93% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolderTest.java index 56f393f518..28c858eeb7 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/BinaryOffHeapValueHolderTest.java @@ -22,8 +22,8 @@ import java.nio.ByteBuffer; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * BinaryOffHeapValueHolderTest @@ -38,7 +38,7 @@ public class BinaryOffHeapValueHolderTest { public void setUp() { serializer = new StringSerializer(); value = "aValue"; - valueHolder = new BinaryOffHeapValueHolder<>(-1, value, serializer.serialize(value), 0, 0, 0, 0); + valueHolder = new BinaryOffHeapValueHolder<>(-1, value, serializer.serialize(value), 0, 0, 0); } @Test @@ -50,7 +50,7 @@ public void testCanAccessBinaryValue() throws ClassNotFoundException { @Test public void testCanAccessValue() { - assertThat(valueHolder.value(), is(value)); + assertThat(valueHolder.get(), is(value)); } @Test(expected = UnsupportedOperationException.class) diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolderTest.java similarity index 87% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolderTest.java index 93830501cb..76f6dc0466 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolderTest.java @@ -22,9 +22,9 @@ import java.nio.ByteBuffer; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; @@ -38,11 +38,11 @@ public void testDelayedDeserialization() { JavaSerializer serializer = new JavaSerializer<>(getClass().getClassLoader()); String testValue = "Let's get binary!"; ByteBuffer serialized = serializer.serialize(testValue); - OffHeapValueHolder valueHolder = new LazyOffHeapValueHolder<>(1L, serialized, serializer, 10L, 20L, 15L, 3, mock(WriteContext.class)); + OffHeapValueHolder valueHolder = new LazyOffHeapValueHolder<>(1L, serialized, serializer, 10L, 20L, 15L, mock(WriteContext.class)); valueHolder.detach(); serialized.clear(); - assertThat(valueHolder.value(), is(testValue)); + assertThat(valueHolder.get(), is(testValue)); } @Test @@ -50,7 +50,7 @@ public void testCanAccessBinaryValue() throws ClassNotFoundException { JavaSerializer serializer = new JavaSerializer<>(getClass().getClassLoader()); String testValue = "Let's get binary!"; ByteBuffer serialized = serializer.serialize(testValue); - LazyOffHeapValueHolder valueHolder = new LazyOffHeapValueHolder<>(1L, serialized, serializer, 10L, 20L, 15L, 3, mock(WriteContext.class)); + LazyOffHeapValueHolder valueHolder = new LazyOffHeapValueHolder<>(1L, serialized, serializer, 10L, 20L, 15L, mock(WriteContext.class)); valueHolder.detach(); @@ -63,7 +63,7 @@ public void testPreventAccessToBinaryValueIfNotPrepared() { JavaSerializer serializer = new JavaSerializer<>(getClass().getClassLoader()); String testValue = "Let's get binary!"; ByteBuffer serialized = serializer.serialize(testValue); - LazyOffHeapValueHolder valueHolder = new LazyOffHeapValueHolder<>(1L, serialized, serializer, 10L, 20L, 15L, 3, mock(WriteContext.class)); + LazyOffHeapValueHolder valueHolder = new LazyOffHeapValueHolder<>(1L, serialized, serializer, 10L, 20L, 15L, mock(WriteContext.class)); try { valueHolder.getBinaryValue(); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/MemorySizeParserTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/MemorySizeParserTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/MemorySizeParserTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/MemorySizeParserTest.java diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java new file mode 100644 index 0000000000..7ba4f0bf90 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.offheap; + +import org.terracotta.statistics.StatisticsManager; + +/** + * @author Ludovic Orban + */ +public class OffHeapStoreLifecycleHelper { + + private OffHeapStoreLifecycleHelper() { + } + + public static void init(OffHeapStore offHeapStore) { + OffHeapStore.Provider.init(offHeapStore); + } + + public static void close(OffHeapStore offHeapStore) { + OffHeapStore.Provider.close(offHeapStore); + StatisticsManager.nodeFor(offHeapStore).clean(); + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java similarity index 81% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java index 456488d4c2..60bb1ca087 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java @@ -18,12 +18,13 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.SizedResourcePool; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; @@ -31,17 +32,18 @@ import org.ehcache.internal.store.StoreFactory; import org.ehcache.internal.tier.AuthoritativeTierFactory; import org.ehcache.internal.tier.AuthoritativeTierSPITest; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Before; +import org.terracotta.statistics.StatisticsManager; import java.util.Arrays; import static org.ehcache.config.ResourceType.Core.OFFHEAP; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; /** * OffHeapStoreSPITest @@ -55,26 +57,26 @@ public void setUp() { authoritativeTierFactory = new AuthoritativeTierFactory() { @Override public AuthoritativeTier newStore() { - return newStore(null, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override public AuthoritativeTier newStoreWithCapacity(long capacity) { - return newStore(capacity, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(capacity, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override - public AuthoritativeTier newStoreWithExpiry(Expiry expiry, TimeSource timeSource) { + public AuthoritativeTier newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource) { return newStore(null, null, expiry, timeSource); } @Override public AuthoritativeTier newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { - return newStore(null, evictionAdvisor, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, evictionAdvisor, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } - private AuthoritativeTier newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { + private AuthoritativeTier newStore(Long capacity, EvictionAdvisor evictionAdvisor, ExpiryPolicy expiry, TimeSource timeSource) { Serializer keySerializer = new JavaSerializer<>(getClass().getClassLoader()); Serializer valueSerializer = new JavaSerializer<>(getClass().getClassLoader()); @@ -85,7 +87,7 @@ private AuthoritativeTier newStore(Long capacity, EvictionAdviso Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, keySerializer, valueSerializer); OffHeapStore store = new OffHeapStore<>(config, timeSource, new TestStoreEventDispatcher<>(), unit - .toBytes(offheapPool.getSize())); + .toBytes(offheapPool.getSize()), new DefaultStatisticsService()); OffHeapStore.Provider.init(store); return store; } @@ -113,8 +115,8 @@ public Class getValueType() { } @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; } @Override @@ -143,6 +145,7 @@ public String createValue(long seed) { @Override public void close(final Store store) { OffHeapStore.Provider.close((OffHeapStore)store); + StatisticsManager.nodeFor(store).clean(); } }; } @@ -163,6 +166,7 @@ public static void initStore(OffHeapStore offHeapStore) { public static void closeStore(OffHeapStore offHeapStore) { OffHeapStore.Provider.close(offHeapStore); + StatisticsManager.nodeFor(offHeapStore).clean(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java similarity index 76% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java index 402d7dd006..20b2449444 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java @@ -18,19 +18,23 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; -import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.impl.internal.util.UnmatchedResourceType; +import org.ehcache.impl.internal.store.offheap.portability.AssertingOffHeapValueHolderPortability; +import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Test; +import org.terracotta.statistics.StatisticsManager; import java.util.Arrays; import java.util.Collections; @@ -38,13 +42,13 @@ import static java.util.Collections.EMPTY_LIST; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class OffHeapStoreTest extends AbstractOffHeapStoreTest { @Override - protected OffHeapStore createAndInitStore(TimeSource timeSource, Expiry expiry) { + protected OffHeapStore createAndInitStore(TimeSource timeSource, ExpiryPolicy expiry) { try { SerializationProvider serializationProvider = new DefaultSerializationProvider(null); serializationProvider.start(providerContaining()); @@ -53,8 +57,13 @@ protected OffHeapStore createAndInitStore(TimeSource timeSource, Serializer valueSerializer = serializationProvider.createValueSerializer(String.class, classLoader); StoreConfigurationImpl storeConfiguration = new StoreConfigurationImpl<>(String.class, String.class, null, classLoader, expiry, null, 0, keySerializer, valueSerializer); - OffHeapStore offHeapStore = new OffHeapStore<>(storeConfiguration, timeSource, new TestStoreEventDispatcher<>(), MemoryUnit.MB - .toBytes(1)); + OffHeapStore offHeapStore = new OffHeapStore(storeConfiguration, timeSource, new TestStoreEventDispatcher<>(), MemoryUnit.MB + .toBytes(1), new DefaultStatisticsService()) { + @Override + protected OffHeapValueHolderPortability createValuePortability(Serializer serializer) { + return new AssertingOffHeapValueHolderPortability<>(serializer); + } + }; OffHeapStore.Provider.init(offHeapStore); return offHeapStore; } catch (UnsupportedTypeException e) { @@ -63,7 +72,7 @@ protected OffHeapStore createAndInitStore(TimeSource timeSource, } @Override - protected OffHeapStore createAndInitStore(TimeSource timeSource, Expiry expiry, EvictionAdvisor evictionAdvisor) { + protected OffHeapStore createAndInitStore(TimeSource timeSource, ExpiryPolicy expiry, EvictionAdvisor evictionAdvisor) { try { SerializationProvider serializationProvider = new DefaultSerializationProvider(null); serializationProvider.start(providerContaining()); @@ -72,8 +81,13 @@ protected OffHeapStore createAndInitStore(TimeSource timeSource, Serializer valueSerializer = serializationProvider.createValueSerializer(byte[].class, classLoader); StoreConfigurationImpl storeConfiguration = new StoreConfigurationImpl<>(String.class, byte[].class, evictionAdvisor, getClass().getClassLoader(), expiry, null, 0, keySerializer, valueSerializer); - OffHeapStore offHeapStore = new OffHeapStore<>(storeConfiguration, timeSource, new TestStoreEventDispatcher<>(), MemoryUnit.MB - .toBytes(1)); + OffHeapStore offHeapStore = new OffHeapStore(storeConfiguration, timeSource, new TestStoreEventDispatcher<>(), MemoryUnit.MB + .toBytes(1), new DefaultStatisticsService()) { + @Override + protected OffHeapValueHolderPortability createValuePortability(Serializer serializer) { + return new AssertingOffHeapValueHolderPortability<>(serializer); + } + }; OffHeapStore.Provider.init(offHeapStore); return offHeapStore; } catch (UnsupportedTypeException e) { @@ -109,12 +123,13 @@ public void testRank() throws Exception { private void assertRank(final Store.Provider provider, final int expectedRank, final ResourceType... resources) { assertThat(provider.rank( new HashSet<>(Arrays.asList(resources)), - Collections.>emptyList()), + Collections.>emptyList()), is(expectedRank)); } @Override protected void destroyStore(AbstractOffHeapStore store) { OffHeapStore.Provider.close((OffHeapStore) store); + StatisticsManager.nodeFor(store).clean(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreUtilsTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreUtilsTest.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreUtilsTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreUtilsTest.java diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolderPortabilityTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolderPortabilityTest.java new file mode 100644 index 0000000000..0721e250ff --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolderPortabilityTest.java @@ -0,0 +1,123 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap; + +import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; +import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; +import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.serialization.SerializationProvider; +import org.ehcache.spi.serialization.UnsupportedTypeException; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.offheapstore.storage.portability.WriteContext; + +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.nio.ByteBuffer; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +public class OffHeapValueHolderPortabilityTest { + + private OffHeapValueHolderPortability valueHolderPortability; + private OffHeapValueHolder originalValue; + + @Before + public void setup() throws UnsupportedTypeException { + SerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + valueHolderPortability = new OffHeapValueHolderPortability<>(provider + .createValueSerializer(String.class, getClass().getClassLoader())); + + originalValue = new BasicOffHeapValueHolder<>(-1, "aValue", 1L, 2L, 3L); + + } + + @Test + public void testEncodeDecode() throws IOException { + ByteBuffer encoded = valueHolderPortability.encode(originalValue); + + // uncomment to perform backward compatibility tests + // passThroughAFile(encoded); + + OffHeapValueHolder decoded = valueHolderPortability.decode(encoded); + + assertThat(originalValue, equalTo(decoded)); + } + + /** + * This method can be used to test backward compatibility punctually. You run the test once and it will save the content + * of the buffer to a file. You then run it again with the new version by commenting the file writing. If it works, + * it means you are backward compatible. + * + * @param encoded the buffer to save to file + * @throws IOException if something goes wrong + */ + private void passThroughAFile(ByteBuffer encoded) throws IOException { + Path path = Paths.get("build/offheap.dat"); + Files.write(path, encoded.array()); // comment this line when running the second time + encoded.position(0); + encoded.put(Files.readAllBytes(path)); + encoded.flip(); + } + + @Test + public void testDecodingAPreviousVersionWithTheHits() { + StringSerializer serializer = new StringSerializer(); + ByteBuffer serialized = serializer.serialize("test"); + + long time = System.currentTimeMillis(); + + ByteBuffer byteBuffer = ByteBuffer.allocate(serialized.remaining() + 40); + byteBuffer.putLong(123L); // id + byteBuffer.putLong(time); // creation time + byteBuffer.putLong(time + 1); // last access time + byteBuffer.putLong(time + 2); // expiration time + byteBuffer.putLong(100L); // hits + byteBuffer.put(serialized); // the held value + byteBuffer.flip(); + + OffHeapValueHolder decoded = valueHolderPortability.decode(byteBuffer); + assertThat(decoded.getId(), equalTo(123L)); + assertThat(decoded.creationTime(), equalTo(time)); + assertThat(decoded.lastAccessTime(), equalTo(time + 1)); + assertThat(decoded.expirationTime(), equalTo(time + 2)); + assertThat(decoded.get(), equalTo("test")); + } + + @Test + public void testWriteBackSupport() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { + ByteBuffer encoded = valueHolderPortability.encode(originalValue); + WriteContext writeContext = mock(WriteContext.class); + OffHeapValueHolder decoded = valueHolderPortability.decode(encoded, writeContext); + + decoded.setExpirationTime(4L); + decoded.setLastAccessTime(6L); + decoded.writeBack(); + + verify(writeContext).setLong(OffHeapValueHolderPortability.ACCESS_TIME_OFFSET, 6L); + verify(writeContext).setLong(OffHeapValueHolderPortability.EXPIRE_TIME_OFFSET, 4L); + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java similarity index 86% rename from impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java index 2fcf40e305..f146989f7a 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java @@ -36,7 +36,7 @@ import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -44,19 +44,19 @@ public class EhcacheSegmentTest { @SuppressWarnings("unchecked") private EhcacheSegmentFactory.EhcacheSegment createTestSegment() { - return createTestSegment(Eviction.noAdvice(), mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); + return createTestSegmentWithAdvisorAndListener(Eviction.noAdvice(), mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } @SuppressWarnings("unchecked") - private EhcacheSegmentFactory.EhcacheSegment createTestSegment(EvictionAdvisor evictionPredicate) { - return createTestSegment(evictionPredicate, mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); + private EhcacheSegmentFactory.EhcacheSegment createTestSegmentWithAdvisor(EvictionAdvisor evictionPredicate) { + return createTestSegmentWithAdvisorAndListener(evictionPredicate, mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } - private EhcacheSegmentFactory.EhcacheSegment createTestSegment(EhcacheSegmentFactory.EhcacheSegment.EvictionListener evictionListener) { - return createTestSegment(Eviction.noAdvice(), evictionListener); + private EhcacheSegmentFactory.EhcacheSegment createTestSegmentWithListener(EhcacheSegmentFactory.EhcacheSegment.EvictionListener evictionListener) { + return createTestSegmentWithAdvisorAndListener(Eviction.noAdvice(), evictionListener); } - private EhcacheSegmentFactory.EhcacheSegment createTestSegment(final EvictionAdvisor evictionPredicate, EhcacheSegmentFactory.EhcacheSegment.EvictionListener evictionListener) { + private EhcacheSegmentFactory.EhcacheSegment createTestSegmentWithAdvisorAndListener(final EvictionAdvisor evictionPredicate, EhcacheSegmentFactory.EhcacheSegment.EvictionListener evictionListener) { try { HeuristicConfiguration configuration = new HeuristicConfiguration(1024 * 1024); SerializationProvider serializationProvider = new DefaultSerializationProvider(null); @@ -94,7 +94,7 @@ public void setSwitchedOn(boolean switchedOn) { @Test public void testPutAdvisedAgainstEvictionComputesMetadata() { - EhcacheSegmentFactory.EhcacheSegment segment = createTestSegment((key, value) -> { + EhcacheSegmentFactory.EhcacheSegment segment = createTestSegmentWithAdvisor((key, value) -> { return "please-do-not-evict-me".equals(key); }); try { @@ -107,7 +107,7 @@ public void testPutAdvisedAgainstEvictionComputesMetadata() { @Test public void testPutPinnedAdvisedAgainstComputesMetadata() { - EhcacheSegmentFactory.EhcacheSegment segment = createTestSegment((key, value) -> { + EhcacheSegmentFactory.EhcacheSegment segment = createTestSegmentWithAdvisor((key, value) -> { return "please-do-not-evict-me".equals(key); }); try { @@ -133,7 +133,7 @@ public void testAdviceAgainstEvictionPreventsEviction() { public void testEvictionFiresEvent() { @SuppressWarnings("unchecked") EhcacheSegmentFactory.EhcacheSegment.EvictionListener evictionListener = mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class); - EhcacheSegmentFactory.EhcacheSegment segment = createTestSegment(evictionListener); + EhcacheSegmentFactory.EhcacheSegment segment = createTestSegmentWithListener(evictionListener); try { segment.put("key", "value"); segment.evict(segment.getEvictionIndex(), false); diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/portability/AssertingOffHeapValueHolderPortability.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/portability/AssertingOffHeapValueHolderPortability.java new file mode 100644 index 0000000000..e42c7e0672 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/offheap/portability/AssertingOffHeapValueHolderPortability.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap.portability; + +import org.ehcache.impl.internal.store.offheap.AssertingOffHeapValueHolder; +import org.ehcache.impl.internal.store.offheap.OffHeapValueHolder; +import org.ehcache.spi.serialization.Serializer; +import org.terracotta.offheapstore.storage.portability.WriteContext; + +import java.nio.ByteBuffer; + +public class AssertingOffHeapValueHolderPortability extends OffHeapValueHolderPortability { + + public AssertingOffHeapValueHolderPortability(Serializer serializer) { + super(serializer); + } + + @Override + protected OffHeapValueHolder createLazyOffHeapValueHolder(long id, ByteBuffer byteBuffer, Serializer serializer, long creationTime, long expireTime, long lastAccessTime, WriteContext writeContext) { + return new AssertingOffHeapValueHolder<>(id, byteBuffer, serializer, creationTime, expireTime, lastAccessTime, writeContext); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java similarity index 85% rename from impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java index 6b715ceb60..fe073da77f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java @@ -17,11 +17,12 @@ package org.ehcache.impl.internal.store.tiering; import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; -import org.ehcache.expiry.Expirations; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.core.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; @@ -44,7 +45,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; /** * This factory instantiates a CachingTier @@ -77,13 +78,13 @@ public CachingTier newCachingTier(long capacity) { private CachingTier newCachingTier(Long capacity) { Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, - ClassLoader.getSystemClassLoader(), Expirations.noExpiration(), buildResourcePools(capacity), 0, new JavaSerializer<>(getSystemClassLoader()), new JavaSerializer<>(getSystemClassLoader())); + ClassLoader.getSystemClassLoader(), ExpiryPolicyBuilder.noExpiration(), buildResourcePools(capacity), 0, new JavaSerializer<>(getSystemClassLoader()), new JavaSerializer<>(getSystemClassLoader())); StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OffHeapStore offHeapStore = new OffHeapStore<>(config, SystemTimeSource.INSTANCE, eventDispatcher, 10 * 1024 * 1024); + OffHeapStore offHeapStore = new OffHeapStore<>(config, SystemTimeSource.INSTANCE, eventDispatcher, 10 * 1024 * 1024, new DefaultStatisticsService()); OffHeapStoreLifecycleHelper.init(offHeapStore); IdentityCopier copier = new IdentityCopier<>(); - OnHeapStore onHeapStore = new OnHeapStore<>(config, SystemTimeSource.INSTANCE, copier, copier, new NoopSizeOfEngine(), eventDispatcher); + OnHeapStore onHeapStore = new OnHeapStore<>(config, SystemTimeSource.INSTANCE, copier, copier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); CompoundCachingTier compoundCachingTier = new CompoundCachingTier<>(onHeapStore, offHeapStore); map.put(compoundCachingTier, offHeapStore); return compoundCachingTier; @@ -118,22 +119,22 @@ public Class getValueType() { } @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; } @Override public String createKey(long seed) { - return new String("" + seed); + return "" + seed; } @Override public String createValue(long seed) { - return new String("" + seed); + return "" + seed; } @Override - public void disposeOf(CachingTier tier) { + public void disposeOf(CachingTier tier) { OffHeapStore offHeapStore = map.remove(tier); OffHeapStoreLifecycleHelper.close(offHeapStore); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java similarity index 75% rename from impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java index c89d08bb4a..d94f97bba4 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java @@ -23,8 +23,6 @@ import org.ehcache.impl.internal.util.UnmatchedResourceType; import org.junit.Test; import org.mockito.ArgumentCaptor; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import java.util.EnumSet; import java.util.HashSet; @@ -34,9 +32,9 @@ import java.util.function.Function; import static java.util.Collections.EMPTY_LIST; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.doAnswer; @@ -56,10 +54,10 @@ public class CompoundCachingTierTest { public void testGetOrComputeIfAbsentComputesWhenBothTiersEmpty() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); - final Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); + Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - final ArgumentCaptor functionArg = ArgumentCaptor.forClass(Function.class); - final ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); + ArgumentCaptor>> functionArg = ArgumentCaptor.forClass(Function.class); + ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); when(higherTier.getOrComputeIfAbsent(keyArg.capture(), functionArg.capture())).then(invocation -> functionArg.getValue().apply(keyArg.getValue())); when(lowerTier.getAndRemove(anyString())).thenReturn(null); @@ -102,10 +100,10 @@ public void testGetOrComputeIfAbsentDoesNotComputesWhenHigherTierContainsValue() public void testGetOrComputeIfAbsentDoesNotComputesWhenLowerTierContainsValue() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); - final Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); + Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - final ArgumentCaptor functionArg = ArgumentCaptor.forClass(Function.class); - final ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); + ArgumentCaptor>> functionArg = ArgumentCaptor.forClass(Function.class); + ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); when(higherTier.getOrComputeIfAbsent(keyArg.capture(), functionArg.capture())).then(invocation -> functionArg.getValue().apply(keyArg.getValue())); when(lowerTier.getAndRemove(anyString())).thenReturn(valueHolder); @@ -125,17 +123,18 @@ public void testGetOrComputeIfAbsentDoesNotComputesWhenLowerTierContainsValue() @SuppressWarnings("unchecked") public void testGetOrComputeIfAbsentComputesWhenLowerTierExpires() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); - final LowerCachingTier lowerTier = mock(LowerCachingTier.class); - final Store.ValueHolder originalValueHolder = mock(Store.ValueHolder.class); - final Store.ValueHolder newValueHolder = mock(Store.ValueHolder.class); + LowerCachingTier lowerTier = mock(LowerCachingTier.class); + Store.ValueHolder originalValueHolder = mock(Store.ValueHolder.class); + Store.ValueHolder newValueHolder = mock(Store.ValueHolder.class); - final ArgumentCaptor functionArg = ArgumentCaptor.forClass(Function.class); - final ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); + ArgumentCaptor>> functionArg = ArgumentCaptor.forClass(Function.class); + ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); when(higherTier.getOrComputeIfAbsent(keyArg.capture(), functionArg.capture())).then(invocation -> functionArg.getValue().apply(keyArg.getValue())); - final ArgumentCaptor invalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); + ArgumentCaptor> invalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); doNothing().when(lowerTier).setInvalidationListener(invalidationListenerArg.capture()); when(lowerTier.getAndRemove(anyString())).thenAnswer(invocation -> { - invalidationListenerArg.getValue().onInvalidation(invocation.getArguments()[0], originalValueHolder); + String key = (String) invocation.getArguments()[0]; + invalidationListenerArg.getValue().onInvalidation(key, originalValueHolder); return null; }); @@ -189,35 +188,36 @@ public void testInvalidateWhenValueInLowerTierFiresListener() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); - final Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - final AtomicReference> higherTierValueHolder = new AtomicReference<>(); - final AtomicReference> lowerTierValueHolder = new AtomicReference<>(valueHolder); + Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); + AtomicReference> higherTierValueHolder = new AtomicReference<>(); + AtomicReference> lowerTierValueHolder = new AtomicReference<>(valueHolder); - final ArgumentCaptor higherTierInvalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); + ArgumentCaptor> higherTierInvalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); doNothing().when(higherTier).setInvalidationListener(higherTierInvalidationListenerArg.capture()); doAnswer(invocation -> { - higherTierInvalidationListenerArg.getValue().onInvalidation(invocation.getArguments()[0], higherTierValueHolder.getAndSet(null)); + String key = (String) invocation.getArguments()[0]; + higherTierInvalidationListenerArg.getValue().onInvalidation(key, higherTierValueHolder.getAndSet(null)); ((Function) invocation.getArguments()[1]).apply(higherTierValueHolder.get()); return null; }).when(higherTier).silentInvalidate(anyString(), any(Function.class)); - final ArgumentCaptor functionArg = ArgumentCaptor.forClass(Function.class); - final ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); + ArgumentCaptor>> functionArg = ArgumentCaptor.forClass(Function.class); + ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); when(lowerTier.installMapping(keyArg.capture(), functionArg.capture())).then(invocation -> lowerTierValueHolder.get()); - final ArgumentCaptor lowerTierInvalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); + ArgumentCaptor> lowerTierInvalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); doNothing().when(lowerTier).setInvalidationListener(lowerTierInvalidationListenerArg.capture()); doAnswer(invocation -> { - lowerTierInvalidationListenerArg.getValue().onInvalidation(invocation.getArguments()[0], lowerTierValueHolder.getAndSet(null)); + String key = (String) invocation.getArguments()[0]; + lowerTierInvalidationListenerArg.getValue().onInvalidation(key, lowerTierValueHolder.getAndSet(null)); return null; }).when(lowerTier).invalidate(anyString()); - final AtomicReference> invalidated = new AtomicReference<>(); + AtomicReference> invalidated = new AtomicReference<>(); CompoundCachingTier compoundCachingTier = new CompoundCachingTier<>(higherTier, lowerTier); compoundCachingTier.setInvalidationListener((key, valueHolder1) -> invalidated.set(valueHolder1)); - compoundCachingTier.invalidate("1"); assertThat(invalidated.get(), is(valueHolder)); @@ -231,29 +231,31 @@ public void testInvalidateWhenValueInHigherTierFiresListener() throws Exception HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); - final Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); - final AtomicReference> higherTierValueHolder = new AtomicReference<>(valueHolder); - final AtomicReference> lowerTierValueHolder = new AtomicReference<>(); + Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); + AtomicReference> higherTierValueHolder = new AtomicReference<>(valueHolder); + AtomicReference> lowerTierValueHolder = new AtomicReference<>(); - final ArgumentCaptor higherTierInvalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); + ArgumentCaptor> higherTierInvalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); doNothing().when(higherTier).setInvalidationListener(higherTierInvalidationListenerArg.capture()); doAnswer(invocation -> { - higherTierInvalidationListenerArg.getValue().onInvalidation(invocation.getArguments()[0], higherTierValueHolder.getAndSet(null)); + String key = (String) invocation.getArguments()[0]; + higherTierInvalidationListenerArg.getValue().onInvalidation(key, higherTierValueHolder.getAndSet(null)); ((Function) invocation.getArguments()[1]).apply(higherTierValueHolder.get()); return null; }).when(higherTier).silentInvalidate(anyString(), any(Function.class)); - final ArgumentCaptor functionArg = ArgumentCaptor.forClass(Function.class); - final ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); + ArgumentCaptor>> functionArg = ArgumentCaptor.forClass(Function.class); + ArgumentCaptor keyArg = ArgumentCaptor.forClass(String.class); when(lowerTier.installMapping(keyArg.capture(), functionArg.capture())).then(invocation -> { Object apply = functionArg.getValue().apply(keyArg.getValue()); lowerTierValueHolder.set((Store.ValueHolder) apply); return apply; }); - final ArgumentCaptor lowerTierInvalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); + final ArgumentCaptor> lowerTierInvalidationListenerArg = ArgumentCaptor.forClass(CachingTier.InvalidationListener.class); doNothing().when(lowerTier).setInvalidationListener(lowerTierInvalidationListenerArg.capture()); doAnswer(invocation -> { - lowerTierInvalidationListenerArg.getValue().onInvalidation(invocation.getArguments()[0], lowerTierValueHolder.getAndSet(null)); + String key = (String) invocation.getArguments()[0]; + lowerTierInvalidationListenerArg.getValue().onInvalidation(key, lowerTierValueHolder.getAndSet(null)); return null; }).when(lowerTier).invalidate(anyString()); @@ -288,8 +290,7 @@ public void testInvalidateAllCoversBothTiers() throws Exception { @SuppressWarnings("unchecked") public void testRankCachingTier() throws Exception { CompoundCachingTier.Provider provider = new CompoundCachingTier.Provider(); - HashSet> resourceTypes = new HashSet<>(); - resourceTypes.addAll(EnumSet.of(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP)); + HashSet> resourceTypes = new HashSet<>(EnumSet.of(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP)); assertThat(provider.rankCachingTier(resourceTypes, EMPTY_LIST), is(2)); resourceTypes.clear(); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java similarity index 77% rename from impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java index 4699b16a23..bede68ca2c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java @@ -19,34 +19,36 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; import org.ehcache.impl.persistence.DefaultDiskResourceService; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.impl.persistence.DefaultLocalPersistenceService; import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; -import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; +import org.mockito.Answers; +import org.mockito.Mockito; import java.io.File; -import java.io.Serializable; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.ehcache.test.MockitoUtil.mock; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class TieredStoreFlushWhileShutdownTest { @@ -81,8 +83,8 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { - return Expirations.noExpiration(); + public ExpiryPolicy getExpiry() { + return ExpiryPolicyBuilder.noExpiration(); } @Override @@ -104,6 +106,11 @@ public Serializer getValueSerializer() { public int getDispatcherConcurrency() { return 1; } + + @Override + public CacheLoaderWriter getCacheLoaderWriter() { + return null; + } }; ServiceLocator serviceLocator = getServiceLocator(persistenceLocation); @@ -112,11 +119,11 @@ public int getDispatcherConcurrency() { tieredStoreProvider.start(serviceLocator); - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, true).build()); DiskResourceService diskResourceService = serviceLocator.getService(DiskResourceService.class); - PersistenceSpaceIdentifier persistenceSpace = diskResourceService.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); + PersistenceSpaceIdentifier persistenceSpace = diskResourceService.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); Store tieredStore = tieredStoreProvider.createStore(configuration, persistenceSpace); tieredStoreProvider.initStore(tieredStore); for (int i = 0; i < 100; i++) { @@ -129,6 +136,12 @@ public int getDispatcherConcurrency() { } } + // Keep the creation time to make sure we have them at restart + long[] creationTimes = new long[20]; + for (int i = 0; i < 20; i++) { + creationTimes[i] = tieredStore.get(i).creationTime(); + } + tieredStoreProvider.releaseStore(tieredStore); tieredStoreProvider.stop(); @@ -139,12 +152,12 @@ public int getDispatcherConcurrency() { tieredStoreProvider.start(serviceLocator1); DiskResourceService diskResourceService1 = serviceLocator1.getService(DiskResourceService.class); - PersistenceSpaceIdentifier persistenceSpace1 = diskResourceService1.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); + PersistenceSpaceIdentifier persistenceSpace1 = diskResourceService1.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); tieredStore = tieredStoreProvider.createStore(configuration, persistenceSpace1); tieredStoreProvider.initStore(tieredStore); for(int i = 0; i < 20; i++) { - assertThat(tieredStore.get(i).hits(), is(21L)); + assertThat(tieredStore.get(i).creationTime(), is(creationTimes[i])); } } @@ -157,6 +170,7 @@ private ServiceLocator getServiceLocator(File location) throws Exception { dependencySet.with(diskResourceService); dependencySet.with(new OnHeapStore.Provider()); dependencySet.with(new OffHeapDiskStore.Provider()); + dependencySet.with(Mockito.mock(CacheManagerProviderService.class, Answers.RETURNS_DEEP_STUBS)); return dependencySet.build(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java similarity index 94% rename from impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java index e8938e6dc0..bc3245bbfe 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java @@ -16,16 +16,17 @@ package org.ehcache.impl.internal.store.tiering; import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.docs.plugs.StringCopier; -import org.ehcache.expiry.Expirations; import org.ehcache.core.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.impl.internal.store.basic.NopStore; @@ -39,11 +40,12 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; /** * Tests for {@link TieredStore}. These tests are mainly to validate that @@ -116,7 +118,7 @@ public ValueHolder getAndFault(String key) throws StoreAccessException { } @Override - public ValueHolder putIfAbsent(String key, String value) throws StoreAccessException { + public ValueHolder putIfAbsent(String key, String value, Consumer put) throws StoreAccessException { return createValueHolder(map.putIfAbsent(key, value)); } @@ -173,12 +175,12 @@ public void setUp() throws Exception { // Not relevant to the test, just used to instantiate the OnHeapStore Store.Configuration config = new StoreConfigurationImpl<>(String.class, String.class, - null, getClass().getClassLoader(), Expirations.noExpiration(), resourcePools, 0, null, null); + null, getClass().getClassLoader(), ExpiryPolicyBuilder.noExpiration(), resourcePools, 0, null, null); // Here again, all parameters are useless, we only care about the beforeCompletingTheFault implementation CachingTier cachingTier = new OnHeapStore<>(config, SystemTimeSource.INSTANCE, StringCopier.copier(), StringCopier.copier(), new NoopSizeOfEngine(), NullStoreEventDispatcher. - nullStoreEventDispatcher()); + nullStoreEventDispatcher(), new DefaultStatisticsService()); tieredStore = new TieredStore<>(cachingTier, authoritativeTier); } @@ -212,7 +214,7 @@ public void testPutIfAbsent() throws Exception { // 4. Test Thread receives a value from putIfAbsent. We would expect the get to receive the same value right after // a. Test Thread -> TieredStore.putIfAbsent // b. Test Thread -> AuthoritativeTierMock.putIfAbsent - returns VALUE - assertThat(putIfAbsentToTieredStore().value(), is(VALUE)); + assertThat(putIfAbsentToTieredStore().get(), is(VALUE)); // 5. Test Thread -> TieredStore.get() // If Test Thread bugged -> Fault.get() - synchronized - blocked on the fault because thread 2 already locks the fault @@ -222,7 +224,7 @@ public void testPutIfAbsent() throws Exception { // These assertions will in fact work most of the time even if a failure occurred. Because as soon as the latches are // released by thread 3, the thread 2 will invalidate the fault assertThat(value, notNullValue()); - assertThat(value.value(), is(VALUE)); + assertThat(value.get(), is(VALUE)); // If the Test thread was blocked, Thread 3 will eventually flag the failure assertThat(failed, is(false)); @@ -251,7 +253,7 @@ public void testRemoveKeyValue() throws Exception { // Else Test Thread fixed -> new Fault ... correct value Store.ValueHolder value = getFromTieredStore(); assertThat(value, notNullValue()); - assertThat(value.value(), is(VALUE)); + assertThat(value.get(), is(VALUE)); assertThat(failed, is(false)); } @@ -343,7 +345,7 @@ private boolean removeKeyFromTieredStore() { private Store.ValueHolder putIfAbsentToTieredStore() { try { - return tieredStore.putIfAbsent(KEY, VALUE); + return tieredStore.putIfAbsent(KEY, VALUE, b -> {}); } catch (StoreAccessException e) { throw new RuntimeException(e); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java similarity index 83% rename from impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java index 98a6b07f8b..f3605deb02 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java @@ -20,14 +20,15 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.SizedResourcePool; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.CachePersistenceException; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.core.events.NullStoreEventDispatcher; @@ -44,7 +45,7 @@ import org.ehcache.impl.serialization.JavaSerializer; import org.ehcache.internal.store.StoreFactory; import org.ehcache.internal.store.StoreSPITest; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; @@ -64,15 +65,14 @@ import java.util.Collection; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration.DEFAULT_DISK_SEGMENTS; import static org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration.DEFAULT_WRITER_CONCURRENCY; -import static org.mockito.Mockito.mock; +import static org.ehcache.test.MockitoUtil.mock; import static org.mockito.Mockito.when; /** @@ -105,38 +105,38 @@ public void setUp() throws IOException { @Override public Store newStore() { - return newStore(null, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override public Store newStoreWithCapacity(long capacity) { - return newStore(capacity, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(capacity, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override - public Store newStoreWithExpiry(Expiry expiry, TimeSource timeSource) { + public Store newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource) { return newStore(null, null, expiry, timeSource); } @Override public Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { - return newStore(null, evictionAdvisor, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, evictionAdvisor, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } - private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { + private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, ExpiryPolicy expiry, TimeSource timeSource) { Serializer keySerializer = new JavaSerializer<>(getClass().getClassLoader()); Serializer valueSerializer = new JavaSerializer<>(getClass().getClassLoader()); Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, buildResourcePools(capacity), 0, keySerializer, valueSerializer); - @SuppressWarnings("unchecked") - final Copier defaultCopier = new IdentityCopier(); - OnHeapStore onHeapStore = new OnHeapStore<>(config, timeSource, defaultCopier, defaultCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); + Copier defaultCopier = IdentityCopier.identityCopier(); + OnHeapStore onHeapStore = new OnHeapStore<>(config, timeSource, defaultCopier, defaultCopier, + new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()); try { - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MB, false).build()); String spaceName = "alias-" + aliasCounter.getAndIncrement(); - DiskResourceService.PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + DiskResourceService.PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); FileBasedPersistenceContext persistenceContext = diskResourceService.createPersistenceContextWithin(space, "store"); SizedResourcePool diskPool = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK); @@ -148,11 +148,11 @@ private Store newStore(Long capacity, EvictionAdvisor(), - sizeInBytes); + sizeInBytes, new DefaultStatisticsService()); TieredStore tieredStore = new TieredStore<>(onHeapStore, diskStore); provider.registerStore(tieredStore, new CachingTier.Provider() { @Override - public CachingTier createCachingTier(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { + public CachingTier createCachingTier(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { throw new UnsupportedOperationException("Implement me!"); } @@ -167,7 +167,7 @@ public void initCachingTier(final CachingTier resource) { } @Override - public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { + public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { throw new UnsupportedOperationException("Implement me!"); } @@ -182,7 +182,7 @@ public void stop() { } }, new AuthoritativeTier.Provider() { @Override - public AuthoritativeTier createAuthoritativeTier(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { + public AuthoritativeTier createAuthoritativeTier(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { throw new UnsupportedOperationException("Implement me!"); } @@ -197,7 +197,7 @@ public void initAuthoritativeTier(final AuthoritativeTier resource) { } @Override - public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { throw new UnsupportedOperationException("Implement me!"); } @@ -225,40 +225,30 @@ public Store.ValueHolder newValueHolder(final String value) { return new Store.ValueHolder() { @Override - public String value() { + public String get() { return value; } @Override - public long creationTime(TimeUnit unit) { + public long creationTime() { return creationTime; } @Override - public long expirationTime(TimeUnit unit) { + public long expirationTime() { return 0; } @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { + public boolean isExpired(long expirationTime) { return false; } @Override - public long lastAccessTime(TimeUnit unit) { + public long lastAccessTime() { return 0; } - @Override - public float hitRate(long now, TimeUnit unit) { - return 0; - } - - @Override - public long hits() { - throw new UnsupportedOperationException("Implement me!"); - } - @Override public long getId() { throw new UnsupportedOperationException("Implement me!"); @@ -277,8 +267,8 @@ public Class getValueType() { } @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; } @Override @@ -334,7 +324,7 @@ private ResourcePools buildResourcePools(Comparable capacityConstraint) { public static class FakeCachingTierProvider implements CachingTier.Provider { @Override @SuppressWarnings("unchecked") - public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(CachingTier.class); } @@ -349,7 +339,7 @@ public void initCachingTier(CachingTier resource) { } @Override - public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { + public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { throw new UnsupportedOperationException(); } @@ -367,7 +357,7 @@ public void stop() { public static class FakeAuthoritativeTierProvider implements AuthoritativeTier.Provider { @Override @SuppressWarnings("unchecked") - public AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(AuthoritativeTier.class); } @@ -382,7 +372,7 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { } @Override - public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { throw new UnsupportedOperationException(); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java similarity index 76% rename from impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java index aa87d16da6..63b1601e33 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java @@ -19,27 +19,29 @@ import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.config.SizedResourcePool; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.exceptions.StorePassThroughException; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.RemoveStatus; import org.ehcache.core.spi.store.Store.ReplaceStatus; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.Matchers; -import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.mockito.Answers; import org.mockito.ArgumentMatchers; import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import java.util.AbstractMap; @@ -54,15 +56,18 @@ import java.util.Set; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; @@ -98,7 +103,7 @@ public void testGetHitsCachingTier() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.get(1).value(), Matchers.equalTo("one")); + assertThat(tieredStore.get(1).get(), Matchers.equalTo("one")); verify(numberAuthoritativeTier, times(0)).getAndFault(any(Number.class)); } @@ -116,7 +121,7 @@ public void testGetHitsAuthoritativeTier() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.get(1).value(), Matchers.equalTo("one")); + assertThat(tieredStore.get(1).get(), Matchers.equalTo("one")); verify(numberCachingTier, times(1)).getOrComputeIfAbsent(eq(1), any(Function.class)); verify(numberAuthoritativeTier, times(1)).getAndFault(any(Number.class)); @@ -140,6 +145,71 @@ public void testGetMisses() throws Exception { verify(numberAuthoritativeTier, times(1)).getAndFault(any(Number.class)); } + @Test + @SuppressWarnings("unchecked") + public void testGetThrowsRuntimeException() throws Exception { + RuntimeException error = new RuntimeException(); + when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenThrow(new StoreAccessException(error)); + + TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); + + try { + tieredStore.get(1); + fail("We should get an Error"); + } catch (RuntimeException e) { + assertSame(error, e); + } + } + + @Test + @SuppressWarnings("unchecked") + public void testGetThrowsError() throws Exception { + Error error = new Error(); + when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenThrow(new StoreAccessException(error)); + + TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); + + try { + tieredStore.get(1); + fail("We should get an Error"); + } catch (Error e) { + assertSame(error, e); + } + } + + @Test + @SuppressWarnings("unchecked") + public void testGetThrowsException() throws Exception { + Exception error = new Exception(); + when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenThrow(new StoreAccessException(error)); + + TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); + + try { + tieredStore.get(1); + fail("We should get an Error"); + } catch (RuntimeException e) { + assertSame(error, e.getCause()); + assertEquals("Unexpected checked exception wrapped in StoreAccessException", e.getMessage()); + } + } + + @Test + @SuppressWarnings("unchecked") + public void testGetThrowsPassthrough() throws Exception { + StoreAccessException error = new StoreAccessException("inner"); + when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenThrow(new StoreAccessException(new StorePassThroughException(error))); + + TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); + + try { + tieredStore.get(1); + fail("We should get an Error"); + } catch (StoreAccessException e) { + assertSame(error, e); + } + } + @Test public void testPut() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); @@ -154,22 +224,24 @@ public void testPut() throws Exception { public void testPutIfAbsent_whenAbsent() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.putIfAbsent(1, "one"), is(nullValue())); + assertThat(tieredStore.putIfAbsent(1, "one", b -> {}), is(nullValue())); verify(numberCachingTier, times(1)).invalidate(eq(1)); - verify(numberAuthoritativeTier, times(1)).putIfAbsent(eq(1), eq("one")); + verify(numberAuthoritativeTier, times(1)).putIfAbsent(eq(1), eq("one"), any()); } @Test public void testPutIfAbsent_whenPresent() throws Exception { - when(numberAuthoritativeTier.putIfAbsent(1, "one")).thenReturn(newValueHolder("un")); + Consumer booleanConsumer = b -> { + }; + when(numberAuthoritativeTier.putIfAbsent(1, "one", booleanConsumer)).thenReturn(newValueHolder("un")); TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.putIfAbsent(1, "one").value(), Matchers.equalTo("un")); + assertThat(tieredStore.putIfAbsent(1, "one", booleanConsumer).get(), Matchers.equalTo("un")); verify(numberCachingTier, times(1)).invalidate(1); - verify(numberAuthoritativeTier, times(1)).putIfAbsent(1, "one"); + verify(numberAuthoritativeTier, times(1)).putIfAbsent(1, "one", booleanConsumer); } @Test @@ -212,7 +284,7 @@ public void testReplace2Args_replaces() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.replace(1, "one").value(), Matchers.equalTo("un")); + assertThat(tieredStore.replace(1, "one").get(), Matchers.equalTo("un")); verify(numberCachingTier, times(1)).invalidate(eq(1)); verify(numberAuthoritativeTier, times(1)).replace(eq(1), eq("one")); @@ -267,7 +339,7 @@ public void testClear() throws Exception { @Test @SuppressWarnings("unchecked") public void testCompute2Args() throws Exception { - when(numberAuthoritativeTier.compute(any(Number.class), any(BiFunction.class))).then((Answer>) invocation -> { + when(numberAuthoritativeTier.getAndCompute(any(Number.class), any(BiFunction.class))).then((Answer>) invocation -> { Number key = (Number) invocation.getArguments()[0]; BiFunction function = (BiFunction) invocation.getArguments()[1]; return newValueHolder(function.apply(key, null)); @@ -275,16 +347,16 @@ public void testCompute2Args() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.compute(1, (number, charSequence) -> "one").value(), Matchers.equalTo("one")); + assertThat(tieredStore.getAndCompute(1, (number, charSequence) -> "one").get(), Matchers.equalTo("one")); verify(numberCachingTier, times(1)).invalidate(any(Number.class)); - verify(numberAuthoritativeTier, times(1)).compute(eq(1), any(BiFunction.class)); + verify(numberAuthoritativeTier, times(1)).getAndCompute(eq(1), any(BiFunction.class)); } @Test @SuppressWarnings("unchecked") public void testCompute3Args() throws Exception { - when(numberAuthoritativeTier.compute(any(Number.class), any(BiFunction.class), any(Supplier.class))).then((Answer>) invocation -> { + when(numberAuthoritativeTier.computeAndGet(any(Number.class), any(BiFunction.class), any(Supplier.class), any(Supplier.class))).then((Answer>) invocation -> { Number key = (Number) invocation.getArguments()[0]; BiFunction function = (BiFunction) invocation.getArguments()[1]; return newValueHolder(function.apply(key, null)); @@ -292,10 +364,10 @@ public void testCompute3Args() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.compute(1, (number, charSequence) -> "one", () -> true).value(), Matchers.equalTo("one")); + assertThat(tieredStore.computeAndGet(1, (number, charSequence) -> "one", () -> true, () -> false).get(), Matchers.equalTo("one")); verify(numberCachingTier, times(1)).invalidate(any(Number.class)); - verify(numberAuthoritativeTier, times(1)).compute(eq(1), any(BiFunction.class), any(Supplier.class)); + verify(numberAuthoritativeTier, times(1)).computeAndGet(eq(1), any(BiFunction.class), any(Supplier.class), any(Supplier.class)); } @Test @@ -314,7 +386,7 @@ public void testComputeIfAbsent_computes() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.computeIfAbsent(1, number -> "one").value(), Matchers.equalTo("one")); + assertThat(tieredStore.computeIfAbsent(1, number -> "one").get(), Matchers.equalTo("one")); verify(numberCachingTier, times(1)).getOrComputeIfAbsent(eq(1), any(Function.class)); verify(numberAuthoritativeTier, times(1)).computeIfAbsentAndFault(eq(1), any(Function.class)); @@ -328,12 +400,28 @@ public void testComputeIfAbsent_doesNotCompute() throws Exception { TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); - assertThat(tieredStore.computeIfAbsent(1, number -> "one").value(), Matchers.equalTo("one")); + assertThat(tieredStore.computeIfAbsent(1, number -> "one").get(), Matchers.equalTo("one")); verify(numberCachingTier, times(1)).getOrComputeIfAbsent(eq(1), any(Function.class)); verify(numberAuthoritativeTier, times(0)).computeIfAbsentAndFault(eq(1), any(Function.class)); } + @Test + @SuppressWarnings("unchecked") + public void testComputeIfAbsentThrowsError() throws Exception { + Error error = new Error(); + when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenThrow(new StoreAccessException(error)); + + TieredStore tieredStore = new TieredStore<>(numberCachingTier, numberAuthoritativeTier); + + try { + tieredStore.computeIfAbsent(1, n -> null); + fail("We should get an Error"); + } catch (Error e) { + assertSame(error, e); + } + } + @Test @SuppressWarnings("unchecked") public void testBulkCompute2Args() throws Exception { @@ -362,9 +450,9 @@ public void testBulkCompute2Args() throws Exception { .asList(newMapEntry(1, "one"), newMapEntry(2, "two"), newMapEntry(3, "three")))); assertThat(result.size(), is(3)); - assertThat(result.get(1).value(), Matchers.equalTo("one")); - assertThat(result.get(2).value(), Matchers.equalTo("two")); - assertThat(result.get(3).value(), Matchers.equalTo("three")); + assertThat(result.get(1).get(), Matchers.equalTo("one")); + assertThat(result.get(2).get(), Matchers.equalTo("two")); + assertThat(result.get(3).get(), Matchers.equalTo("three")); verify(numberCachingTier, times(1)).invalidate(1); verify(numberCachingTier, times(1)).invalidate(2); @@ -401,9 +489,9 @@ public void testBulkCompute3Args() throws Exception { .asList(newMapEntry(1, "one"), newMapEntry(2, "two"), newMapEntry(3, "three"))), () -> true); assertThat(result.size(), is(3)); - assertThat(result.get(1).value(), Matchers.equalTo("one")); - assertThat(result.get(2).value(), Matchers.equalTo("two")); - assertThat(result.get(3).value(), Matchers.equalTo("three")); + assertThat(result.get(1).get(), Matchers.equalTo("one")); + assertThat(result.get(2).get(), Matchers.equalTo("two")); + assertThat(result.get(3).get(), Matchers.equalTo("three")); verify(numberCachingTier, times(1)).invalidate(1); verify(numberCachingTier, times(1)).invalidate(2); @@ -439,9 +527,9 @@ public void testBulkComputeIfAbsent() throws Exception { Map> result = tieredStore.bulkComputeIfAbsent(new HashSet(Arrays.asList(1, 2, 3)), numbers -> Arrays.asList(newMapEntry(1, "one"), newMapEntry(2, "two"), newMapEntry(3, "three"))); assertThat(result.size(), is(3)); - assertThat(result.get(1).value(), Matchers.equalTo("one")); - assertThat(result.get(2).value(), Matchers.equalTo("two")); - assertThat(result.get(3).value(), Matchers.equalTo("three")); + assertThat(result.get(1).get(), Matchers.equalTo("one")); + assertThat(result.get(2).get(), Matchers.equalTo("two")); + assertThat(result.get(3).get(), Matchers.equalTo("three")); verify(numberCachingTier, times(1)).invalidate(1); verify(numberCachingTier, times(1)).invalidate(2); @@ -474,7 +562,7 @@ public void CachingTierDoesNotSeeAnyOperationDuringClear() throws StoreAccessExc barrier.await(); t.join(); verify(stringCachingTier, never()).getOrComputeIfAbsent( - ArgumentMatchers.any(), ArgumentMatchers.>>any()); + ArgumentMatchers.any(), ArgumentMatchers.any()); } @Test @@ -493,7 +581,7 @@ public void testReleaseStoreFlushes() throws Exception { Set> singleton = Collections.>singleton( ResourceType.Core.HEAP); when(onHeapStoreProvider.rankCachingTier(eq(singleton), any(Collection.class))).thenReturn(1); when(onHeapStoreProvider.createCachingTier(any(Store.Configuration.class), - ArgumentMatchers.[]>any())) + ArgumentMatchers.any())) .thenReturn(stringCachingTier); SizedResourcePool offHeapPool = mock(SizedResourcePool.class); @@ -501,8 +589,8 @@ public void testReleaseStoreFlushes() throws Exception { when(resourcePools.getPoolForResource(ResourceType.Core.OFFHEAP)).thenReturn(offHeapPool); OffHeapStore.Provider offHeapStoreProvider = mock(OffHeapStore.Provider.class); when(offHeapStoreProvider.rankAuthority(eq(ResourceType.Core.OFFHEAP), any(Collection.class))).thenReturn(1); - when(offHeapStoreProvider.createAuthoritativeTier(any(Store.Configuration.class), - ArgumentMatchers.[]>any())) + when(offHeapStoreProvider.createAuthoritativeTier( + any(Store.Configuration.class), ArgumentMatchers.any())) .thenReturn(stringAuthoritativeTier); Store.Configuration configuration = mock(Store.Configuration.class); @@ -517,6 +605,7 @@ public void testReleaseStoreFlushes() throws Exception { when(serviceProvider.getService(OffHeapStore.Provider.class)).thenReturn(offHeapStoreProvider); when(serviceProvider.getServicesOfType(AuthoritativeTier.Provider.class)).thenReturn(authorities); when(serviceProvider.getServicesOfType(CachingTier.Provider.class)).thenReturn(cachingTiers); + when(serviceProvider.getService(StatisticsService.class)).thenReturn(new DefaultStatisticsService()); tieredStoreProvider.start(serviceProvider); final Store tieredStore = tieredStoreProvider.createStore(configuration); @@ -528,7 +617,8 @@ public void testReleaseStoreFlushes() throws Exception { @Test public void testRank() throws Exception { TieredStore.Provider provider = new TieredStore.Provider(); - ServiceLocator serviceLocator = dependencySet().with(provider).with(mock(DiskResourceService.class)).build(); + ServiceLocator serviceLocator = dependencySet().with(provider).with(mock(DiskResourceService.class)) + .with(mock(CacheManagerProviderService.class, Answers.RETURNS_DEEP_STUBS)).build(); serviceLocator.startAllServices(); assertRank(provider, 0, ResourceType.Core.DISK); @@ -561,10 +651,28 @@ public int getTierHeight() { assertRank(provider, 0, ResourceType.Core.DISK, ResourceType.Core.OFFHEAP, ResourceType.Core.HEAP, unmatchedResourceType); } + @Test + @SuppressWarnings("unchecked") + public void testGetAuthoritativeTierProvider() { + TieredStore.Provider provider = new TieredStore.Provider(); + ServiceProvider serviceProvider = mock(ServiceProvider.class); + provider.start(serviceProvider); + + AuthoritativeTier.Provider provider1 = mock(AuthoritativeTier.Provider.class); + when(provider1.rankAuthority(any(ResourceType.class), any())).thenReturn(1); + AuthoritativeTier.Provider provider2 = mock(AuthoritativeTier.Provider.class); + when(provider2.rankAuthority(any(ResourceType.class), any())).thenReturn(2); + + when(serviceProvider.getServicesOfType(AuthoritativeTier.Provider.class)).thenReturn(Arrays.asList(provider1, + provider2)); + + assertSame(provider.getAuthoritativeTierProvider(mock(ResourceType.class), Collections.emptyList()), provider2); + } + private void assertRank(final Store.Provider provider, final int expectedRank, final ResourceType... resources) { - Assert.assertThat(provider.rank( + assertThat(provider.rank( new HashSet<>(Arrays.asList(resources)), - Collections.>emptyList()), + Collections.emptyList()), Matchers.is(expectedRank)); } @@ -576,40 +684,30 @@ public Store.ValueHolder newValueHolder(final CharSequence v) { return new Store.ValueHolder() { @Override - public CharSequence value() { + public CharSequence get() { return v; } @Override - public long creationTime(TimeUnit unit) { + public long creationTime() { return 0; } @Override - public long expirationTime(TimeUnit unit) { + public long expirationTime() { return 0; } @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { + public boolean isExpired(long expirationTime) { return false; } @Override - public long lastAccessTime(TimeUnit unit) { + public long lastAccessTime() { return 0; } - @Override - public float hitRate(long now, TimeUnit unit) { - return 0; - } - - @Override - public long hits() { - throw new UnsupportedOperationException("Implement me!"); - } - @Override public long getId() { throw new UnsupportedOperationException("Implement me!"); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java similarity index 84% rename from impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java index 354b3a2e91..6d556d805f 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java @@ -20,15 +20,16 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; -import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.SizedResourcePool; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.CachePersistenceException; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.core.events.NullStoreEventDispatcher; @@ -47,7 +48,7 @@ import org.ehcache.impl.serialization.JavaSerializer; import org.ehcache.internal.store.StoreFactory; import org.ehcache.internal.store.StoreSPITest; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; @@ -67,14 +68,13 @@ import java.util.Collection; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration.DEFAULT_DISK_SEGMENTS; import static org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration.DEFAULT_WRITER_CONCURRENCY; -import static org.mockito.Mockito.mock; +import static org.ehcache.test.MockitoUtil.mock; import static org.mockito.Mockito.when; /** @@ -106,45 +106,46 @@ public void setUp() throws IOException { @Override public Store newStore() { - return newStore(null, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override public Store newStoreWithCapacity(long capacity) { - return newStore(capacity, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(capacity, null, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } @Override - public Store newStoreWithExpiry(Expiry expiry, TimeSource timeSource) { + public Store newStoreWithExpiry(ExpiryPolicy expiry, TimeSource timeSource) { return newStore(null, null, expiry, timeSource); } @Override public Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { - return newStore(null, evictionAdvisor, Expirations.noExpiration(), SystemTimeSource.INSTANCE); + return newStore(null, evictionAdvisor, ExpiryPolicyBuilder.noExpiration(), SystemTimeSource.INSTANCE); } - private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { + private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, ExpiryPolicy expiry, TimeSource timeSource) { Serializer keySerializer = new JavaSerializer<>(getClass().getClassLoader()); Serializer valueSerializer = new JavaSerializer<>(getClass().getClassLoader()); Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, buildResourcePools(capacity), 0, keySerializer, valueSerializer); - @SuppressWarnings("unchecked") - final Copier defaultCopier = new IdentityCopier(); + + final Copier defaultCopier = new IdentityCopier<>(); StoreEventDispatcher noOpEventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - final OnHeapStore onHeapStore = new OnHeapStore<>(config, timeSource, defaultCopier, defaultCopier, new NoopSizeOfEngine(), noOpEventDispatcher); + final OnHeapStore onHeapStore = new OnHeapStore<>(config, timeSource, defaultCopier, defaultCopier, + new NoopSizeOfEngine(), noOpEventDispatcher, new DefaultStatisticsService()); SizedResourcePool offheapPool = config.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP); long offheapSize = ((MemoryUnit) offheapPool.getUnit()).toBytes(offheapPool.getSize()); - final OffHeapStore offHeapStore = new OffHeapStore<>(config, timeSource, noOpEventDispatcher, offheapSize); + final OffHeapStore offHeapStore = new OffHeapStore<>(config, timeSource, noOpEventDispatcher, offheapSize, new DefaultStatisticsService()); try { - CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); String spaceName = "alias-" + aliasCounter.getAndIncrement(); - DiskResourceService.PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + DiskResourceService.PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); FileBasedPersistenceContext persistenceContext = diskResourceService.createPersistenceContextWithin(space, "store"); SizedResourcePool diskPool = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK); @@ -155,7 +156,7 @@ private Store newStore(Long capacity, EvictionAdvisor(), - diskSize); + diskSize, new DefaultStatisticsService()); CompoundCachingTier compoundCachingTier = new CompoundCachingTier<>(onHeapStore, offHeapStore); @@ -164,7 +165,7 @@ private Store newStore(Long capacity, EvictionAdvisor CachingTier createCachingTier(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { + public CachingTier createCachingTier(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { throw new UnsupportedOperationException("Implement me!"); } @@ -181,7 +182,7 @@ public void initCachingTier(final CachingTier resource) { } @Override - public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { + public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { throw new UnsupportedOperationException("Implement me!"); } @@ -196,7 +197,7 @@ public void stop() { } }, new AuthoritativeTier.Provider() { @Override - public AuthoritativeTier createAuthoritativeTier(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { + public AuthoritativeTier createAuthoritativeTier(final Store.Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { throw new UnsupportedOperationException("Implement me!"); } @@ -211,7 +212,7 @@ public void initAuthoritativeTier(final AuthoritativeTier resource) { } @Override - public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { throw new UnsupportedOperationException("Implement me!"); } @@ -239,40 +240,30 @@ public Store.ValueHolder newValueHolder(final String value) { return new Store.ValueHolder() { @Override - public String value() { + public String get() { return value; } @Override - public long creationTime(TimeUnit unit) { + public long creationTime() { return creationTime; } @Override - public long expirationTime(TimeUnit unit) { + public long expirationTime() { return 0; } @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { + public boolean isExpired(long expirationTime) { return false; } @Override - public long lastAccessTime(TimeUnit unit) { - return 0; - } - - @Override - public float hitRate(long now, TimeUnit unit) { + public long lastAccessTime() { return 0; } - @Override - public long hits() { - throw new UnsupportedOperationException("Implement me!"); - } - @Override public long getId() { throw new UnsupportedOperationException("Implement me!"); @@ -291,8 +282,8 @@ public Class getValueType() { } @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; + public ServiceConfiguration[] getServiceConfigurations() { + return new ServiceConfiguration[0]; } @Override @@ -353,13 +344,13 @@ private ResourcePools buildResourcePools(Long capacityConstraint) { } else { offheapSize = MemoryUnit.KB.convert(capacityConstraint, MemoryUnit.MB) / 2; } - return newResourcePoolsBuilder().heap(5, EntryUnit.ENTRIES).offheap(offheapSize, MemoryUnit.KB).disk((Long) capacityConstraint, MemoryUnit.MB).build(); + return newResourcePoolsBuilder().heap(5, EntryUnit.ENTRIES).offheap(offheapSize, MemoryUnit.KB).disk(capacityConstraint, MemoryUnit.MB).build(); } public static class FakeCachingTierProvider implements CachingTier.Provider { @Override @SuppressWarnings("unchecked") - public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(CachingTier.class); } @@ -374,7 +365,7 @@ public void initCachingTier(CachingTier resource) { } @Override - public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { + public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { throw new UnsupportedOperationException(); } @@ -392,7 +383,7 @@ public void stop() { public static class FakeAuthoritativeTierProvider implements AuthoritativeTier.Provider { @Override @SuppressWarnings("unchecked") - public AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(AuthoritativeTier.class); } @@ -407,7 +398,7 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { } @Override - public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { + public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { throw new UnsupportedOperationException(); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/ByteBufferInputStreamTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/ByteBufferInputStreamTest.java similarity index 92% rename from impl/src/test/java/org/ehcache/impl/internal/util/ByteBufferInputStreamTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/util/ByteBufferInputStreamTest.java index bfc042ed88..9ee202286e 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/util/ByteBufferInputStreamTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/ByteBufferInputStreamTest.java @@ -18,11 +18,12 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.util.Random; -import org.junit.Assert; + +import org.ehcache.core.util.ByteBufferInputStream; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; public class ByteBufferInputStreamTest { @@ -106,7 +107,7 @@ public void testZeroOffsetAndMaximalLength() { byte[] read = new byte[32]; stream.read(read, 0, 32); for (int i = 0; i < read.length; i++) { - Assert.assertThat(read[i], is((byte) i)); + assertThat(read[i], is((byte) i)); } } @@ -116,7 +117,7 @@ public void testMaximalOffsetAndZeroLength() { byte[] read = new byte[32]; stream.read(read, 32, 0); for (int i = 0; i < read.length; i++) { - Assert.assertThat(read[i], is((byte) 0)); + assertThat(read[i], is((byte) 0)); } } @@ -137,13 +138,13 @@ public void testLegalOffsetAndLegalLength() { byte[] read = new byte[32]; stream.read(read, 4, 16); for (int i = 0; i < 4; i++) { - Assert.assertThat(read[i], is((byte) 0)); + assertThat(read[i], is((byte) 0)); } for (int i = 4; i < 20; i++) { - Assert.assertThat(read[i], is((byte) (i - 4))); + assertThat(read[i], is((byte) (i - 4))); } for (int i = 20; i < read.length; i++) { - Assert.assertThat(read[i], is((byte) 0)); + assertThat(read[i], is((byte) 0)); } } @@ -152,9 +153,9 @@ public void testMinimalOffsetAndMaximalLength() { ByteBufferInputStream stream = createStream(); byte[] read = new byte[32]; stream.read(read, 1, 31); - Assert.assertThat(read[0], is((byte) 0)); + assertThat(read[0], is((byte) 0)); for (int i = 1; i < read.length; i++) { - Assert.assertThat(read[i], is((byte) (i - 1))); + assertThat(read[i], is((byte) (i - 1))); } } @@ -164,7 +165,7 @@ public void testZeroOffsetAndZeroLength() { byte[] read = new byte[32]; stream.read(read, 0, 0); for (int i = 0; i < read.length; i++) { - Assert.assertThat(read[i], is((byte) 0)); + assertThat(read[i], is((byte) 0)); } } @@ -290,11 +291,7 @@ private ByteBufferInputStream createStream(boolean prePad, boolean postPad) { } ByteBufferInputStream bin = new ByteBufferInputStream(buffer); for (int i = 0; i < prePadSize; i++) { - try { - assertThat(bin.read(), is(0)); - } catch (IOException e) { - throw new AssertionError(e); - } + assertThat(bin.read(), is(0)); } return bin; } diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java similarity index 98% rename from impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java index 67f03f6464..eb2d8053f0 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java @@ -26,7 +26,7 @@ import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; import static org.hamcrest.CoreMatchers.not; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; /** * @author Henri Tremblay diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/Matchers.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/Matchers.java new file mode 100644 index 0000000000..0b15d42f4d --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/Matchers.java @@ -0,0 +1,91 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.util; + +import org.ehcache.Cache; +import org.ehcache.event.EventType; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.events.StoreEvent; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +import java.util.function.Supplier; + +/** + * + * @author cdennis + */ +public class Matchers { + + public static Matcher> hasEntry(final K key, final V value) { + return new TypeSafeMatcher>() { + + @Override + protected boolean matchesSafely(Cache item) { + return value.equals(item.get(key)); + } + + @Override + public void describeTo(Description description) { + description.appendText("cache containing entry {").appendValue(key).appendText(", ").appendValue(value).appendText("}"); + } + }; + } + + public static Matcher> valueHeld(final V value) { + return new TypeSafeMatcher>() { + @Override + protected boolean matchesSafely(Store.ValueHolder item) { + return item.get().equals(value); + } + + @Override + public void describeTo(Description description) { + description.appendText("value holder containing value '").appendValue(value).appendText("'"); + } + }; + } + + public static Matcher> holding(final V value) { + return new TypeSafeMatcher>() { + @Override + protected boolean matchesSafely(Supplier item) { + return item.get().equals(value); + } + + @Override + public void describeTo(Description description) { + description.appendText("holder containing value '").appendValue(value).appendText("'"); + } + }; + } + + public static Matcher> eventOfType(final EventType type) { + return new TypeSafeMatcher>() { + @Override + protected boolean matchesSafely(StoreEvent item) { + return item.getType().equals(type); + } + + @Override + public void describeTo(Description description) { + description.appendText("event of type '").appendValue(type).appendText("'"); + } + }; + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/PacerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/PacerTest.java new file mode 100644 index 0000000000..9ee33cd86e --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/PacerTest.java @@ -0,0 +1,72 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.util; + +import org.ehcache.core.spi.time.TimeSource; +import org.junit.Before; +import org.junit.Test; + +import java.util.concurrent.atomic.AtomicLong; + +import static org.assertj.core.api.Assertions.assertThat; + +public class PacerTest { + + private static final int CALL_INDEX = 0; + private static final int OR_ELSE_INDEX = 1; + + private final long now = System.currentTimeMillis(); + + private int[] counters = { 0, 0}; + + private Pacer pacer; + + private AtomicLong time = new AtomicLong(now); + + @Before + public void before() { + TimeSource source = () -> time.get(); + pacer = new Pacer(source, 10); + } + + @Test + public void pacedCall() { + callAndAssert(1, 0); + callAndAssert(1, 1); + + time.set(now + 9); + + callAndAssert(1, 2); + + time.set(now + 10); + + callAndAssert(2, 2); + + time.set(now + 21); + + callAndAssert(3, 2); + } + + private void callAndAssert(int callCalls, int orElseCalls) { + Runnable call = () -> counters[CALL_INDEX]++; + Runnable orElse = () -> counters[OR_ELSE_INDEX]++; + + pacer.pacedCall(call, orElse); + + assertThat(counters[CALL_INDEX]).isEqualTo(callCalls); + assertThat(counters[OR_ELSE_INDEX]).isEqualTo(orElseCalls); + } +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/StatisticsTestUtils.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/StatisticsTestUtils.java new file mode 100644 index 0000000000..6e1da8bf8d --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/StatisticsTestUtils.java @@ -0,0 +1,162 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.util; + +import org.ehcache.core.spi.store.Store; +import org.hamcrest.Description; +import org.hamcrest.Factory; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.terracotta.context.ContextManager; +import org.terracotta.context.TreeNode; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.ValueStatistic; + +import java.util.Arrays; +import java.util.EnumSet; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * StatisticsTestUtils + */ +public class StatisticsTestUtils { + /** + * Validates expected {@link org.terracotta.statistics.OperationStatistic} updates for the + * indicated {@code Ehcache} instance. The statistics identified in {@code changed} are + * checked for a value of {@code 1}; all other statistics in the same enumeration class are + * checked for a value of {@code 0}. + * + * @param store the store instance to check + * @param changed the statistics values that should have updated values + * @param the statistics enumeration type + */ + public static > void validateStats(final Store store, final EnumSet changed) { + assert changed != null; + final EnumSet unchanged = EnumSet.complementOf(changed); + + @SuppressWarnings("unchecked") + final List> sets = Arrays.asList(changed, unchanged); + Class statsClass = null; + for (final EnumSet set : sets) { + if (!set.isEmpty()) { + statsClass = set.iterator().next().getDeclaringClass(); + break; + } + } + assert statsClass != null; + + final OperationStatistic operationStatistic = getOperationStatistic(store, statsClass); + for (final E statId : changed) { + assertThat(String.format("Value for %s.%s", statId.getDeclaringClass().getName(), statId.name()), + getStatistic(operationStatistic, statId), StatisticMatcher.equalTo(1L)); + } + for (final E statId : unchanged) { + assertThat(String.format("Value for %s.%s", statId.getDeclaringClass().getName(), statId.name()), + getStatistic(operationStatistic, statId), StatisticMatcher.equalTo(0L)); + } + } + + public static > void validateStat(final Store store, E outcome, long count) { + OperationStatistic operationStatistic = getOperationStatistic(store, outcome.getDeclaringClass()); + assertThat(getStatistic(operationStatistic, outcome), StatisticMatcher.equalTo(count)); + } + + /** + * Gets the value of the statistic indicated from an {@link OperationStatistic} + * instance. + * + * @param operationStatistic the {@code OperationStatistic} instance from which the statistic is to + * be obtained + * @param statId the {@code Enum} constant identifying the statistic for which the value must be obtained + * @param The {@code Enum} type for the statistics + * + * @return the value, possibly null, for {@code statId} about {@code ehcache} + */ + private static > Number getStatistic(final OperationStatistic operationStatistic, final E statId) { + if (operationStatistic != null) { + final ValueStatistic valueStatistic = operationStatistic.statistic(statId); + return (valueStatistic == null ? null : valueStatistic.value()); + } + return null; + } + + /** + * Gets a reference to the {@link OperationStatistic} instance holding the + * class of statistics specified for the {@code Ehcache} instance provided. + * + * @param store the store instance for which the {@code OperationStatistic} instance + * should be obtained + * @param statsClass the {@code Class} of statistics for which the {@code OperationStatistic} instance + * should be obtained + * @param the {@code Enum} type for the statistics + * + * @return a reference to the {@code OperationStatistic} instance holding the {@code statsClass} statistics; + * may be {@code null} if {@code statsClass} statistics do not exist for {@code ehcache} + */ + private static > OperationStatistic getOperationStatistic(final Store store, final Class statsClass) { + for (final TreeNode statNode : ContextManager.nodeFor(store).getChildren()) { + final Object statObj = statNode.getContext().attributes().get("this"); + if (statObj instanceof OperationStatistic) { + @SuppressWarnings("unchecked") + final OperationStatistic statistic = (OperationStatistic)statObj; + if (statistic.type().equals(statsClass)) { + return statistic; + } + } + } + return null; + } + + /** + * Local {@code org.hamcrest.TypeSafeMatcher} implementation for testing + * {@code org.terracotta.statistics.OperationStatistic} values. + */ + private static final class StatisticMatcher extends TypeSafeMatcher { + + final Number expected; + + private StatisticMatcher(final Class expectedType, final Number expected) { + super(expectedType); + this.expected = expected; + } + + @Override + protected boolean matchesSafely(final Number value) { + if (value != null) { + return (value.longValue() == this.expected.longValue()); + } else { + return this.expected.longValue() == 0L; + } + } + + @Override + public void describeTo(final Description description) { + if (this.expected.longValue() == 0L) { + description.appendText("zero or null"); + } else { + description.appendValue(this.expected); + } + } + + @Factory + public static Matcher equalTo(final Number expected) { + return new StatisticMatcher(Number.class, expected); + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/ThreadFactoryUtilTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/ThreadFactoryUtilTest.java similarity index 97% rename from impl/src/test/java/org/ehcache/impl/internal/util/ThreadFactoryUtilTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/util/ThreadFactoryUtilTest.java index 33a029737d..e6a14773d7 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/util/ThreadFactoryUtilTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/ThreadFactoryUtilTest.java @@ -20,9 +20,9 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.not; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; /** * @author Ludovic Orban diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/UnmatchedResourceType.java b/ehcache-impl/src/test/java/org/ehcache/impl/internal/util/UnmatchedResourceType.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/internal/util/UnmatchedResourceType.java rename to ehcache-impl/src/test/java/org/ehcache/impl/internal/util/UnmatchedResourceType.java diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java similarity index 82% rename from impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java index 41486008cb..1825d9e7c6 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java @@ -22,13 +22,12 @@ import org.ehcache.spi.service.ServiceProvider; import org.junit.After; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.runners.Enclosed; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -41,9 +40,6 @@ public class DefaultDiskResourceServiceTest { public static abstract class AbstractDefaultDiskResourceServiceTest { - @Rule - public ExpectedException expectedException = ExpectedException.none(); - protected DefaultDiskResourceService service = new DefaultDiskResourceService(); @SuppressWarnings("unchecked") protected ServiceProvider serviceProvider = mock(ServiceProvider.class); @@ -108,9 +104,8 @@ public void testDestroy() throws CachePersistenceException { @Test public void testCreatePersistenceContextWithin() throws CachePersistenceException { - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Unknown space: null"); - service.createPersistenceContextWithin(null, "test"); + assertThatThrownBy(() -> service.createPersistenceContextWithin(null, "test")) + .isInstanceOf(CachePersistenceException.class).withFailMessage("Unknown space: null"); } @Test @@ -121,16 +116,14 @@ public void testGetPersistenceSpaceIdentifier() throws CachePersistenceException @Test public void testGetStateRepositoryWithin() throws CachePersistenceException { - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Unknown space: null"); - assertThat(service.getStateRepositoryWithin(null, "test")).isNull(); + assertThatThrownBy(() -> service.getStateRepositoryWithin(null, "test")) + .isInstanceOf(CachePersistenceException.class).withFailMessage("Unknown space: null"); } @Test public void testReleasePersistenceSpaceIdentifier() throws CachePersistenceException { - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Unknown space: null"); - assertThat(service.getStateRepositoryWithin(null, "test")).isNull(); + assertThatThrownBy(() -> service.getStateRepositoryWithin(null, "test")) + .isInstanceOf(CachePersistenceException.class).withFailMessage("Unknown space: null"); } } diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java similarity index 94% rename from impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java index ad22995665..4281cc9345 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java @@ -22,7 +22,6 @@ import org.junit.Before; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.File; @@ -32,16 +31,15 @@ import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; public class DefaultLocalPersistenceServiceTest { - @Rule - public final ExpectedException expectedException = ExpectedException.none(); - @Rule public final TemporaryFolder folder = new TemporaryFolder(); @@ -139,7 +137,7 @@ public void testExclusiveLock() throws IOException { // We should not be able to lock the same directory twice // And we should receive a meaningful exception about it - expectedException.expectMessage("Persistence directory already locked by this process: " + testFolder.getAbsolutePath()); - service2.start(null); + RuntimeException thrown = assertThrows(RuntimeException.class, () -> service2.start(null)); + assertThat(thrown, hasProperty("message", is("Persistence directory already locked by this process: " + testFolder.getAbsolutePath()))); } } diff --git a/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java similarity index 94% rename from impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java index 97dc77ca63..55ba86bb69 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java @@ -25,12 +25,11 @@ import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; -import java.io.FilenameFilter; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; /** * FileBasedStateRepositoryTest @@ -47,7 +46,7 @@ public void testHolderSave() throws Exception { File directory = folder.newFolder("testSave"); FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); String holderName = "myHolder"; - StateHolder myHolder = stateRepository.getPersistentStateHolder(holderName, Long.class, String.class); + StateHolder myHolder = stateRepository.getPersistentStateHolder(holderName, Long.class, String.class, c -> true, null); myHolder.putIfAbsent(42L, "TheAnswer!"); @@ -79,7 +78,7 @@ public void testHolderLoad() throws Exception { } FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - StateHolder myHolder = stateRepository.getPersistentStateHolder(holderName, Long.class, String.class); + StateHolder myHolder = stateRepository.getPersistentStateHolder(holderName, Long.class, String.class, c -> true, null); assertThat(myHolder, is(map)); } @@ -96,7 +95,7 @@ public void testIndexProperlySetAfterLoad() throws Exception { } FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - stateRepository.getPersistentStateHolder("otherHolder", Long.class, Long.class); + stateRepository.getPersistentStateHolder("otherHolder", Long.class, Long.class, c -> true, null); stateRepository.close(); File[] files = directory.listFiles((dir, name) -> name.contains("otherHolder") && name.contains("-1-")); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java similarity index 90% rename from impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java index a81c79ba38..6fb918d639 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java @@ -18,7 +18,6 @@ import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.Is; -import org.junit.Assert; import org.junit.Test; import java.io.Externalizable; @@ -32,6 +31,7 @@ import static org.ehcache.impl.serialization.SerializerTestUtilities.newClassName; import static org.ehcache.impl.serialization.SerializerTestUtilities.popTccl; import static org.ehcache.impl.serialization.SerializerTestUtilities.pushTccl; +import static org.hamcrest.MatcherAssert.assertThat; /** * @@ -41,8 +41,7 @@ public class AddedFieldTest { @Test public void addingSerializableField() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer<>(null); serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_write.class, IncompatibleSerializable_write.class, Serializable_write.class); @@ -52,7 +51,7 @@ public void addingSerializableField() throws Exception { pushTccl(createClassNameRewritingLoader(A_read.class, IncompatibleSerializable_read.class)); try { Serializable out = serializer.read(encodedA); - Assert.assertThat(out.getClass().getField("bar").getInt(out), Is.is(4)); + assertThat(out.getClass().getField("bar").getInt(out), Is.is(4)); } finally { popTccl(); } @@ -60,8 +59,7 @@ public void addingSerializableField() throws Exception { @Test public void addingExternalizableField() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer<>(null); serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(B_write.class, Externalizable_write.class); @@ -71,7 +69,7 @@ public void addingExternalizableField() throws Exception { pushTccl(createClassNameRewritingLoader(B_read.class)); try { Serializable out = serializer.read(encodedA); - Assert.assertThat(out.getClass().getField("bar").getInt(out), Is.is(4)); + assertThat(out.getClass().getField("bar").getInt(out), Is.is(4)); } finally { popTccl(); } @@ -79,6 +77,8 @@ public void addingExternalizableField() throws Exception { public static class Serializable_write implements Serializable { + private static final long serialVersionUID = 1L; + int k; Serializable_write(int value) { @@ -88,13 +88,15 @@ public static class Serializable_write implements Serializable { public static class IncompatibleSerializable_write implements Serializable { - private static long serialVersionUID = 3L; + private static final long serialVersionUID = 4L; + int x = 5; }; public static class IncompatibleSerializable_read implements Serializable { - private static long serialVersionUID = 4L; + private static final long serialVersionUID = 4L; + int x = 5; }; @@ -121,6 +123,8 @@ public static class A_read implements Serializable { public static class Externalizable_write implements Externalizable { + private static final long serialVersionUID = 1L; + byte l; public Externalizable_write() { diff --git a/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java index 57266bc738..1ba7a7b713 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java @@ -35,8 +35,7 @@ public class AddedSuperClassTest { @Test public void testAddedSuperClass() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer<>(null); serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_2.class, AddedSuperClass_Hidden.class); @@ -53,8 +52,7 @@ public void testAddedSuperClass() throws Exception { @Test public void testAddedSuperClassNotHidden() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer<>(null); serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_2.class, AddedSuperClass_Hidden.class); @@ -70,6 +68,9 @@ public void testAddedSuperClassNotHidden() throws Exception { } public static class AddedSuperClass_Hidden implements Serializable { + + private static final long serialVersionUID = 1L; + int field; } diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java similarity index 97% rename from impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java index 631a42ad1c..d25cefb4a7 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java @@ -37,8 +37,7 @@ public class ArrayPackageScopeTest { @Test public void testArrayPackageScope() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer<>(null); serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java similarity index 81% rename from impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java index 47e47f351e..2fbbd3755b 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java @@ -32,6 +32,8 @@ import org.junit.Assert; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; + /** * * @author cdennis @@ -40,8 +42,7 @@ public class BasicSerializationTest { @Test public void testSimpleObject() throws ClassNotFoundException { - @SuppressWarnings("unchecked") - StatefulSerializer test = new CompactJavaSerializer(null); + StatefulSerializer test = new CompactJavaSerializer<>(null); test.init(new TransientStateRepository()); String input = ""; @@ -53,8 +54,7 @@ public void testSimpleObject() throws ClassNotFoundException { @Test public void testComplexObject() throws ClassNotFoundException { - @SuppressWarnings("unchecked") - StatefulSerializer test = new CompactJavaSerializer(null); + StatefulSerializer test = new CompactJavaSerializer<>(null); test.init(new TransientStateRepository()); HashMap input = new HashMap<>(); @@ -69,21 +69,20 @@ public void testComplexObject() throws ClassNotFoundException { } - private static final Class[] PRIMITIVE_CLASSES = new Class[] { + private static final Class[] PRIMITIVE_CLASSES = new Class[] { boolean.class, byte.class, char.class, short.class, int.class, long.class, float.class, double.class, void.class }; @Test public void testPrimitiveClasses() throws ClassNotFoundException { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); - Class[] out = (Class[]) s.read(s.serialize(PRIMITIVE_CLASSES)); + Class[] out = (Class[]) s.read(s.serialize(PRIMITIVE_CLASSES)); - Assert.assertThat(out, IsNot.not(IsSame.sameInstance(PRIMITIVE_CLASSES))); - Assert.assertThat(out, IsEqual.equalTo(PRIMITIVE_CLASSES)); + assertThat(out, IsNot.not(IsSame.sameInstance(PRIMITIVE_CLASSES))); + assertThat(out, IsEqual.equalTo(PRIMITIVE_CLASSES)); } @Test @@ -92,14 +91,13 @@ public void testProxyInstance() throws ClassNotFoundException { int foo = rand.nextInt(); float bar = rand.nextFloat(); - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); - Object proxy = s.read(s.serialize((Serializable) Proxy.newProxyInstance(BasicSerializationTest.class.getClassLoader(), new Class[]{Foo.class, Bar.class}, new Handler(foo, bar)))); + Object proxy = s.read(s.serialize((Serializable) Proxy.newProxyInstance(BasicSerializationTest.class.getClassLoader(), new Class[]{Foo.class, Bar.class}, new Handler(foo, bar)))); - Assert.assertThat(((Foo) proxy).foo(), Is.is(foo)); - Assert.assertThat(((Bar) proxy).bar(), Is.is(bar)); + assertThat(((Foo) proxy).foo(), Is.is(foo)); + assertThat(((Bar) proxy).bar(), Is.is(bar)); } interface Foo { @@ -112,12 +110,14 @@ interface Bar { static class Handler implements InvocationHandler, Serializable { + private static final long serialVersionUID = 1L; + static Method fooMethod, barMethod; static { try { - fooMethod = Foo.class.getDeclaredMethod("foo", new Class[0]); - barMethod = Bar.class.getDeclaredMethod("bar", new Class[0]); + fooMethod = Foo.class.getDeclaredMethod("foo"); + barMethod = Bar.class.getDeclaredMethod("bar"); } catch (NoSuchMethodException ex) { throw new Error(); } @@ -131,8 +131,7 @@ static class Handler implements InvocationHandler, Serializable { } @Override - public Object invoke(Object proxy, Method method, Object[] args) - throws Throwable { + public Object invoke(Object proxy, Method method, Object[] args) { if (method.equals(fooMethod)) { return foo; } else if (method.equals(barMethod)) { diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ByteArraySerializerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/ByteArraySerializerTest.java similarity index 97% rename from impl/src/test/java/org/ehcache/impl/serialization/ByteArraySerializerTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/ByteArraySerializerTest.java index ff81e9c78f..8a1662781e 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ByteArraySerializerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/ByteArraySerializerTest.java @@ -24,8 +24,8 @@ import java.util.Arrays; import java.util.Random; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class ByteArraySerializerTest { @@ -76,4 +76,4 @@ public void testReadThrowsOnNullInput() throws ClassNotFoundException { public void testSerializeThrowsOnNullInput() { new ByteArraySerializer().serialize(null); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CharSerializerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CharSerializerTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/impl/serialization/CharSerializerTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/CharSerializerTest.java index effd7aa568..a2699ff34d 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CharSerializerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CharSerializerTest.java @@ -22,8 +22,8 @@ import java.util.Random; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class CharSerializerTest { @@ -52,4 +52,4 @@ public void testReadThrowsOnNullInput() throws ClassNotFoundException { public void testSerializeThrowsOnNullInput() { new CharSerializer().serialize(null); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java similarity index 95% rename from impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java index df72be4c0b..1cb4e49f0c 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java @@ -36,8 +36,7 @@ private static ClassLoader newLoader() { @Test public void testThreadContextLoader() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer<>(null); serializer.init(new TransientStateRepository()); ClassLoader loader = newLoader(); @@ -54,8 +53,7 @@ public void testThreadContextLoader() throws Exception { @Test public void testExplicitLoader() throws Exception { ClassLoader loader = newLoader(); - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(loader); + StatefulSerializer serializer = new CompactJavaSerializer<>(loader); serializer.init(new TransientStateRepository()); ByteBuffer encoded = serializer.serialize((Serializable) loader.loadClass(Foo.class.getName()).newInstance()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java similarity index 97% rename from impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java index dc8360abee..7264824edc 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java @@ -52,8 +52,7 @@ public void createSpecialObject() throws Exception { @Test public void testClassUnloadingAfterSerialization() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer<>(null); serializer.init(new TransientStateRepository()); serializer.serialize(specialObject); @@ -74,8 +73,7 @@ public void testClassUnloadingAfterSerialization() throws Exception { public void testClassUnloadingAfterSerializationAndDeserialization() throws Exception { Thread.currentThread().setContextClassLoader(specialObject.getClass().getClassLoader()); try { - @SuppressWarnings("unchecked") - StatefulSerializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer<>(null); serializer.init(new TransientStateRepository()); specialObject = serializer.read(serializer.serialize(specialObject)); Assert.assertEquals(SpecialClass.class.getName(), specialObject.getClass().getName()); diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerTest.java new file mode 100644 index 0000000000..9de204d921 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerTest.java @@ -0,0 +1,86 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.serialization; + +import org.ehcache.spi.persistence.StateHolder; +import org.ehcache.spi.persistence.StateRepository; +import org.junit.Test; +import org.mockito.stubbing.Answer; + +import java.io.ObjectStreamClass; +import java.nio.ByteBuffer; +import java.util.Date; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +public class CompactJavaSerializerTest { + + @Test + public void testStateHolderFailureRereadBehavior() throws ClassNotFoundException { + StateHolder stateMap = spy(new TransientStateHolder<>()); + StateRepository stateRepository = mock(StateRepository.class); + when(stateRepository.getPersistentStateHolder(eq("CompactJavaSerializer-ObjectStreamClassIndex"), eq(Integer.class), eq(ObjectStreamClass.class), any(), any())).thenReturn(stateMap); + + AtomicBoolean failing = new AtomicBoolean(); + Answer optionalFailure = invocation -> { + try { + return invocation.callRealMethod(); + } finally { + if (failing.get()) { + throw new RuntimeException(); + } + } + }; + + doAnswer(optionalFailure).when(stateMap).entrySet(); + doAnswer(optionalFailure).when(stateMap).get(any()); + doAnswer(optionalFailure).when(stateMap).putIfAbsent(any(), any()); + + CompactJavaSerializer serializerA = new CompactJavaSerializer(getClass().getClassLoader()); + serializerA.init(stateRepository); + + Date object = new Date(); + + failing.set(true); + try { + serializerA.serialize(object); + fail("Expected RuntimeException"); + } catch (RuntimeException e) { + //expected + } + + failing.set(false); + ByteBuffer serialized = serializerA.serialize(object); + assertThat(serializerA.read(serialized), is(object)); + + assertThat(stateMap.entrySet(), hasSize(1)); + + CompactJavaSerializer serializerB = new CompactJavaSerializer(getClass().getClassLoader()); + serializerB.init(stateRepository); + + assertThat(serializerB.read(serialized), is(object)); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/DoubleSerializerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/DoubleSerializerTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/impl/serialization/DoubleSerializerTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/DoubleSerializerTest.java index 10c68d0428..ac4d8ea9d3 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/DoubleSerializerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/DoubleSerializerTest.java @@ -22,8 +22,8 @@ import java.util.Random; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class DoubleSerializerTest { @@ -52,4 +52,4 @@ public void testReadThrowsOnNullInput() throws ClassNotFoundException { public void testSerializeThrowsOnNullInput() { new DoubleSerializer().serialize(null); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/DuplicateClassLoader.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/DuplicateClassLoader.java similarity index 100% rename from impl/src/test/java/org/ehcache/impl/serialization/DuplicateClassLoader.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/DuplicateClassLoader.java diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java new file mode 100644 index 0000000000..e2d0b90255 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java @@ -0,0 +1,87 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.serialization; + +import org.ehcache.spi.serialization.StatefulSerializer; +import org.hamcrest.core.IsSame; +import org.junit.Test; + +import java.io.Serializable; + +import static org.ehcache.impl.serialization.SerializerTestUtilities.createClassNameRewritingLoader; +import static org.ehcache.impl.serialization.SerializerTestUtilities.newClassName; +import static org.ehcache.impl.serialization.SerializerTestUtilities.popTccl; +import static org.ehcache.impl.serialization.SerializerTestUtilities.pushTccl; +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * + * @author cdennis + */ +public class EnumTest { + + @Test + public void basicInstanceSerialization() throws ClassNotFoundException { + StatefulSerializer s = new CompactJavaSerializer<>(null); + s.init(new TransientStateRepository()); + + assertThat(s.read(s.serialize(People.Alice)), IsSame.sameInstance(People.Alice)); + assertThat(s.read(s.serialize(People.Bob)), IsSame.sameInstance(People.Bob)); + assertThat(s.read(s.serialize(People.Eve)), IsSame.sameInstance(People.Eve)); + } + + @Test + public void classSerialization() throws ClassNotFoundException { + StatefulSerializer s = new CompactJavaSerializer<>(null); + s.init(new TransientStateRepository()); + + assertThat(s.read(s.serialize(Enum.class)), IsSame.sameInstance(Enum.class)); + assertThat(s.read(s.serialize(Dogs.Handel.getClass())), IsSame.sameInstance(Dogs.Handel.getClass())); + assertThat(s.read(s.serialize(Dogs.Cassie.getClass())), IsSame.sameInstance(Dogs.Cassie.getClass())); + assertThat(s.read(s.serialize(Dogs.Penny.getClass())), IsSame.sameInstance(Dogs.Penny.getClass())); + } + + @Test + public void shiftingInstanceSerialization() throws ClassNotFoundException { + StatefulSerializer s = new CompactJavaSerializer<>(null); + s.init(new TransientStateRepository()); + + ClassLoader wLoader = createClassNameRewritingLoader(Foo_W.class); + ClassLoader rLoader = createClassNameRewritingLoader(Foo_R.class); + + Class wClass = wLoader.loadClass(newClassName(Foo_W.class)); + Class rClass = rLoader.loadClass(newClassName(Foo_R.class)); + + Object[] wInstances = wClass.getEnumConstants(); + Object[] rInstances = rClass.getEnumConstants(); + + pushTccl(rLoader); + try { + for (int i = 0; i < wInstances.length; i++) { + assertThat(s.read(s.serialize((Serializable) wInstances[i])), IsSame.sameInstance(rInstances[i])); + } + } finally { + popTccl(); + } + } + + public static enum Foo_W { a, b, c { int i = 5; }, d { float f = 5.0f; } } + public static enum Foo_R { a, b { byte b = 3; }, c, d { double d = 6.0; } } +} + +enum People { Alice, Bob, Eve } +enum Dogs { Handel, Cassie { int i = 0; }, Penny { double d = 3.0; } } diff --git a/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java index f609534878..eff6024602 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java @@ -35,8 +35,7 @@ public class FieldTypeChangeTest { @Test public void fieldTypeChangeWithOkayObject() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(Foo_W.class); @@ -52,8 +51,7 @@ public void fieldTypeChangeWithOkayObject() throws Exception { @Test public void fieldTypeChangeWithIncompatibleObject() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(Foo_W.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/FloatSerializerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/FloatSerializerTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/impl/serialization/FloatSerializerTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/FloatSerializerTest.java index b927433767..83dca80e62 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/FloatSerializerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/FloatSerializerTest.java @@ -22,8 +22,8 @@ import java.util.Random; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class FloatSerializerTest { @@ -52,4 +52,4 @@ public void testReadThrowsOnNullInput() throws ClassNotFoundException { public void testSerializeThrowsOnNullInput() { new FloatSerializer().serialize(null); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java similarity index 99% rename from impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java index 13ee6e60f3..edd6bb1a71 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java @@ -38,7 +38,7 @@ public class GetFieldTest { @Test public void testGetField() throws Exception { @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/IntegerSerializerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/IntegerSerializerTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/impl/serialization/IntegerSerializerTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/IntegerSerializerTest.java index e976a9188b..127bbe552d 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/IntegerSerializerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/IntegerSerializerTest.java @@ -22,8 +22,8 @@ import java.util.Random; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class IntegerSerializerTest { @@ -52,4 +52,4 @@ public void testReadThrowsOnNullInput() throws ClassNotFoundException { public void testSerializeThrowsOnNullInput() { new IntegerSerializer().serialize(null); } -} \ No newline at end of file +} diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/serialization/JavaSerializer.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/JavaSerializer.java new file mode 100644 index 0000000000..ebff194cdb --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/JavaSerializer.java @@ -0,0 +1,133 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.serialization; + +import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.core.util.ByteBufferInputStream; +import org.ehcache.spi.serialization.Serializer; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.ObjectStreamClass; +import java.lang.reflect.Proxy; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +/** + * + * @author cdennis + */ +public class JavaSerializer implements Serializer { + + private final ClassLoader classLoader; + + public JavaSerializer(ClassLoader classLoader) { + this.classLoader = classLoader; + } + + @Override + public ByteBuffer serialize(T object) { + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + try { + ObjectOutputStream oout = new ObjectOutputStream(bout); + oout.writeObject(object); + } catch (IOException e) { + throw new SerializerException(e); + } finally { + try { + bout.close(); + } catch (IOException e) { + throw new AssertionError(e); + } + } + return ByteBuffer.wrap(bout.toByteArray()); + } + + @SuppressWarnings("unchecked") + @Override + public T read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { + ByteBufferInputStream bin = new ByteBufferInputStream(entry); + try { + try (OIS ois = new OIS(bin, classLoader)) { + return (T) ois.readObject(); + } + } catch (IOException e) { + throw new SerializerException(e); + } finally { + try { + bin.close(); + } catch (IOException e) { + throw new AssertionError(e); + } + } + } + + @Override + public boolean equals(T object, ByteBuffer binary) throws SerializerException, ClassNotFoundException { + return object.equals(read(binary)); + } + + private static class OIS extends ObjectInputStream { + + private final ClassLoader classLoader; + + public OIS(InputStream in, ClassLoader classLoader) throws IOException { + super(in); + this.classLoader = classLoader; + } + + @Override + protected Class resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException { + try { + return Class.forName(desc.getName(), false, classLoader); + } catch (ClassNotFoundException cnfe) { + Class primitive = primitiveClasses.get(desc.getName()); + if (primitive != null) { + return primitive; + } + throw cnfe; + } + } + + @Override + protected Class resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { + Class[] interfaceClasses = new Class[interfaces.length]; + for (int i = 0; i < interfaces.length; i++) { + interfaceClasses[i] = Class.forName(interfaces[i], false, classLoader); + } + + return Proxy.getProxyClass(classLoader, interfaceClasses); + } + + private static final Map> primitiveClasses = new HashMap<>(); + static { + primitiveClasses.put("boolean", boolean.class); + primitiveClasses.put("byte", byte.class); + primitiveClasses.put("char", char.class); + primitiveClasses.put("double", double.class); + primitiveClasses.put("float", float.class); + primitiveClasses.put("int", int.class); + primitiveClasses.put("long", long.class); + primitiveClasses.put("short", short.class); + primitiveClasses.put("void", void.class); + } + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/LongSerializerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/LongSerializerTest.java similarity index 96% rename from impl/src/test/java/org/ehcache/impl/serialization/LongSerializerTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/LongSerializerTest.java index 0d87007126..87769a44a2 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/LongSerializerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/LongSerializerTest.java @@ -22,8 +22,8 @@ import java.util.Random; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * LongSerializerTest @@ -55,4 +55,4 @@ public void testReadThrowsOnNullInput() throws ClassNotFoundException { public void testSerializeThrowsOnNullInput() { new LongSerializer().serialize(null); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java similarity index 98% rename from impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java index a9af5ef625..0cb58e4e6c 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java @@ -39,8 +39,7 @@ public class PutFieldTest { @Test public void testWithAllPrimitivesAndString() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); @@ -67,8 +66,7 @@ public void testWithAllPrimitivesAndString() throws Exception { @Test public void testWithTwoStrings() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Bar_A.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java similarity index 98% rename from impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java index 1742c8bb5d..32a9e5163d 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java @@ -37,8 +37,7 @@ public class ReadObjectNoDataTest { @Test public void test() throws Exception { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(C_W.class, B_W.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java similarity index 90% rename from impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java index 6bddd70c76..33ca475626 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java @@ -22,19 +22,20 @@ import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.Is; -import org.junit.Assert; import org.junit.Test; import static org.ehcache.impl.serialization.SerializerTestUtilities.createClassNameRewritingLoader; import static org.ehcache.impl.serialization.SerializerTestUtilities.newClassName; import static org.ehcache.impl.serialization.SerializerTestUtilities.popTccl; import static org.ehcache.impl.serialization.SerializerTestUtilities.pushTccl; +import static org.hamcrest.MatcherAssert.assertThat; + public class SerializeAfterEvolutionTest { @Test public void test() throws Exception { @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer<>(null); s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_old.class); @@ -45,11 +46,11 @@ public void test() throws Exception { pushTccl(loaderB); try { Serializable outA = s.read(encodedA); - Assert.assertThat((Integer) outA.getClass().getField("integer").get(outA), Is.is(42)); + assertThat((Integer) outA.getClass().getField("integer").get(outA), Is.is(42)); Serializable b = (Serializable) loaderB.loadClass(newClassName(A_new.class)).newInstance(); Serializable outB = s.read(s.serialize(b)); - Assert.assertThat((Integer) outB.getClass().getField("integer").get(outB), Is.is(42)); + assertThat((Integer) outB.getClass().getField("integer").get(outB), Is.is(42)); } finally { popTccl(); } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/serialization/SerializerTestUtilities.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/SerializerTestUtilities.java new file mode 100644 index 0000000000..09bec883a0 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/SerializerTestUtilities.java @@ -0,0 +1,166 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.serialization; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Collections; +import java.util.Deque; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.Map; +import java.util.Optional; + +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.commons.ClassRemapper; +import org.objectweb.asm.commons.Remapper; + +/** + * + * @author cdennis + */ +public final class SerializerTestUtilities { + + private SerializerTestUtilities() { + //no instances please + } + + public static ClassLoader createClassNameRewritingLoader(Class initial, Class ... more) { + ClassLoader loader = initial.getClassLoader(); + Map remapping = new HashMap<>(); + remapping.putAll(createRemappings(initial)); + for (Class klazz : more) { + remapping.putAll(createRemappings(klazz)); + } + return new RewritingClassloader(loader, remapping); + } + + private static Map createRemappings(Class initial) { + Map remappings = new HashMap<>(); + remappings.put(initial.getName(), newClassName(initial)); + for (Class inner : initial.getDeclaredClasses()) { + remappings.put(inner.getName(), newClassName(inner)); + } + if (initial.isEnum()) { + for (Object e : initial.getEnumConstants()) { + Class eClass = e.getClass(); + if (eClass != initial) { + remappings.put(eClass.getName(), newClassName(eClass)); + } + } + } + return remappings; + } + + public static String newClassName(Class initial) { + String initialName = initial.getName(); + int lastUnderscore = initialName.lastIndexOf('_'); + if (lastUnderscore == -1) { + return initialName; + } else { + int nextDollar = initialName.indexOf('$', lastUnderscore); + if (nextDollar == -1) { + return initialName.substring(0, lastUnderscore); + } else { + return initialName.substring(0, lastUnderscore).concat(initialName.substring(nextDollar)); + } + } + } + + private static final ThreadLocal> tcclStacks = new ThreadLocal>() { + @Override + protected Deque initialValue() { + return new LinkedList<>(); + } + }; + + public static void pushTccl(ClassLoader loader) { + tcclStacks.get().push(Thread.currentThread().getContextClassLoader()); + Thread.currentThread().setContextClassLoader(loader); + } + + public static void popTccl() { + Thread.currentThread().setContextClassLoader(tcclStacks.get().pop()); + } + + static class RewritingClassloader extends ClassLoader { + + private final Map remappings; + + RewritingClassloader(ClassLoader parent, Map remappings) { + super(parent); + this.remappings = Collections.unmodifiableMap(new HashMap<>(remappings)); + } + + @Override + protected synchronized Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + Class c = findLoadedClass(name); + if (c == null) { + if (remappings.containsValue(name)) { + c = findClass(name); + if (resolve) { + resolveClass(c); + } + } else { + return super.loadClass(name, resolve); + } + } + return c; + } + + private Optional findKeyFromValue(String value) { + return remappings.entrySet().stream() + .filter(e -> e.getValue().equals(value)) + .findAny() + .map(e -> e.getKey()); + } + + @Override + protected Class findClass(String name) throws ClassNotFoundException { + String key = findKeyFromValue(name).orElseGet(() -> null); + if(key == null) { + return super.findClass(name); + } + + String path = key.replace('.', '/').concat(".class"); + try (InputStream resource = getResourceAsStream(path)) { + ClassReader reader = new ClassReader(resource); + ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS); + + Remapper remapper = new Remapper() { + @Override + public String map(String from) { + String to = remappings.get(from.replace('/', '.')); + if (to == null) { + return from; + } + return to.replace('.', '/'); + } + }; + + reader.accept(new ClassRemapper(writer, remapper), ClassReader.EXPAND_FRAMES); + + byte[] classBytes = writer.toByteArray(); + + return defineClass(name, classBytes, 0, classBytes.length); + } catch (IOException e) { + throw new ClassNotFoundException("IOException while loading", e); + } + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/StringSerializerTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/StringSerializerTest.java similarity index 98% rename from impl/src/test/java/org/ehcache/impl/serialization/StringSerializerTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/StringSerializerTest.java index 09df5b15aa..606ac50311 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/StringSerializerTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/StringSerializerTest.java @@ -21,8 +21,8 @@ import java.util.Random; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * StringSerializerTest @@ -102,4 +102,4 @@ public void testEqualsMismatchOnMissingFinalSurrogateAgainstOldFormat() throws U ByteBuffer bytes = ByteBuffer.wrap(string.getBytes("UTF-8")); assertThat(serializer.equals(trimmed, bytes), is(false)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java similarity index 88% rename from impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java index 1fc1413232..7424d90bf6 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java @@ -19,8 +19,8 @@ import org.ehcache.spi.persistence.StateHolder; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; /** * TransientStateRepositoryTest @@ -30,10 +30,10 @@ public class TransientStateRepositoryTest { @Test public void testRemembersCreatedMaps() throws Exception { TransientStateRepository repository = new TransientStateRepository(); - StateHolder test = repository.getPersistentStateHolder("test", Long.class, String.class); + StateHolder test = repository.getPersistentStateHolder("test", Long.class, String.class, c -> true, null); test.putIfAbsent(42L, "Again??"); - test = repository.getPersistentStateHolder("test", Long.class, String.class); + test = repository.getPersistentStateHolder("test", Long.class, String.class, c -> true, null); assertThat(test.get(42L), is("Again??")); } diff --git a/ehcache-impl/src/test/java/org/ehcache/impl/store/DefaultStoreEventDispatcherTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/store/DefaultStoreEventDispatcherTest.java new file mode 100644 index 0000000000..7ee9c7355a --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/impl/store/DefaultStoreEventDispatcherTest.java @@ -0,0 +1,175 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.store; + +import org.ehcache.event.EventType; +import org.ehcache.core.events.StoreEventSink; +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.core.spi.store.events.StoreEvent; +import org.ehcache.core.spi.store.events.StoreEventFilter; +import org.ehcache.core.spi.store.events.StoreEventListener; +import org.hamcrest.Matcher; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Random; +import java.util.concurrent.CountDownLatch; + +import static org.ehcache.impl.internal.util.Matchers.eventOfType; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.withSettings; +import static org.mockito.hamcrest.MockitoHamcrest.argThat; + +/** + * DefaultStoreEventDispatcherTest + */ +public class DefaultStoreEventDispatcherTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultStoreEventDispatcherTest.class); + + @Test + public void testRegistersOrderingChange() { + DefaultStoreEventDispatcher dispatcher = new DefaultStoreEventDispatcher<>(1); + + assertThat(dispatcher.isEventOrdering(), is(false)); + dispatcher.setEventOrdering(true); + assertThat(dispatcher.isEventOrdering(), is(true)); + dispatcher.setEventOrdering(false); + assertThat(dispatcher.isEventOrdering(), is(false)); + } + + @Test + @SuppressWarnings("unchecked") + public void testListenerNotifiedUnordered() { + DefaultStoreEventDispatcher dispatcher = new DefaultStoreEventDispatcher<>(1); + @SuppressWarnings("unchecked") + StoreEventListener listener = mock(StoreEventListener.class); + dispatcher.addEventListener(listener); + + StoreEventSink sink = dispatcher.eventSink(); + sink.created("test", "test"); + dispatcher.releaseEventSink(sink); + + verify(listener).onEvent(any(StoreEvent.class)); + } + + @Test + @SuppressWarnings("unchecked") + public void testListenerNotifiedOrdered() { + DefaultStoreEventDispatcher dispatcher = new DefaultStoreEventDispatcher<>(1); + @SuppressWarnings("unchecked") + StoreEventListener listener = mock(StoreEventListener.class); + dispatcher.addEventListener(listener); + dispatcher.setEventOrdering(true); + + StoreEventSink sink = dispatcher.eventSink(); + sink.created("test", "test"); + dispatcher.releaseEventSink(sink); + + verify(listener).onEvent(any(StoreEvent.class)); + } + + @Test + public void testEventFiltering() { + DefaultStoreEventDispatcher dispatcher = new DefaultStoreEventDispatcher<>(1); + @SuppressWarnings("unchecked") + StoreEventListener listener = mock(StoreEventListener.class, withSettings().verboseLogging()); + dispatcher.addEventListener(listener); + + @SuppressWarnings("unchecked") + StoreEventFilter filter = mock(StoreEventFilter.class); + when(filter.acceptEvent(eq(EventType.CREATED), anyString(), ArgumentMatchers.isNull(), anyString())).thenReturn(true); + when(filter.acceptEvent(eq(EventType.REMOVED), anyString(), anyString(), anyString())).thenReturn(false); + dispatcher.addEventFilter(filter); + + StoreEventSink sink = dispatcher.eventSink(); + sink.removed("gone", () -> "really gone"); + sink.created("new", "and shiny"); + dispatcher.releaseEventSink(sink); + + Matcher> matcher = eventOfType(EventType.CREATED); + verify(listener).onEvent(argThat(matcher)); + verifyNoMoreInteractions(listener); + } + + @Test + public void testOrderedEventDelivery() throws Exception { + final DefaultStoreEventDispatcher dispatcher = new DefaultStoreEventDispatcher<>(4); + dispatcher.setEventOrdering(true); + final ConcurrentHashMap map = new ConcurrentHashMap<>(); + final long[] keys = new long[] { 1L, 42L, 256L }; + map.put(keys[0], 125L); + map.put(keys[1], 42 * 125L); + map.put(keys[2], 256 * 125L); + + final ConcurrentHashMap resultMap = new ConcurrentHashMap<>(map); + dispatcher.addEventListener(event -> { + if (event.getNewValue()) { + resultMap.compute(event.getKey(), (key, value) -> value + 10L); + } else { + resultMap.compute(event.getKey(), (key, value) -> 7L - value); + } + }); + + final long seed = new Random().nextLong(); + LOGGER.info("Starting test with seed {}", seed); + + int workers = Runtime.getRuntime().availableProcessors() + 2; + final CountDownLatch latch = new CountDownLatch(workers); + for (int i = 0; i < workers; i++) { + final int index =i; + new Thread(() -> { + Random random = new Random(seed * index); + for (int j = 0; j < 10000; j++) { + int keyIndex = random.nextInt(3); + final StoreEventSink sink = dispatcher.eventSink(); + if (random.nextBoolean()) { + map.compute(keys[keyIndex], (key, value) -> { + long newValue = value + 10L; + sink.created(key, true); + return newValue; + }); + } else { + map.compute(keys[keyIndex], (key, value) -> { + long newValue = 7L - value; + sink.created(key, false); + return newValue; + }); + } + dispatcher.releaseEventSink(sink); + } + latch.countDown(); + }).start(); + } + + latch.await(); + + LOGGER.info("\n\tResult map {} \n\tWork map {}", resultMap, map); + + assertThat(resultMap, is(map)); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/store/HashUtilsTest.java b/ehcache-impl/src/test/java/org/ehcache/impl/store/HashUtilsTest.java similarity index 95% rename from impl/src/test/java/org/ehcache/impl/store/HashUtilsTest.java rename to ehcache-impl/src/test/java/org/ehcache/impl/store/HashUtilsTest.java index e82f763ae8..c74abbe1f3 100644 --- a/impl/src/test/java/org/ehcache/impl/store/HashUtilsTest.java +++ b/ehcache-impl/src/test/java/org/ehcache/impl/store/HashUtilsTest.java @@ -20,8 +20,8 @@ import java.util.Random; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * HashUtilsTest diff --git a/ehcache-impl/src/test/java/org/ehcache/test/MockitoUtil.java b/ehcache-impl/src/test/java/org/ehcache/test/MockitoUtil.java new file mode 100644 index 0000000000..2e4570b7b7 --- /dev/null +++ b/ehcache-impl/src/test/java/org/ehcache/test/MockitoUtil.java @@ -0,0 +1,38 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.test; + +import org.mockito.Mockito; + +/** + * Tiny little class allowing to remove the type constraint between the parameter and the return type. That way, + * mocking a generic type won't be a systematic warning anymore. + *
                                      {@code
                                      + * List list = MockitoUtil.mock(List.class); // no suppress warning
                                      + * }
                                      + * 
                                      + */ +public final class MockitoUtil { + + private MockitoUtil() { + } + + @SuppressWarnings("unchecked") + public static T mock(Class clazz) { + return Mockito.mock((Class) clazz); + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java b/ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java similarity index 89% rename from impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java rename to ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java index fc7410543d..e113cd8ab6 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java +++ b/ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java @@ -22,10 +22,12 @@ package org.ehcache.impl.internal.concurrent; +import sun.misc.Unsafe; + +import org.ehcache.config.EvictionAdvisor; + import java.io.ObjectStreamField; import java.io.Serializable; -import static java.lang.Integer.rotateLeft; - import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.AbstractMap; @@ -43,7 +45,6 @@ import java.util.Random; import java.util.Set; import java.util.Spliterator; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountedCompleter; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.atomic.AtomicReference; @@ -56,28 +57,16 @@ import java.util.function.Function; import java.util.function.IntBinaryOperator; import java.util.function.LongBinaryOperator; +import java.util.function.Predicate; import java.util.function.ToDoubleBiFunction; import java.util.function.ToDoubleFunction; import java.util.function.ToIntBiFunction; import java.util.function.ToIntFunction; import java.util.function.ToLongBiFunction; import java.util.function.ToLongFunction; - -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.impl.internal.store.heap.holders.OnHeapValueHolder; - -import sun.misc.Unsafe; - +import java.util.stream.Stream; /** - * *** WARNING IF YOU'RE USING THIS CLASS WITH JDK 8 OR ABOVE *** - *
                                      - * BY ALL MEANS, DO NOT USE THE spliterator() METHOD - * OF THE keySet(), values() or - * entrySet() RETURNED OBJECTS. USE THE - * _spliterator() ONE INSTEAD. - *
                                      - *

                                      * A hash table supporting full concurrency of retrievals and * high expected concurrency for updates. This class obeys the * same functional specification as {@link java.util.Hashtable}, and @@ -139,8 +128,8 @@ * same mapping value. * *

                                      A ConcurrentHashMap can be used as a scalable frequency map (a - * form of histogram or multiset) by using - * LongAdder values and initializing via + * form of histogram or multiset) by using {@link + * java.util.concurrent.atomic.LongAdder} values and initializing via * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count * to a {@code ConcurrentHashMap freqs}, you can use * {@code freqs.computeIfAbsent(key, k -> new LongAdder()).increment();} @@ -153,48 +142,47 @@ * does not allow {@code null} to be used as a key or value. * *

                                      ConcurrentHashMaps support a set of sequential and parallel bulk - * operations that, unlike most Stream methods, are designed + * operations that, unlike most {@link Stream} methods, are designed * to be safely, and often sensibly, applied even with maps that are * being concurrently updated by other threads; for example, when * computing a snapshot summary of the values in a shared registry. * There are three kinds of operation, each with four forms, accepting - * functions with Keys, Values, Entries, and (Key, Value) arguments - * and/or return values. Because the elements of a ConcurrentHashMap - * are not ordered in any particular way, and may be processed in - * different orders in different parallel executions, the correctness - * of supplied functions should not depend on any ordering, or on any - * other objects or values that may transiently change while - * computation is in progress; and except for forEach actions, should - * ideally be side-effect-free. Bulk operations on {@link java.util.Map.Entry} - * objects do not support method {@code setValue}. + * functions with keys, values, entries, and (key, value) pairs as + * arguments and/or return values. Because the elements of a + * ConcurrentHashMap are not ordered in any particular way, and may be + * processed in different orders in different parallel executions, the + * correctness of supplied functions should not depend on any + * ordering, or on any other objects or values that may transiently + * change while computation is in progress; and except for forEach + * actions, should ideally be side-effect-free. Bulk operations on + * {@link Map.Entry} objects do not support method {@code setValue}. * *

                                        - *
                                      • forEach: Perform a given action on each element. + *
                                      • forEach: Performs a given action on each element. * A variant form applies a given transformation on each element - * before performing the action.
                                      • + * before performing the action. * - *
                                      • search: Return the first available non-null result of + *
                                      • search: Returns the first available non-null result of * applying a given function on each element; skipping further - * search when a result is found.
                                      • + * search when a result is found. * - *
                                      • reduce: Accumulate each element. The supplied reduction + *
                                      • reduce: Accumulates each element. The supplied reduction * function cannot rely on ordering (more formally, it should be * both associative and commutative). There are five variants: * *
                                          * - *
                                        • Plain reductions. (There is not a form of this method for + *
                                        • Plain reductions. (There is not a form of this method for * (key, value) function arguments since there is no corresponding - * return type.)
                                        • + * return type.) * - *
                                        • Mapped reductions that accumulate the results of a given - * function applied to each element.
                                        • + *
                                        • Mapped reductions that accumulate the results of a given + * function applied to each element. * - *
                                        • Reductions to scalar doubles, longs, and ints, using a - * given basis value.
                                        • + *
                                        • Reductions to scalar doubles, longs, and ints, using a + * given basis value. * *
                                        - *
                                      • *
                                      * *

                                      These bulk operations accept a {@code parallelismThreshold} @@ -261,20 +249,39 @@ *

                                      All arguments to all task methods must be non-null. * *

                                      This class is a member of the - * + * * Java Collections Framework. + *

                                      + * EHCACHE SPECIFIC + * This is an edited version of the {@code ConcurrentHashMap} from + * JSR 166 CSV repository revision 1.312. + * Main modifications are tagged with "EHCACHE SPECIFIC" in the code. Global changes are: + *

                                        + *
                                      • Unsafe is retrieved using {@link ThreadLocalRandomUtil#UNSAFE}
                                      • + *
                                      • ThreadLocalRandom.getProbe is retrieved using {@link ThreadLocalRandomUtil#getProbe}
                                      • + *
                                      • getObjectAcquire is replaced by getObject for Java 8 compatibility
                                      • + *
                                      • compareAndSetObject is replaced by compareAndSwapObject for Java 8 compatibility
                                      • + *
                                      • compareAndSetInt is replaced by compareAndSwapInt for Java 8 compatibility
                                      • + *
                                      • compareAndSwapLong is replaced by compareAndSwapLong for Java 8 compatibility
                                      • + *
                                      • putObjectRelease is replaced by putObjectVolatile for Java 8 compatibility
                                      • + *
                                      • jdk.internal.misc.Unsafe is replaced by sun.misc.Unsafe for Java 8 compatibility
                                      • + *
                                      • jdk.internal.vm.annotation.Contended is replaced by sun.misc.Contended for Java 8 compatibility
                                      • + *
                                      • jdk.internal.vm.annotation.Contended is replaced by sun.misc.Contended for Java 8 compatibility
                                      • + *
                                      + * END OF EHCACHE SPECIFIC * * @since 1.5 * @author Doug Lea * @param the type of keys maintained by this map * @param the type of mapped values */ -@SuppressWarnings("unchecked") public class ConcurrentHashMap extends AbstractMap - implements ConcurrentMap, Serializable { + implements EvictingConcurrentMap, Serializable { // EHCACHE SPECIFIC: EvictingConcurrentMap instead of ConcurrentMap private static final long serialVersionUID = 7249069246763182397L; - public static final ConcurrentHashMap.TreeBin FAKE_TREE_BIN = new TreeBin(new TreeNode(0, null, null, null, null)); + // EHCACHE SPECIFIC + public static final ConcurrentHashMap.TreeBin FAKE_TREE_BIN = new TreeBin<>(new TreeNode<>(0, null, null, null, null)); + // END OF EHCACHE SPECIFIC /* * Overview: @@ -308,7 +315,7 @@ public class ConcurrentHashMap extends AbstractMap * Table accesses require volatile/atomic reads, writes, and * CASes. Because there is no other way to arrange this without * adding further indirections, we use intrinsics - * (sun.misc.Unsafe) operations. + * (jdk.internal.misc.Unsafe) operations. * * We use the top (sign) bit of Node hash fields for control * purposes -- it is available anyway because of addressing @@ -488,7 +495,7 @@ public class ConcurrentHashMap extends AbstractMap * * Maintaining API and serialization compatibility with previous * versions of this class introduces several oddities. Mainly: We - * leave untouched but unused constructor arguments refering to + * leave untouched but unused constructor arguments referring to * concurrencyLevel. We accept a loadFactor constructor argument, * but apply it only to initial table capacity (which is the only * time that we can guarantee to honor it.) We also declare an @@ -583,7 +590,7 @@ public class ConcurrentHashMap extends AbstractMap * The number of bits used for generation stamp in sizeCtl. * Must be at least 6 for 32bit arrays. */ - private static int RESIZE_STAMP_BITS = 16; + private static final int RESIZE_STAMP_BITS = 16; /** * The maximum number of threads that can help resize. @@ -607,11 +614,20 @@ public class ConcurrentHashMap extends AbstractMap /** Number of CPUS, to place bounds on some sizings */ static final int NCPU = Runtime.getRuntime().availableProcessors(); - /** For serialization compatibility. */ + /** + * Serialized pseudo-fields, provided only for jdk7 compatibility. + * @serialField segments Segment[] + * The segments, each of which is a specialized hash table. + * @serialField segmentMask int + * Mask value for indexing into segments. The upper bits of a + * key's hash code are used to choose the segment. + * @serialField segmentShift int + * Shift value for indexing within segments. + */ private static final ObjectStreamField[] serialPersistentFields = { new ObjectStreamField("segments", Segment[].class), new ObjectStreamField("segmentMask", Integer.TYPE), - new ObjectStreamField("segmentShift", Integer.TYPE) + new ObjectStreamField("segmentShift", Integer.TYPE), }; /* ---------------- Nodes -------------- */ @@ -630,17 +646,23 @@ static class Node implements Map.Entry { volatile V val; volatile Node next; - Node(int hash, K key, V val, Node next) { + Node(int hash, K key, V val) { this.hash = hash; this.key = key; this.val = val; + } + + Node(int hash, K key, V val, Node next) { + this(hash, key, val); this.next = next; } - public final K getKey() { return key; } - public final V getValue() { return val; } - public final int hashCode() { return key.hashCode() ^ val.hashCode(); } - public final String toString(){ return key + "=" + val; } + public final K getKey() { return key; } + public final V getValue() { return val; } + public final int hashCode() { return key.hashCode() ^ val.hashCode(); } + public final String toString() { + return ThreadLocalRandomUtil.mapEntryToString(key, val); // EHCACHE SPECIFIC: Code taken from java.util.concurrent.Helpers + } public final V setValue(V value) { throw new UnsupportedOperationException(); } @@ -690,7 +712,14 @@ Node find(int h, Object k) { * never be used in index calculations because of table bounds. */ static final int spread(int h) { - h = rotateLeft(h, 1); + // EHCACHE SPECIFIC + // We modify the hash spread to help eviction sampling. Because, in a full hashmap, ordered numbers will be hashed as [ 1, 2, 3, e, e, e ] ('e' being empty) + // So everything is at the beginning of the map. Then, is the sampler starts randomly at index 2, it will find empty nodes all the + // way to the end and will then wrap at the beginning. Slow. To prevent that, we perform a rotate left which basically multiplies everything + // by two but also wraps the last bit. The spreading will then be [ 1, e, 2, e, 3, e ] which will be much nicer for sampling and only be a tiny tiny + // bit slower at access. + h = Integer.rotateLeft(h, 1); + // END OF EHCACHE SPECIFIC return (h ^ (h >>> 16)) & HASH_BITS; } @@ -699,12 +728,7 @@ static final int spread(int h) { * See Hackers Delight, sec 3.2 */ private static final int tableSizeFor(int c) { - int n = c - 1; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; + int n = -1 >>> Integer.numberOfLeadingZeros(c - 1); return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1; } @@ -714,12 +738,12 @@ private static final int tableSizeFor(int c) { */ static Class comparableClassFor(Object x) { if (x instanceof Comparable) { - Class c; Type[] ts, as; Type t; ParameterizedType p; + Class c; Type[] ts, as; ParameterizedType p; if ((c = x.getClass()) == String.class) // bypass checks return c; if ((ts = c.getGenericInterfaces()) != null) { - for (int i = 0; i < ts.length; ++i) { - if (((t = ts[i]) instanceof ParameterizedType) && + for (Type t : ts) { + if ((t instanceof ParameterizedType) && ((p = (ParameterizedType)t).getRawType() == Comparable.class) && (as = p.getActualTypeArguments()) != null && @@ -744,7 +768,7 @@ static int compareComparables(Class kc, Object k, Object x) { /* ---------------- Table element access -------------- */ /* - * Volatile access methods are used for table elements as well as + * Atomic access methods are used for table elements as well as * elements of in-progress next table while resizing. All uses of * the tab arguments must be null checked by callers. All callers * also paranoically precheck that tab's length is not zero (or an @@ -754,14 +778,12 @@ static int compareComparables(Class kc, Object k, Object x) { * errors by users, these checks must operate on local variables, * which accounts for some odd-looking inline assignments below. * Note that calls to setTabAt always occur within locked regions, - * and so in principle require only release ordering, not - * full volatile semantics, but are currently coded as volatile - * writes to be conservative. + * and so require only release ordering. */ @SuppressWarnings("unchecked") static final Node tabAt(Node[] tab, int i) { - return (Node)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE); + return (Node)U.getObject(tab, ((long)i << ASHIFT) + ABASE); } static final boolean casTabAt(Node[] tab, int i, @@ -843,12 +865,7 @@ public ConcurrentHashMap() { * elements is negative */ public ConcurrentHashMap(int initialCapacity) { - if (initialCapacity < 0) - throw new IllegalArgumentException(); - int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ? - MAXIMUM_CAPACITY : - tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1)); - this.sizeCtl = cap; + this(initialCapacity, LOAD_FACTOR, 1); } /** @@ -882,8 +899,8 @@ public ConcurrentHashMap(int initialCapacity, float loadFactor) { /** * Creates a new, empty map with an initial table size based on - * the given number of elements ({@code initialCapacity}), table - * density ({@code loadFactor}), and number of concurrently + * the given number of elements ({@code initialCapacity}), initial + * table density ({@code loadFactor}), and number of concurrently * updating threads ({@code concurrencyLevel}). * * @param initialCapacity the initial capacity. The implementation @@ -988,7 +1005,7 @@ public boolean containsValue(Object value) { throw new NullPointerException(); Node[] t; if ((t = table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) { V v; if ((v = p.val) == value || (v != null && value.equals(v))) @@ -1021,16 +1038,20 @@ final V putVal(K key, V value, boolean onlyIfAbsent) { int hash = spread(key.hashCode()); int binCount = 0; for (Node[] tab = table;;) { - Node f; int n, i, fh; + Node f; int n, i, fh; K fk; V fv; if (tab == null || (n = tab.length) == 0) tab = initTable(); else if ((f = tabAt(tab, i = (n - 1) & hash)) == null) { - if (casTabAt(tab, i, null, - new Node<>(hash, key, value, null))) + if (casTabAt(tab, i, null, new Node(hash, key, value))) break; // no lock when adding to empty bin } else if ((fh = f.hash) == MOVED) tab = helpTransfer(tab, f); + else if (onlyIfAbsent // check first node without acquiring lock + && fh == hash + && ((fk = f.key) == key || (fk != null && key.equals(fk))) + && (fv = f.val) != null) + return fv; else { V oldVal = null; synchronized (f) { @@ -1049,8 +1070,7 @@ else if ((fh = f.hash) == MOVED) } Node pred = e; if ((e = e.next) == null) { - pred.next = new Node<>(hash, key, - value, null); + pred.next = new Node(hash, key, value); break; } } @@ -1065,6 +1085,8 @@ else if (f instanceof TreeBin) { p.val = value; } } + else if (f instanceof ReservationNode) + throw new IllegalStateException("Recursive update"); } } if (binCount != 0) { @@ -1167,6 +1189,8 @@ else if (t.removeTreeNode(p)) } } } + else if (f instanceof ReservationNode) + throw new IllegalStateException("Recursive update"); } } if (validated) { @@ -1182,39 +1206,6 @@ else if (t.removeTreeNode(p)) return null; } - /** - * Remove and return all mappings for which the keys have the specified hashcode. - * @param keyHash the keys' hashcode. - * @return the removed mappings. - */ - public final Collection> removeAllWithHash(int keyHash) { - List> invalidated = new ArrayList<>(); - - int hash = spread(keyHash); - for (Node[] tab = table; ; ) { - Node f; - int n, i; - if (tab == null || (n = tab.length) == 0 || - (f = tabAt(tab, i = (n - 1) & hash)) == null) - break; - else if (f.hash == MOVED) - tab = helpTransfer(tab, f); - else { - int nodesCount = 0; - synchronized (f) { - if (tabAt(tab, i) == f) { - nodesCount = nodesAt(f, invalidated); - setTabAt(tab, i, null); - } - } - if (nodesCount > 0) { - addCount(-nodesCount, -nodesCount); - } - } - } - return invalidated; - } - /** * Removes all of the mappings from this map. */ @@ -1270,7 +1261,8 @@ else if ((fh = f.hash) == MOVED) { */ public KeySetView keySet() { KeySetView ks; - return (ks = keySet) != null ? ks : (keySet = new KeySetView<>(this, null)); + if ((ks = keySet) != null) return ks; + return keySet = new KeySetView(this, null); } /** @@ -1286,14 +1278,15 @@ public KeySetView keySet() { *

                                      The view's iterators and spliterators are * weakly consistent. * - *

                                      The view's {@code spliterator} reports {@link java.util.Spliterator#CONCURRENT} - * and {@link java.util.Spliterator#NONNULL}. + *

                                      The view's {@code spliterator} reports {@link Spliterator#CONCURRENT} + * and {@link Spliterator#NONNULL}. * * @return the collection view */ public Collection values() { ValuesView vs; - return (vs = values) != null ? vs : (values = new ValuesView<>(this)); + if ((vs = values) != null) return vs; + return values = new ValuesView(this); } /** @@ -1308,14 +1301,15 @@ public Collection values() { *

                                      The view's iterators and spliterators are * weakly consistent. * - *

                                      The view's {@code spliterator} reports {@link java.util.Spliterator#CONCURRENT}, - * {@link java.util.Spliterator#DISTINCT}, and {@link java.util.Spliterator#NONNULL}. + *

                                      The view's {@code spliterator} reports {@link Spliterator#CONCURRENT}, + * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}. * * @return the set view */ public Set> entrySet() { EntrySetView es; - return (es = entrySet) != null ? es : (entrySet = new EntrySetView<>(this)); + if ((es = entrySet) != null) return es; + return entrySet = new EntrySetView(this); } /** @@ -1329,7 +1323,7 @@ public int hashCode() { int h = 0; Node[] t; if ((t = table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) h += p.key.hashCode() ^ p.val.hashCode(); } @@ -1350,7 +1344,7 @@ public int hashCode() { public String toString() { Node[] t; int f = (t = table) == null ? 0 : t.length; - Traverser it = new Traverser<>(t, f, 0, f); + Traverser it = new Traverser(t, f, 0, f); StringBuilder sb = new StringBuilder(); sb.append('{'); Node p; @@ -1386,7 +1380,7 @@ public boolean equals(Object o) { Map m = (Map) o; Node[] t; int f = (t = table) == null ? 0 : t.length; - Traverser it = new Traverser<>(t, f, 0, f); + Traverser it = new Traverser(t, f, 0, f); for (Node p; (p = it.advance()) != null; ) { V val = p.val; Object v = m.get(p.key); @@ -1407,7 +1401,7 @@ public boolean equals(Object o) { /** * Stripped-down version of helper class used in previous version, - * declared for the sake of serialization compatibility + * declared for the sake of serialization compatibility. */ static class Segment extends ReentrantLock implements Serializable { private static final long serialVersionUID = 2249069246763182397L; @@ -1416,13 +1410,13 @@ static class Segment extends ReentrantLock implements Serializable { } /** - * Saves the state of the {@code ConcurrentHashMap} instance to a - * stream (i.e., serializes it). + * Saves this map to a stream (that is, serializes it). + * * @param s the stream * @throws java.io.IOException if an I/O error occurs * @serialData - * the key (Object) and value (Object) - * for each key-value mapping, followed by a null pair. + * the serialized fields, followed by the key (Object) and value + * (Object) for each key-value mapping, followed by a null pair. * The key-value mappings are emitted in no particular order. */ private void writeObject(java.io.ObjectOutputStream s) @@ -1441,15 +1435,16 @@ private void writeObject(java.io.ObjectOutputStream s) Segment[] segments = (Segment[]) new Segment[DEFAULT_CONCURRENCY_LEVEL]; for (int i = 0; i < segments.length; ++i) - segments[i] = new Segment<>(LOAD_FACTOR); - s.putFields().put("segments", segments); - s.putFields().put("segmentShift", segmentShift); - s.putFields().put("segmentMask", segmentMask); + segments[i] = new Segment(LOAD_FACTOR); + java.io.ObjectOutputStream.PutField streamFields = s.putFields(); + streamFields.put("segments", segments); + streamFields.put("segmentShift", segmentShift); + streamFields.put("segmentMask", segmentMask); s.writeFields(); Node[] t; if ((t = table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) { s.writeObject(p.key); s.writeObject(p.val); @@ -1457,11 +1452,10 @@ private void writeObject(java.io.ObjectOutputStream s) } s.writeObject(null); s.writeObject(null); - segments = null; // throw away } /** - * Reconstitutes the instance from a stream (that is, deserializes it). + * Reconstitutes this map from a stream (that is, deserializes it). * @param s the stream * @throws ClassNotFoundException if the class of a serialized object * could not be found @@ -1486,7 +1480,7 @@ private void readObject(java.io.ObjectInputStream s) @SuppressWarnings("unchecked") V v = (V) s.readObject(); if (k != null && v != null) { - p = new Node<>(spread(k.hashCode()), k, v, p); + p = new Node(spread(k.hashCode()), k, v, p); ++size; } else @@ -1495,13 +1489,9 @@ private void readObject(java.io.ObjectInputStream s) if (size == 0L) sizeCtl = 0; else { - int n; - if (size >= (long)(MAXIMUM_CAPACITY >>> 1)) - n = MAXIMUM_CAPACITY; - else { - int sz = (int)size; - n = tableSizeFor(sz + (sz >>> 1) + 1); - } + long ts = (long)(1.0 + size / LOAD_FACTOR); + int n = (ts >= (long)MAXIMUM_CAPACITY) ? + MAXIMUM_CAPACITY : tableSizeFor((int)ts); @SuppressWarnings("unchecked") Node[] tab = (Node[])new Node[n]; int mask = n - 1; @@ -1539,15 +1529,15 @@ private void readObject(java.io.ObjectInputStream s) p.next = first; TreeNode hd = null, tl = null; for (q = p; q != null; q = q.next) { - TreeNode t = new TreeNode<> - (q.hash, q.key, q.val, null, null); + TreeNode t = new TreeNode + (q.hash, q.key, q.val, null, null); if ((t.prev = tl) == null) hd = t; else tl.next = t; tl = t; } - setTabAt(tab, j, new TreeBin<>(hd)); + setTabAt(tab, j, new TreeBin(hd)); } } } @@ -1634,7 +1624,7 @@ public void forEach(BiConsumer action) { if (action == null) throw new NullPointerException(); Node[] t; if ((t = table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) { action.accept(p.key, p.val); } @@ -1645,7 +1635,7 @@ public void replaceAll(BiFunction function) { if (function == null) throw new NullPointerException(); Node[] t; if ((t = table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) { V oldValue = p.val; for (K key = p.key;;) { @@ -1660,6 +1650,45 @@ public void replaceAll(BiFunction function) { } } + /** + * Helper method for EntrySetView.removeIf. + */ + boolean removeEntryIf(Predicate> function) { + if (function == null) throw new NullPointerException(); + Node[] t; + boolean removed = false; + if ((t = table) != null) { + Traverser it = new Traverser(t, t.length, 0, t.length); + for (Node p; (p = it.advance()) != null; ) { + K k = p.key; + V v = p.val; + Map.Entry e = new AbstractMap.SimpleImmutableEntry<>(k, v); + if (function.test(e) && replaceNode(k, null, v) != null) + removed = true; + } + } + return removed; + } + + /** + * Helper method for ValuesView.removeIf. + */ + boolean removeValueIf(Predicate function) { + if (function == null) throw new NullPointerException(); + Node[] t; + boolean removed = false; + if ((t = table) != null) { + Traverser it = new Traverser(t, t.length, 0, t.length); + for (Node p; (p = it.advance()) != null; ) { + K k = p.key; + V v = p.val; + if (function.test(v) && replaceNode(k, null, v) != null) + removed = true; + } + } + return removed; + } + /** * If the specified key is not already associated with a value, * attempts to compute its value using the given mapping function @@ -1689,18 +1718,18 @@ public V computeIfAbsent(K key, Function mappingFunction V val = null; int binCount = 0; for (Node[] tab = table;;) { - Node f; int n, i, fh; + Node f; int n, i, fh; K fk; V fv; if (tab == null || (n = tab.length) == 0) tab = initTable(); else if ((f = tabAt(tab, i = (n - 1) & h)) == null) { - Node r = new ReservationNode<>(); + Node r = new ReservationNode(); synchronized (r) { if (casTabAt(tab, i, null, r)) { binCount = 1; Node node = null; try { if ((val = mappingFunction.apply(key)) != null) - node = new Node<>(h, key, val, null); + node = new Node(h, key, val); } finally { setTabAt(tab, i, node); } @@ -1711,6 +1740,10 @@ else if ((f = tabAt(tab, i = (n - 1) & h)) == null) { } else if ((fh = f.hash) == MOVED) tab = helpTransfer(tab, f); + else if (fh == h // check first node without acquiring lock + && ((fk = f.key) == key || (fk != null && key.equals(fk))) + && (fv = f.val) != null) + return fv; else { boolean added = false; synchronized (f) { @@ -1728,8 +1761,10 @@ else if ((fh = f.hash) == MOVED) Node pred = e; if ((e = e.next) == null) { if ((val = mappingFunction.apply(key)) != null) { + if (pred.next != null) + throw new IllegalStateException("Recursive update"); added = true; - pred.next = new Node<>(h, key, val, null); + pred.next = new Node(h, key, val); } break; } @@ -1747,6 +1782,8 @@ else if ((val = mappingFunction.apply(key)) != null) { t.putTreeVal(h, key, val); } } + else if (f instanceof ReservationNode) + throw new IllegalStateException("Recursive update"); } } if (binCount != 0) { @@ -1842,6 +1879,8 @@ else if (f instanceof TreeBin) { } } } + else if (f instanceof ReservationNode) + throw new IllegalStateException("Recursive update"); } } if (binCount != 0) @@ -1886,7 +1925,7 @@ public V compute(K key, if (tab == null || (n = tab.length) == 0) tab = initTable(); else if ((f = tabAt(tab, i = (n - 1) & h)) == null) { - Node r = new ReservationNode<>(); + Node r = new ReservationNode(); synchronized (r) { if (casTabAt(tab, i, null, r)) { binCount = 1; @@ -1894,7 +1933,7 @@ else if ((f = tabAt(tab, i = (n - 1) & h)) == null) { try { if ((val = remappingFunction.apply(key, null)) != null) { delta = 1; - node = new Node<>(h, key, val, null); + node = new Node(h, key, val); } } finally { setTabAt(tab, i, node); @@ -1933,9 +1972,10 @@ else if ((fh = f.hash) == MOVED) if ((e = e.next) == null) { val = remappingFunction.apply(key, null); if (val != null) { + if (pred.next != null) + throw new IllegalStateException("Recursive update"); delta = 1; - pred.next = - new Node<>(h, key, val, null); + pred.next = new Node(h, key, val); } break; } @@ -1965,6 +2005,8 @@ else if (p != null) { setTabAt(tab, i, untreeify(t.first)); } } + else if (f instanceof ReservationNode) + throw new IllegalStateException("Recursive update"); } } if (binCount != 0) { @@ -2011,7 +2053,7 @@ public V merge(K key, V value, BiFunction rem if (tab == null || (n = tab.length) == 0) tab = initTable(); else if ((f = tabAt(tab, i = (n - 1) & h)) == null) { - if (casTabAt(tab, i, null, new Node<>(h, key, value, null))) { + if (casTabAt(tab, i, null, new Node(h, key, value))) { delta = 1; val = value; break; @@ -2046,8 +2088,7 @@ else if ((fh = f.hash) == MOVED) if ((e = e.next) == null) { delta = 1; val = value; - pred.next = - new Node<>(h, key, val, null); + pred.next = new Node(h, key, val); break; } } @@ -2074,6 +2115,8 @@ else if (p != null) { setTabAt(tab, i, untreeify(t.first)); } } + else if (f instanceof ReservationNode) + throw new IllegalStateException("Recursive update"); } } if (binCount != 0) { @@ -2091,14 +2134,13 @@ else if (p != null) { // Hashtable legacy methods /** - * Legacy method testing if some key maps into the specified value - * in this table. + * Tests if some key maps into the specified value in this table. * - * @deprecated This method is identical in functionality to + *

                                      Note that this method is identical in functionality to * {@link #containsValue(Object)}, and exists solely to ensure * full compatibility with class {@link java.util.Hashtable}, * which supported this method prior to introduction of the - * Java Collections framework. + * Java Collections Framework. * * @param value a value to search for * @return {@code true} if and only if some key maps to the @@ -2107,7 +2149,6 @@ else if (p != null) { * {@code false} otherwise * @throws NullPointerException if the specified value is null */ - @Deprecated public boolean contains(Object value) { return containsValue(value); } @@ -2121,7 +2162,7 @@ public boolean contains(Object value) { public Enumeration keys() { Node[] t; int f = (t = table) == null ? 0 : t.length; - return new KeyIterator<>(t, f, 0, f, this); + return new KeyIterator(t, f, 0, f, this); } /** @@ -2133,7 +2174,7 @@ public Enumeration keys() { public Enumeration elements() { Node[] t; int f = (t = table) == null ? 0 : t.length; - return new ValueIterator<>(t, f, 0, f, this); + return new ValueIterator(t, f, 0, f, this); } // ConcurrentHashMap-only methods @@ -2162,8 +2203,8 @@ public long mappingCount() { * @since 1.8 */ public static KeySetView newKeySet() { - return new KeySetView<> - (new ConcurrentHashMap<>(), Boolean.TRUE); + return new KeySetView + (new ConcurrentHashMap(), Boolean.TRUE); } /** @@ -2179,8 +2220,8 @@ public static KeySetView newKeySet() { * @since 1.8 */ public static KeySetView newKeySet(int initialCapacity) { - return new KeySetView<> - (new ConcurrentHashMap<>(initialCapacity), Boolean.TRUE); + return new KeySetView + (new ConcurrentHashMap(initialCapacity), Boolean.TRUE); } /** @@ -2197,7 +2238,7 @@ public static KeySetView newKeySet(int initialCapacity) { public KeySetView keySet(V mappedValue) { if (mappedValue == null) throw new NullPointerException(); - return new KeySetView<>(this, mappedValue); + return new KeySetView(this, mappedValue); } /* ---------------- Special Nodes -------------- */ @@ -2208,7 +2249,7 @@ public KeySetView keySet(V mappedValue) { static final class ForwardingNode extends Node { final Node[] nextTable; ForwardingNode(Node[] tab) { - super(MOVED, null, null, null); + super(MOVED, null, null); this.nextTable = tab; } @@ -2240,11 +2281,11 @@ Node find(int h, Object k) { } /** - * A place-holder node used in computeIfAbsent and compute + * A place-holder node used in computeIfAbsent and compute. */ static final class ReservationNode extends Node { ReservationNode() { - super(RESERVED, null, null, null); + super(RESERVED, null, null); } Node find(int h, Object k) { @@ -2299,15 +2340,15 @@ else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) { * @param check if <0, don't check resize, if <= 1 only check if uncontended */ private final void addCount(long x, int check) { - CounterCell[] as; long b, s; - if ((as = counterCells) != null || + CounterCell[] cs; long b, s; + if ((cs = counterCells) != null || !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) { - CounterCell a; long v; int m; + CounterCell c; long v; int m; boolean uncontended = true; - if (as == null || (m = as.length - 1) < 0 || - (a = as[ThreadLocalRandomUtil.getProbe() & m]) == null || + if (cs == null || (m = cs.length - 1) < 0 || + (c = cs[ThreadLocalRandomUtil.getProbe() & m]) == null || !(uncontended = - U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) { + U.compareAndSwapLong(c, CELLVALUE, v = c.value, v + x))) { fullAddCount(x, uncontended); return; } @@ -2389,17 +2430,8 @@ else if (c <= sc || n >= MAXIMUM_CAPACITY) break; else if (tab == table) { int rs = resizeStamp(n); - if (sc < 0) { - Node[] nt; - if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 || - sc == rs + MAX_RESIZERS || (nt = nextTable) == null || - transferIndex <= 0) - break; - if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1)) - transfer(tab, nt); - } - else if (U.compareAndSwapInt(this, SIZECTL, sc, - (rs << RESIZE_STAMP_SHIFT) + 2)) + if (U.compareAndSwapInt(this, SIZECTL, sc, + (rs << RESIZE_STAMP_SHIFT) + 2)) transfer(tab, null); } } @@ -2426,7 +2458,7 @@ private final void transfer(Node[] tab, Node[] nextTab) { transferIndex = n; } int nextn = nextTab.length; - ForwardingNode fwd = new ForwardingNode<>(nextTab); + ForwardingNode fwd = new ForwardingNode(nextTab); boolean advance = true; boolean finishing = false; // to ensure sweep before committing nextTab for (int i = 0, bound = 0;;) { @@ -2492,9 +2524,9 @@ else if ((fh = f.hash) == MOVED) for (Node p = f; p != lastRun; p = p.next) { int ph = p.hash; K pk = p.key; V pv = p.val; if ((ph & n) == 0) - ln = new Node<>(ph, pk, pv, ln); + ln = new Node(ph, pk, pv, ln); else - hn = new Node<>(ph, pk, pv, hn); + hn = new Node(ph, pk, pv, hn); } setTabAt(nextTab, i, ln); setTabAt(nextTab, i + n, hn); @@ -2508,8 +2540,8 @@ else if (f instanceof TreeBin) { int lc = 0, hc = 0; for (Node e = t.first; e != null; e = e.next) { int h = e.hash; - TreeNode p = new TreeNode<> - (h, e.key, e.val, null, null); + TreeNode p = new TreeNode + (h, e.key, e.val, null, null); if ((h & n) == 0) { if ((p.prev = loTail) == null) lo = p; @@ -2528,9 +2560,9 @@ else if (f instanceof TreeBin) { } } ln = (lc <= UNTREEIFY_THRESHOLD) ? untreeify(lo) : - (hc != 0) ? new TreeBin<>(lo) : t; + (hc != 0) ? new TreeBin(lo) : t; hn = (hc <= UNTREEIFY_THRESHOLD) ? untreeify(hi) : - (lc != 0) ? new TreeBin<>(hi) : t; + (lc != 0) ? new TreeBin(hi) : t; setTabAt(nextTab, i, ln); setTabAt(nextTab, i + n, hn); setTabAt(tab, i, fwd); @@ -2548,19 +2580,19 @@ else if (f instanceof TreeBin) { * A padded cell for distributing counts. Adapted from LongAdder * and Striped64. See their internal docs for explanation. */ - static final class CounterCell { + @sun.misc.Contended + static final class CounterCell { volatile long value; CounterCell(long x) { value = x; } } final long sumCount() { - CounterCell[] as = counterCells; CounterCell a; + CounterCell[] cs = counterCells; long sum = baseCount; - if (as != null) { - for (int i = 0; i < as.length; ++i) { - if ((a = as[i]) != null) - sum += a.value; - } + if (cs != null) { + for (CounterCell c : cs) + if (c != null) + sum += c.value; } return sum; } @@ -2569,15 +2601,15 @@ final long sumCount() { private final void fullAddCount(long x, boolean wasUncontended) { int h; if ((h = ThreadLocalRandomUtil.getProbe()) == 0) { - ThreadLocalRandomUtil.localInit(); // force initialization + ThreadLocalRandomUtil.localInit(); // force initialization h = ThreadLocalRandomUtil.getProbe(); wasUncontended = true; } boolean collide = false; // True if last slot nonempty for (;;) { - CounterCell[] as; CounterCell a; int n; long v; - if ((as = counterCells) != null && (n = as.length) > 0) { - if ((a = as[(n - 1) & h]) == null) { + CounterCell[] cs; CounterCell c; int n; long v; + if ((cs = counterCells) != null && (n = cs.length) > 0) { + if ((c = cs[(n - 1) & h]) == null) { if (cellsBusy == 0) { // Try to attach new Cell CounterCell r = new CounterCell(x); // Optimistic create if (cellsBusy == 0 && @@ -2603,21 +2635,17 @@ private final void fullAddCount(long x, boolean wasUncontended) { } else if (!wasUncontended) // CAS already known to fail wasUncontended = true; // Continue after rehash - else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x)) + else if (U.compareAndSwapLong(c, CELLVALUE, v = c.value, v + x)) break; - else if (counterCells != as || n >= NCPU) + else if (counterCells != cs || n >= NCPU) collide = false; // At max size or stale else if (!collide) collide = true; else if (cellsBusy == 0 && U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) { try { - if (counterCells == as) {// Expand table unless stale - CounterCell[] rs = new CounterCell[n << 1]; - for (int i = 0; i < n; ++i) - rs[i] = as[i]; - counterCells = rs; - } + if (counterCells == cs) // Expand table unless stale + counterCells = Arrays.copyOf(cs, n << 1); } finally { cellsBusy = 0; } @@ -2626,11 +2654,11 @@ else if (cellsBusy == 0 && } h = ThreadLocalRandomUtil.advanceProbe(h); } - else if (cellsBusy == 0 && counterCells == as && + else if (cellsBusy == 0 && counterCells == cs && U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) { boolean init = false; try { // Initialize table - if (counterCells == as) { + if (counterCells == cs) { CounterCell[] rs = new CounterCell[2]; rs[h & 1] = new CounterCell(x); counterCells = rs; @@ -2664,15 +2692,15 @@ else if ((b = tabAt(tab, index)) != null && b.hash >= 0) { TreeNode hd = null, tl = null; for (Node e = b; e != null; e = e.next) { TreeNode p = - new TreeNode<>(e.hash, e.key, e.val, - null, null); + new TreeNode(e.hash, e.key, e.val, + null, null); if ((p.prev = tl) == null) hd = p; else tl.next = p; tl = p; } - setTabAt(tab, index, new TreeBin<>(hd)); + setTabAt(tab, index, new TreeBin(hd)); } } } @@ -2680,12 +2708,12 @@ else if ((b = tabAt(tab, index)) != null && b.hash >= 0) { } /** - * Returns a list on non-TreeNodes replacing those in given list. + * Returns a list of non-TreeNodes replacing those in given list. */ static Node untreeify(Node b) { Node hd = null, tl = null; for (Node q = b; q != null; q = q.next) { - Node p = new Node<>(q.hash, q.key, q.val, null); + Node p = new Node(q.hash, q.key, q.val); if (tl == null) hd = p; else @@ -2695,35 +2723,10 @@ static Node untreeify(Node b) { return hd; } - private static int nodesAt(Node b, Collection> nodes) { - if (b instanceof TreeBin) { - return treeNodesAt(((TreeBin)b).root, nodes); - } else { - int count = 0; - for (Node q = b; q != null; q = q.next) { - nodes.add(new AbstractMap.SimpleImmutableEntry<>(q.key, q.val)); - count++; - } - return count; - } - } - - private static int treeNodesAt(TreeNode root, Collection> nodes) { - if (root == null) { - return 0; - } - - int count = 1; - nodes.add(new AbstractMap.SimpleImmutableEntry(root.key, root.val)); - count += treeNodesAt(root.left, nodes); - count += treeNodesAt(root.right, nodes); - return count; - } - /* ---------------- TreeNodes -------------- */ /** - * Nodes for use in TreeBins + * Nodes for use in TreeBins. */ static final class TreeNode extends Node { TreeNode parent; // red-black tree links @@ -2816,7 +2819,7 @@ static int tieBreakOrder(Object a, Object b) { * Creates bin with initial set of nodes headed by b. */ TreeBin(TreeNode b) { - super(TREEBIN, null, null, null); + super(TREEBIN, null, null); this.first = b; TreeNode r = null; for (TreeNode x = b, next; x != null; x = next) { @@ -2842,7 +2845,7 @@ else if ((kc == null && (kc = comparableClassFor(k)) == null) || (dir = compareComparables(kc, k, pk)) == 0) dir = tieBreakOrder(k, pk); - TreeNode xp = p; + TreeNode xp = p; if ((p = (dir <= 0) ? p.left : p.right) == null) { x.parent = xp; if (dir <= 0) @@ -2942,7 +2945,7 @@ final TreeNode putTreeVal(int h, K k, V v) { for (TreeNode p = root;;) { int dir, ph; K pk; if (p == null) { - first = root = new TreeNode<>(h, k, v, null, null); + first = root = new TreeNode(h, k, v, null, null); break; } else if ((ph = p.hash) > h) @@ -2969,7 +2972,7 @@ else if ((kc == null && TreeNode xp = p; if ((p = (dir <= 0) ? p.left : p.right) == null) { TreeNode x, f = first; - first = x = new TreeNode<>(h, k, v, f, xp); + first = x = new TreeNode(h, k, v, f, xp); if (f != null) f.prev = x; if (dir <= 0) @@ -3286,7 +3289,7 @@ else if ((xpl = xp.left) == x) { } /** - * Recursive invariant check + * Checks invariants recursively for the tree of Nodes rooted at t. */ static boolean checkInvariants(TreeNode t) { TreeNode tp = t.parent, tl = t.left, tr = t.right, @@ -3314,11 +3317,10 @@ static boolean checkInvariants(TreeNode t) { private static final long LOCKSTATE; static { try { - Class k = TreeBin.class; LOCKSTATE = U.objectFieldOffset - (k.getDeclaredField("lockState")); - } catch (Exception e) { - throw new Error(e); + (TreeBin.class.getDeclaredField("lockState")); + } catch (ReflectiveOperationException e) { + throw new ExceptionInInitializerError(e); } } } @@ -3416,7 +3418,7 @@ private void pushState(Node[] t, int i, int n) { if (s != null) spare = s.next; else - s = new TableStack<>(); + s = new TableStack(); s.tab = t; s.length = n; s.index = i; @@ -3474,9 +3476,9 @@ public final void remove() { static final class KeyIterator extends BaseIterator implements Iterator, Enumeration { - KeyIterator(Node[] tab, int index, int size, int limit, + KeyIterator(Node[] tab, int size, int index, int limit, ConcurrentHashMap map) { - super(tab, index, size, limit, map); + super(tab, size, index, limit, map); } public final K next() { @@ -3494,9 +3496,9 @@ public final K next() { static final class ValueIterator extends BaseIterator implements Iterator, Enumeration { - ValueIterator(Node[] tab, int index, int size, int limit, + ValueIterator(Node[] tab, int size, int index, int limit, ConcurrentHashMap map) { - super(tab, index, size, limit, map); + super(tab, size, index, limit, map); } public final V next() { @@ -3514,9 +3516,9 @@ public final V next() { static final class EntryIterator extends BaseIterator implements Iterator> { - EntryIterator(Node[] tab, int index, int size, int limit, + EntryIterator(Node[] tab, int size, int index, int limit, ConcurrentHashMap map) { - super(tab, index, size, limit, map); + super(tab, size, index, limit, map); } public final Map.Entry next() { @@ -3527,12 +3529,12 @@ public final Map.Entry next() { V v = p.val; lastReturned = p; advance(); - return new MapEntry<>(k, v, map); + return new MapEntry(k, v, map); } } /** - * Exported Entry for EntryIterator + * Exported Entry for EntryIterator. */ static final class MapEntry implements Map.Entry { final K key; // non-null @@ -3546,7 +3548,9 @@ static final class MapEntry implements Map.Entry { public K getKey() { return key; } public V getValue() { return val; } public int hashCode() { return key.hashCode() ^ val.hashCode(); } - public String toString() { return key + "=" + val; } + public String toString() { + return ThreadLocalRandomUtil.mapEntryToString(key, val); // EHCACHE SPECIFIC: Code taken from java.util.concurrent.Helpers + } public boolean equals(Object o) { Object k, v; Map.Entry e; @@ -3583,11 +3587,11 @@ static final class KeySpliterator extends Traverser this.est = est; } - public Spliterator trySplit() { + public KeySpliterator trySplit() { int i, f, h; return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null : - new KeySpliterator<>(tab, baseSize, baseLimit = h, - f, est >>>= 1); + new KeySpliterator(tab, baseSize, baseLimit = h, + f, est >>>= 1); } public void forEachRemaining(Consumer action) { @@ -3622,11 +3626,11 @@ static final class ValueSpliterator extends Traverser this.est = est; } - public Spliterator trySplit() { + public ValueSpliterator trySplit() { int i, f, h; return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null : - new ValueSpliterator<>(tab, baseSize, baseLimit = h, - f, est >>>= 1); + new ValueSpliterator(tab, baseSize, baseLimit = h, + f, est >>>= 1); } public void forEachRemaining(Consumer action) { @@ -3662,17 +3666,17 @@ static final class EntrySpliterator extends Traverser this.est = est; } - public Spliterator> trySplit() { + public EntrySpliterator trySplit() { int i, f, h; return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null : - new EntrySpliterator<>(tab, baseSize, baseLimit = h, - f, est >>>= 1, map); + new EntrySpliterator(tab, baseSize, baseLimit = h, + f, est >>>= 1, map); } public void forEachRemaining(Consumer> action) { if (action == null) throw new NullPointerException(); for (Node p; (p = advance()) != null; ) - action.accept(new MapEntry<>(p.key, p.val, map)); + action.accept(new MapEntry(p.key, p.val, map)); } public boolean tryAdvance(Consumer> action) { @@ -3680,7 +3684,7 @@ public boolean tryAdvance(Consumer> action) { Node p; if ((p = advance()) == null) return false; - action.accept(new MapEntry<>(p.key, p.val, map)); + action.accept(new MapEntry(p.key, p.val, map)); return true; } @@ -3721,9 +3725,9 @@ final int batchFor(long b) { public void forEach(long parallelismThreshold, BiConsumer action) { if (action == null) throw new NullPointerException(); - new ForEachMappingTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - action).invoke(); + new ForEachMappingTask + (null, batchFor(parallelismThreshold), 0, 0, table, + action).invoke(); } /** @@ -3768,9 +3772,9 @@ public void forEach(long parallelismThreshold, public U search(long parallelismThreshold, BiFunction searchFunction) { if (searchFunction == null) throw new NullPointerException(); - return new SearchMappingsTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - searchFunction, new AtomicReference<>()).invoke(); + return new SearchMappingsTask + (null, batchFor(parallelismThreshold), 0, 0, table, + searchFunction, new AtomicReference()).invoke(); } /** @@ -3794,9 +3798,9 @@ public U reduce(long parallelismThreshold, BiFunction reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceMappingsTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, reducer).invoke(); + return new MapReduceMappingsTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, reducer).invoke(); } /** @@ -3820,9 +3824,9 @@ public double reduceToDouble(long parallelismThreshold, DoubleBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceMappingsToDoubleTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceMappingsToDoubleTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -3846,9 +3850,9 @@ public long reduceToLong(long parallelismThreshold, LongBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceMappingsToLongTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceMappingsToLongTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -3872,9 +3876,9 @@ public int reduceToInt(long parallelismThreshold, IntBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceMappingsToIntTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceMappingsToIntTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -3888,9 +3892,9 @@ public int reduceToInt(long parallelismThreshold, public void forEachKey(long parallelismThreshold, Consumer action) { if (action == null) throw new NullPointerException(); - new ForEachKeyTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - action).invoke(); + new ForEachKeyTask + (null, batchFor(parallelismThreshold), 0, 0, table, + action).invoke(); } /** @@ -3935,9 +3939,9 @@ public void forEachKey(long parallelismThreshold, public U searchKeys(long parallelismThreshold, Function searchFunction) { if (searchFunction == null) throw new NullPointerException(); - return new SearchKeysTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - searchFunction, new AtomicReference<>()).invoke(); + return new SearchKeysTask + (null, batchFor(parallelismThreshold), 0, 0, table, + searchFunction, new AtomicReference()).invoke(); } /** @@ -3954,9 +3958,9 @@ public U searchKeys(long parallelismThreshold, public K reduceKeys(long parallelismThreshold, BiFunction reducer) { if (reducer == null) throw new NullPointerException(); - return new ReduceKeysTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, reducer).invoke(); + return new ReduceKeysTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, reducer).invoke(); } /** @@ -3980,9 +3984,9 @@ public U reduceKeys(long parallelismThreshold, BiFunction reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceKeysTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, reducer).invoke(); + return new MapReduceKeysTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, reducer).invoke(); } /** @@ -4006,9 +4010,9 @@ public double reduceKeysToDouble(long parallelismThreshold, DoubleBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceKeysToDoubleTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceKeysToDoubleTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -4032,9 +4036,9 @@ public long reduceKeysToLong(long parallelismThreshold, LongBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceKeysToLongTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceKeysToLongTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -4058,9 +4062,9 @@ public int reduceKeysToInt(long parallelismThreshold, IntBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceKeysToIntTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceKeysToIntTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -4075,9 +4079,9 @@ public void forEachValue(long parallelismThreshold, Consumer action) { if (action == null) throw new NullPointerException(); - new ForEachValueTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - action).invoke(); + new ForEachValueTask + (null, batchFor(parallelismThreshold), 0, 0, table, + action).invoke(); } /** @@ -4122,9 +4126,9 @@ public void forEachValue(long parallelismThreshold, public U searchValues(long parallelismThreshold, Function searchFunction) { if (searchFunction == null) throw new NullPointerException(); - return new SearchValuesTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - searchFunction, new AtomicReference<>()).invoke(); + return new SearchValuesTask + (null, batchFor(parallelismThreshold), 0, 0, table, + searchFunction, new AtomicReference()).invoke(); } /** @@ -4140,9 +4144,9 @@ public U searchValues(long parallelismThreshold, public V reduceValues(long parallelismThreshold, BiFunction reducer) { if (reducer == null) throw new NullPointerException(); - return new ReduceValuesTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, reducer).invoke(); + return new ReduceValuesTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, reducer).invoke(); } /** @@ -4166,9 +4170,9 @@ public U reduceValues(long parallelismThreshold, BiFunction reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceValuesTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, reducer).invoke(); + return new MapReduceValuesTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, reducer).invoke(); } /** @@ -4192,9 +4196,9 @@ public double reduceValuesToDouble(long parallelismThreshold, DoubleBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceValuesToDoubleTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceValuesToDoubleTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -4218,9 +4222,9 @@ public long reduceValuesToLong(long parallelismThreshold, LongBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceValuesToLongTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceValuesToLongTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -4244,9 +4248,9 @@ public int reduceValuesToInt(long parallelismThreshold, IntBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceValuesToIntTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceValuesToIntTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -4260,8 +4264,8 @@ public int reduceValuesToInt(long parallelismThreshold, public void forEachEntry(long parallelismThreshold, Consumer> action) { if (action == null) throw new NullPointerException(); - new ForEachEntryTask<>(null, batchFor(parallelismThreshold), 0, 0, table, - action).invoke(); + new ForEachEntryTask(null, batchFor(parallelismThreshold), 0, 0, table, + action).invoke(); } /** @@ -4306,9 +4310,9 @@ public void forEachEntry(long parallelismThreshold, public U searchEntries(long parallelismThreshold, Function, ? extends U> searchFunction) { if (searchFunction == null) throw new NullPointerException(); - return new SearchEntriesTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - searchFunction, new AtomicReference<>()).invoke(); + return new SearchEntriesTask + (null, batchFor(parallelismThreshold), 0, 0, table, + searchFunction, new AtomicReference()).invoke(); } /** @@ -4324,9 +4328,9 @@ public U searchEntries(long parallelismThreshold, public Map.Entry reduceEntries(long parallelismThreshold, BiFunction, Map.Entry, ? extends Map.Entry> reducer) { if (reducer == null) throw new NullPointerException(); - return new ReduceEntriesTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, reducer).invoke(); + return new ReduceEntriesTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, reducer).invoke(); } /** @@ -4350,9 +4354,9 @@ public U reduceEntries(long parallelismThreshold, BiFunction reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceEntriesTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, reducer).invoke(); + return new MapReduceEntriesTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, reducer).invoke(); } /** @@ -4376,9 +4380,9 @@ public double reduceEntriesToDouble(long parallelismThreshold, DoubleBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceEntriesToDoubleTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceEntriesToDoubleTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -4402,9 +4406,9 @@ public long reduceEntriesToLong(long parallelismThreshold, LongBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceEntriesToLongTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceEntriesToLongTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } /** @@ -4428,9 +4432,9 @@ public int reduceEntriesToInt(long parallelismThreshold, IntBinaryOperator reducer) { if (transformer == null || reducer == null) throw new NullPointerException(); - return new MapReduceEntriesToIntTask<> - (null, batchFor(parallelismThreshold), 0, 0, table, - null, transformer, basis, reducer).invoke(); + return new MapReduceEntriesToIntTask + (null, batchFor(parallelismThreshold), 0, 0, table, + null, transformer, basis, reducer).invoke(); } @@ -4474,19 +4478,19 @@ abstract static class CollectionView public abstract boolean contains(Object o); public abstract boolean remove(Object o); - private static final String oomeMsg = "Required array size too large"; + private static final String OOME_MSG = "Required array size too large"; public final Object[] toArray() { long sz = map.mappingCount(); if (sz > MAX_ARRAY_SIZE) - throw new OutOfMemoryError(oomeMsg); + throw new OutOfMemoryError(OOME_MSG); int n = (int)sz; Object[] r = new Object[n]; int i = 0; for (E e : this) { if (i == n) { if (n >= MAX_ARRAY_SIZE) - throw new OutOfMemoryError(oomeMsg); + throw new OutOfMemoryError(OOME_MSG); if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1) n = MAX_ARRAY_SIZE; else @@ -4502,7 +4506,7 @@ public final Object[] toArray() { public final T[] toArray(T[] a) { long sz = map.mappingCount(); if (sz > MAX_ARRAY_SIZE) - throw new OutOfMemoryError(oomeMsg); + throw new OutOfMemoryError(OOME_MSG); int m = (int)sz; T[] r = (a.length >= m) ? a : (T[])java.lang.reflect.Array @@ -4512,7 +4516,7 @@ public final T[] toArray(T[] a) { for (E e : this) { if (i == n) { if (n >= MAX_ARRAY_SIZE) - throw new OutOfMemoryError(oomeMsg); + throw new OutOfMemoryError(OOME_MSG); if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1) n = MAX_ARRAY_SIZE; else @@ -4565,14 +4569,24 @@ public final boolean containsAll(Collection c) { return true; } - public final boolean removeAll(Collection c) { + public boolean removeAll(Collection c) { if (c == null) throw new NullPointerException(); boolean modified = false; - for (Iterator it = iterator(); it.hasNext();) { - if (c.contains(it.next())) { - it.remove(); - modified = true; + // Use (c instanceof Set) as a hint that lookup in c is as + // efficient as this view + Node[] t; + if ((t = map.table) == null) { + return false; + } else if (c instanceof Set && c.size() > t.length) { + for (Iterator it = iterator(); it.hasNext(); ) { + if (c.contains(it.next())) { + it.remove(); + modified = true; + } } + } else { + for (Object e : c) + modified |= remove(e); } return modified; } @@ -4644,7 +4658,7 @@ public Iterator iterator() { Node[] t; ConcurrentHashMap m = map; int f = (t = m.table) == null ? 0 : t.length; - return new KeyIterator<>(t, f, 0, f, m); + return new KeyIterator(t, f, 0, f, m); } /** @@ -4701,19 +4715,19 @@ public boolean equals(Object o) { (containsAll(c) && c.containsAll(this)))); } - public Spliterator _spliterator() { + public Spliterator spliterator() { Node[] t; ConcurrentHashMap m = map; long n = m.sumCount(); int f = (t = m.table) == null ? 0 : t.length; - return new KeySpliterator<>(t, f, 0, f, n < 0L ? 0L : n); + return new KeySpliterator(t, f, 0, f, n < 0L ? 0L : n); } public void forEach(Consumer action) { if (action == null) throw new NullPointerException(); Node[] t; if ((t = map.table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) action.accept(p.key); } @@ -4749,7 +4763,7 @@ public final Iterator iterator() { ConcurrentHashMap m = map; Node[] t; int f = (t = m.table) == null ? 0 : t.length; - return new ValueIterator<>(t, f, 0, f, m); + return new ValueIterator(t, f, 0, f, m); } public final boolean add(V e) { @@ -4759,19 +4773,35 @@ public final boolean addAll(Collection c) { throw new UnsupportedOperationException(); } - public Spliterator _spliterator() { + @Override public boolean removeAll(Collection c) { + if (c == null) throw new NullPointerException(); + boolean modified = false; + for (Iterator it = iterator(); it.hasNext();) { + if (c.contains(it.next())) { + it.remove(); + modified = true; + } + } + return modified; + } + + public boolean removeIf(Predicate filter) { + return map.removeValueIf(filter); + } + + public Spliterator spliterator() { Node[] t; ConcurrentHashMap m = map; long n = m.sumCount(); int f = (t = m.table) == null ? 0 : t.length; - return new ValueSpliterator<>(t, f, 0, f, n < 0L ? 0L : n); + return new ValueSpliterator(t, f, 0, f, n < 0L ? 0L : n); } public void forEach(Consumer action) { if (action == null) throw new NullPointerException(); Node[] t; if ((t = map.table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) action.accept(p.val); } @@ -4812,7 +4842,7 @@ public Iterator> iterator() { ConcurrentHashMap m = map; Node[] t; int f = (t = m.table) == null ? 0 : t.length; - return new EntryIterator<>(t, f, 0, f, m); + return new EntryIterator(t, f, 0, f, m); } public boolean add(Entry e) { @@ -4828,11 +4858,15 @@ public boolean addAll(Collection> c) { return added; } + public boolean removeIf(Predicate> filter) { + return map.removeEntryIf(filter); + } + public final int hashCode() { int h = 0; Node[] t; if ((t = map.table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) { h += p.hashCode(); } @@ -4847,21 +4881,21 @@ public final boolean equals(Object o) { (containsAll(c) && c.containsAll(this)))); } - public Spliterator> spliterator() { + public Spliterator> spliterator() { Node[] t; ConcurrentHashMap m = map; long n = m.sumCount(); int f = (t = m.table) == null ? 0 : t.length; - return new EntrySpliterator<>(t, f, 0, f, n < 0L ? 0L : n, m); + return new EntrySpliterator(t, f, 0, f, n < 0L ? 0L : n, m); } public void forEach(Consumer> action) { if (action == null) throw new NullPointerException(); Node[] t; if ((t = map.table) != null) { - Traverser it = new Traverser<>(t, t.length, 0, t.length); + Traverser it = new Traverser(t, t.length, 0, t.length); for (Node p; (p = it.advance()) != null; ) - action.accept(new MapEntry<>(p.key, p.val, map)); + action.accept(new MapEntry(p.key, p.val, map)); } } @@ -4899,7 +4933,7 @@ else if (par == null) } /** - * Same as Traverser version + * Same as Traverser version. */ final Node advance() { Node e; @@ -4936,7 +4970,7 @@ private void pushState(Node[] t, int i, int n) { if (s != null) spare = s.next; else - s = new TableStack<>(); + s = new TableStack(); s.tab = t; s.length = n; s.index = i; @@ -4984,9 +5018,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - new ForEachKeyTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - action).fork(); + new ForEachKeyTask + (this, batch >>>= 1, baseLimit = h, f, tab, + action).fork(); } for (Node p; (p = advance()) != null;) action.accept(p.key); @@ -5011,9 +5045,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - new ForEachValueTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - action).fork(); + new ForEachValueTask + (this, batch >>>= 1, baseLimit = h, f, tab, + action).fork(); } for (Node p; (p = advance()) != null;) action.accept(p.val); @@ -5038,9 +5072,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - new ForEachEntryTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - action).fork(); + new ForEachEntryTask + (this, batch >>>= 1, baseLimit = h, f, tab, + action).fork(); } for (Node p; (p = advance()) != null; ) action.accept(p); @@ -5065,9 +5099,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - new ForEachMappingTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - action).fork(); + new ForEachMappingTask + (this, batch >>>= 1, baseLimit = h, f, tab, + action).fork(); } for (Node p; (p = advance()) != null; ) action.accept(p.key, p.val); @@ -5232,9 +5266,9 @@ public final void compute() { if (result.get() != null) return; addToPendingCount(1); - new SearchKeysTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - searchFunction, result).fork(); + new SearchKeysTask + (this, batch >>>= 1, baseLimit = h, f, tab, + searchFunction, result).fork(); } while (result.get() == null) { U u; @@ -5276,9 +5310,9 @@ public final void compute() { if (result.get() != null) return; addToPendingCount(1); - new SearchValuesTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - searchFunction, result).fork(); + new SearchValuesTask + (this, batch >>>= 1, baseLimit = h, f, tab, + searchFunction, result).fork(); } while (result.get() == null) { U u; @@ -5320,9 +5354,9 @@ public final void compute() { if (result.get() != null) return; addToPendingCount(1); - new SearchEntriesTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - searchFunction, result).fork(); + new SearchEntriesTask + (this, batch >>>= 1, baseLimit = h, f, tab, + searchFunction, result).fork(); } while (result.get() == null) { U u; @@ -5364,9 +5398,9 @@ public final void compute() { if (result.get() != null) return; addToPendingCount(1); - new SearchMappingsTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - searchFunction, result).fork(); + new SearchMappingsTask + (this, batch >>>= 1, baseLimit = h, f, tab, + searchFunction, result).fork(); } while (result.get() == null) { U u; @@ -5405,9 +5439,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new ReduceKeysTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, reducer)).fork(); + (rights = new ReduceKeysTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, reducer)).fork(); } K r = null; for (Node p; (p = advance()) != null; ) { @@ -5453,9 +5487,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new ReduceValuesTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, reducer)).fork(); + (rights = new ReduceValuesTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, reducer)).fork(); } V r = null; for (Node p; (p = advance()) != null; ) { @@ -5501,9 +5535,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new ReduceEntriesTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, reducer)).fork(); + (rights = new ReduceEntriesTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, reducer)).fork(); } Map.Entry r = null; for (Node p; (p = advance()) != null; ) @@ -5552,9 +5586,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceKeysTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, reducer)).fork(); + (rights = new MapReduceKeysTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, reducer)).fork(); } U r = null; for (Node p; (p = advance()) != null; ) { @@ -5606,9 +5640,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceValuesTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, reducer)).fork(); + (rights = new MapReduceValuesTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, reducer)).fork(); } U r = null; for (Node p; (p = advance()) != null; ) { @@ -5660,9 +5694,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceEntriesTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, reducer)).fork(); + (rights = new MapReduceEntriesTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, reducer)).fork(); } U r = null; for (Node p; (p = advance()) != null; ) { @@ -5714,9 +5748,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceMappingsTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, reducer)).fork(); + (rights = new MapReduceMappingsTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, reducer)).fork(); } U r = null; for (Node p; (p = advance()) != null; ) { @@ -5771,9 +5805,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceKeysToDoubleTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceKeysToDoubleTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsDouble(r, transformer.applyAsDouble(p.key)); @@ -5821,9 +5855,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceValuesToDoubleTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceValuesToDoubleTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsDouble(r, transformer.applyAsDouble(p.val)); @@ -5871,9 +5905,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceEntriesToDoubleTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceEntriesToDoubleTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsDouble(r, transformer.applyAsDouble(p)); @@ -5921,9 +5955,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceMappingsToDoubleTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceMappingsToDoubleTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsDouble(r, transformer.applyAsDouble(p.key, p.val)); @@ -5971,9 +6005,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceKeysToLongTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceKeysToLongTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsLong(r, transformer.applyAsLong(p.key)); @@ -6021,9 +6055,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceValuesToLongTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceValuesToLongTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsLong(r, transformer.applyAsLong(p.val)); @@ -6071,9 +6105,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceEntriesToLongTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceEntriesToLongTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsLong(r, transformer.applyAsLong(p)); @@ -6121,9 +6155,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceMappingsToLongTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceMappingsToLongTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsLong(r, transformer.applyAsLong(p.key, p.val)); @@ -6171,9 +6205,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceKeysToIntTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceKeysToIntTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsInt(r, transformer.applyAsInt(p.key)); @@ -6221,9 +6255,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceValuesToIntTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceValuesToIntTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsInt(r, transformer.applyAsInt(p.val)); @@ -6271,9 +6305,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceEntriesToIntTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceEntriesToIntTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsInt(r, transformer.applyAsInt(p)); @@ -6321,9 +6355,9 @@ public final void compute() { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); - (rights = new MapReduceMappingsToIntTask<> - (this, batch >>>= 1, baseLimit = h, f, tab, - rights, transformer, r, reducer)).fork(); + (rights = new MapReduceMappingsToIntTask + (this, batch >>>= 1, baseLimit = h, f, tab, + rights, transformer, r, reducer)).fork(); } for (Node p; (p = advance()) != null; ) r = reducer.applyAsInt(r, transformer.applyAsInt(p.key, p.val)); @@ -6350,38 +6384,103 @@ public final void compute() { private static final long BASECOUNT; private static final long CELLSBUSY; private static final long CELLVALUE; - private static final long ABASE; + private static final int ABASE; private static final int ASHIFT; static { try { - Class k = ConcurrentHashMap.class; SIZECTL = U.objectFieldOffset - (k.getDeclaredField("sizeCtl")); + (ConcurrentHashMap.class.getDeclaredField("sizeCtl")); TRANSFERINDEX = U.objectFieldOffset - (k.getDeclaredField("transferIndex")); + (ConcurrentHashMap.class.getDeclaredField("transferIndex")); BASECOUNT = U.objectFieldOffset - (k.getDeclaredField("baseCount")); + (ConcurrentHashMap.class.getDeclaredField("baseCount")); CELLSBUSY = U.objectFieldOffset - (k.getDeclaredField("cellsBusy")); - Class ck = CounterCell.class; + (ConcurrentHashMap.class.getDeclaredField("cellsBusy")); + CELLVALUE = U.objectFieldOffset - (ck.getDeclaredField("value")); - Class ak = Node[].class; - ABASE = U.arrayBaseOffset(ak); - int scale = U.arrayIndexScale(ak); + (CounterCell.class.getDeclaredField("value")); + + ABASE = U.arrayBaseOffset(Node[].class); + int scale = U.arrayIndexScale(Node[].class); if ((scale & (scale - 1)) != 0) - throw new Error("data type scale not a power of two"); + throw new ExceptionInInitializerError("array index scale not a power of two"); ASHIFT = 31 - Integer.numberOfLeadingZeros(scale); - } catch (Exception e) { - throw new Error(e); + } catch (ReflectiveOperationException e) { + throw new ExceptionInInitializerError(e); } + + // Reduce the risk of rare disastrous classloading in first call to + // LockSupport.park: https://bugs.openjdk.java.net/browse/JDK-8074773 + Class ensureLoaded = LockSupport.class; + + // Eager class load observed to help JIT during startup + ensureLoaded = ReservationNode.class; + } + + // EHCACHE SPECIFIC + private static int treeNodesAt(TreeNode root, Collection> nodes) { + if (root == null) { + return 0; + } + + int count = 1; + nodes.add(new AbstractMap.SimpleImmutableEntry<>(root.key, root.val)); + count += treeNodesAt(root.left, nodes); + count += treeNodesAt(root.right, nodes); + return count; + } + + private static int nodesAt(Node b, Collection> nodes) { + if (b instanceof TreeBin) { + return treeNodesAt(((TreeBin)b).root, nodes); + } else { + int count = 0; + for (Node q = b; q != null; q = q.next) { + nodes.add(new AbstractMap.SimpleImmutableEntry<>(q.key, q.val)); + count++; + } + return count; + } + } + + /** + * Remove and return all mappings for which the keys have the specified hashcode. + * @param keyHash the keys' hashcode. + * @return the removed mappings. + */ + public final Collection> removeAllWithHash(int keyHash) { + List> invalidated = new ArrayList<>(); + + int hash = spread(keyHash); + for (Node[] tab = table; ; ) { + Node f; + int n, i; + if (tab == null || (n = tab.length) == 0 || + (f = tabAt(tab, i = (n - 1) & hash)) == null) + break; + else if (f.hash == MOVED) + tab = helpTransfer(tab, f); + else { + int nodesCount = 0; + synchronized (f) { + if (tabAt(tab, i) == f) { + nodesCount = nodesAt(f, invalidated); + setTabAt(tab, i, null); + } + } + if (nodesCount > 0) { + addCount(-nodesCount, -nodesCount); + } + } + } + return invalidated; } public Entry getEvictionCandidate(Random rndm, int size, Comparator prioritizer, EvictionAdvisor evictionAdvisor) { Node[] tab = table; if (tab == null || size == 0) { - return null; + return null; } K maxKey = null; @@ -6445,5 +6544,5 @@ private Entry getEvictionCandidateWrap(Node[] tab, int start, int siz return new MapEntry<>(maxKey, maxVal, this); } } - + // END OF EHCACHE SPECIFIC } diff --git a/ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/EvictingConcurrentMap.java b/ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/EvictingConcurrentMap.java new file mode 100644 index 0000000000..88e191b5e2 --- /dev/null +++ b/ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/EvictingConcurrentMap.java @@ -0,0 +1,57 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.concurrent; + +import org.ehcache.config.EvictionAdvisor; + +import java.util.Collection; +import java.util.Comparator; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.ConcurrentMap; + +public interface EvictingConcurrentMap extends ConcurrentMap{ + + /** + * Return the preferred entry to evict based on a sample of entries taken from the map. + * + * @param rndm Random implementation used to determine the sample randomly + * @param size Number of sampled entries + * @param prioritizer Prioritizer used to determine the best entry to evict in the sample + * @param evictionAdvisor Can veto against the eviction of an entry + * @return Entry to evict or null is none was found + */ + Entry getEvictionCandidate(Random rndm, int size, Comparator prioritizer, EvictionAdvisor evictionAdvisor); + + /** + * Returns the number of mappings. This method should be used + * instead of {@link #size} because a ConcurrentHashMap may + * contain more mappings than can be represented as an int. The + * value returned is an estimate; the actual count may differ if + * there are concurrent insertions or removals. + * + * @return the number of mappings + */ + long mappingCount(); + + /** + * Remove all entries for a given hashcode (as returned by {@code key.hashCode()}). + * + * @param keyHash remove entries having this hashcode + * @return the removed entries + */ + Collection> removeAllWithHash(int keyHash); +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/concurrent/ThreadLocalRandomUtil.java b/ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/ThreadLocalRandomUtil.java similarity index 81% rename from impl/src/main/java/org/ehcache/impl/internal/concurrent/ThreadLocalRandomUtil.java rename to ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/ThreadLocalRandomUtil.java index 6b4773b4bc..91a00c1949 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/concurrent/ThreadLocalRandomUtil.java +++ b/ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/ThreadLocalRandomUtil.java @@ -87,4 +87,23 @@ static final void localInit() { // there shouldn't be any drawback to do it that way ThreadLocalRandom.current(); } + + /** Optimized form of: key + "=" + val */ + static String mapEntryToString(Object key, Object val) { + final String k, v; + final int klen, vlen; + final char[] chars = + new char[(klen = (k = objectToString(key)).length()) + + (vlen = (v = objectToString(val)).length()) + 1]; + k.getChars(0, klen, chars, 0); + chars[klen] = '='; + v.getChars(0, vlen, chars, klen + 1); + return new String(chars); + } + + private static String objectToString(Object x) { + // Extreme compatibility with StringBuilder.append(null) + String s; + return (x == null || (s = x.toString()) == null) ? "null" : s; + } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/concurrent/package-info.java b/ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/package-info.java similarity index 100% rename from impl/src/main/java/org/ehcache/impl/internal/concurrent/package-info.java rename to ehcache-impl/src/unsafe/java/org/ehcache/impl/internal/concurrent/package-info.java diff --git a/ehcache-management/build.gradle b/ehcache-management/build.gradle new file mode 100644 index 0000000000..4110cbd634 --- /dev/null +++ b/ehcache-management/build.gradle @@ -0,0 +1,49 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.internal-module' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 Management and Monitoring module' + description = 'The Management and Monitoring module of Ehcache 3' + } +} + +dependencies { + implementation "org.terracotta:statistics:$statisticVersion" + // optional: if we want xml config + compileOnly project(':ehcache-xml') + + // optional: if we want to use the clustered management layer + compileOnly project(':clustered:ehcache-client') + compileOnly "org.terracotta:entity-client-api:$terracottaApisVersion" + compileOnly ("org.terracotta.management:nms-agent-entity-client:$terracottaPlatformVersion") { + // This is to avoid stats lib being directly used. + exclude group:'org.terracotta', module:'statistics' + } + + compileOnly project(':ehcache-api') + compileOnly project(':ehcache-core') + compileOnly project(':ehcache-impl') + testImplementation "org.terracotta.management:management-registry:$terracottaPlatformVersion" + testImplementation project(':ehcache-xml') + testImplementation project(':ehcache-impl') + testImplementation "com.fasterxml.jackson.core:jackson-databind:$jacksonVersion" + testImplementation testFixtures(project(':ehcache-xml')) +} diff --git a/management/config/checkstyle-suppressions.xml b/ehcache-management/config/checkstyle-suppressions.xml similarity index 100% rename from management/config/checkstyle-suppressions.xml rename to ehcache-management/config/checkstyle-suppressions.xml diff --git a/management/src/main/java/org/ehcache/management/CollectorService.java b/ehcache-management/src/main/java/org/ehcache/management/CollectorService.java similarity index 100% rename from management/src/main/java/org/ehcache/management/CollectorService.java rename to ehcache-management/src/main/java/org/ehcache/management/CollectorService.java diff --git a/ehcache-management/src/main/java/org/ehcache/management/ExtendedStatisticsService.java b/ehcache-management/src/main/java/org/ehcache/management/ExtendedStatisticsService.java new file mode 100644 index 0000000000..b0441c075c --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/ExtendedStatisticsService.java @@ -0,0 +1,74 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.management; + +import org.ehcache.Cache; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.management.registry.LatencyHistogramConfiguration; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.Statistic; + +import java.io.Serializable; +import java.util.Collection; +import java.util.Map; +import java.util.function.LongSupplier; + +public interface ExtendedStatisticsService extends StatisticsService { + + /** + * Create statistics registry + * @param cacheName name (alias) of the cache + * @param cache the {@link Cache} associated with the given alias + * @param timeSource source of time for statistics maintenance + */ + void createCacheRegistry(String cacheName, Cache cache, LongSupplier timeSource); + + /** + * Registers a cache for statistics + * @param cacheName name (alias) of the cache + */ + void registerCacheStatistics(String cacheName); + + /** + * Returns the Statistics descriptor for the cache with the given alias + * @param cacheName name (alias) of the cache + * @return the collection of {@link StatisticDescriptor}s of the cache + */ + Collection getCacheDescriptors(String cacheName); + + /** + * Registers derived statistics for the cache + * @param the generic type of statistics + * @param cacheName name (alias) of the cache + * @param cache the cache associated with the given alias + * @param statName name of the statistic + * @param outcome Class of the type of statistics + * @param derivedName visible name of the statistics + * @param configuration the histogram configuration for statistics + */ + , K, V> void registerDerivedStatistics(String cacheName, Cache cache, String statName, T outcome, String derivedName, LatencyHistogramConfiguration configuration); + + /** + * Returns the statistics for the cache + * @param cacheName name (alias) of the cache + * @param statisticNames names of the statistics + * @param since time since statistics needs to be collected + * @return map of statisticNames and statistics + */ + Map> collectStatistics(String cacheName, Collection statisticNames, long since); + +} diff --git a/management/src/main/java/org/ehcache/management/ManagementRegistryService.java b/ehcache-management/src/main/java/org/ehcache/management/ManagementRegistryService.java similarity index 100% rename from management/src/main/java/org/ehcache/management/ManagementRegistryService.java rename to ehcache-management/src/main/java/org/ehcache/management/ManagementRegistryService.java diff --git a/ehcache-management/src/main/java/org/ehcache/management/ManagementRegistryServiceConfiguration.java b/ehcache-management/src/main/java/org/ehcache/management/ManagementRegistryServiceConfiguration.java new file mode 100644 index 0000000000..3a98419716 --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/ManagementRegistryServiceConfiguration.java @@ -0,0 +1,58 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management; + +import org.ehcache.management.registry.LatencyHistogramConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.terracotta.management.model.context.Context; + +import java.util.Collection; + +/** + * Configuration interface for a {@link ManagementRegistryService}. + */ +public interface ManagementRegistryServiceConfiguration extends ServiceCreationConfiguration { + + /** + * The context used to identify this cache manager + */ + Context getContext(); + + /** + * Gets the alias of the executor to use for asynchronous collector service tasks. + * + * @return The static colector executor alias + */ + String getCollectorExecutorAlias(); + + /** + * The users tags that can be used to filter this client's management registry amongst others + */ + Collection getTags(); + + /** + * @return an identifier used to identify this running instance. It will be the same even if a clustered Ehcache client reconnects (and clientId changes). + */ + String getInstanceId(); + + /** + * Configuration of the latency histogram derived property. It is used to setup + * different resolution parameters of the histogram. + * + * @return configuration of the latency histogram + */ + LatencyHistogramConfiguration getLatencyHistogramConfiguration(); +} diff --git a/management/src/main/java/org/ehcache/management/SharedManagementService.java b/ehcache-management/src/main/java/org/ehcache/management/SharedManagementService.java similarity index 100% rename from management/src/main/java/org/ehcache/management/SharedManagementService.java rename to ehcache-management/src/main/java/org/ehcache/management/SharedManagementService.java diff --git a/management/src/main/java/org/ehcache/management/cluster/Clustering.java b/ehcache-management/src/main/java/org/ehcache/management/cluster/Clustering.java similarity index 96% rename from management/src/main/java/org/ehcache/management/cluster/Clustering.java rename to ehcache-management/src/main/java/org/ehcache/management/cluster/Clustering.java index 4b35df0642..d441141ad2 100644 --- a/management/src/main/java/org/ehcache/management/cluster/Clustering.java +++ b/ehcache-management/src/main/java/org/ehcache/management/cluster/Clustering.java @@ -46,7 +46,7 @@ public static boolean isAvailable(ServiceProvider serviceProvider) { /** * Creates a new ${@link ClusteringManagementService} to handle the management integration with the cluster */ - public static ClusteringManagementService newClusteringManagementService(ClusteringManagementServiceConfiguration configuration) { + public static ClusteringManagementService newClusteringManagementService(ClusteringManagementServiceConfiguration configuration) { return new DefaultClusteringManagementService(configuration); } diff --git a/management/src/main/java/org/ehcache/management/cluster/ClusteringManagementService.java b/ehcache-management/src/main/java/org/ehcache/management/cluster/ClusteringManagementService.java similarity index 100% rename from management/src/main/java/org/ehcache/management/cluster/ClusteringManagementService.java rename to ehcache-management/src/main/java/org/ehcache/management/cluster/ClusteringManagementService.java diff --git a/management/src/main/java/org/ehcache/management/cluster/ClusteringManagementServiceConfiguration.java b/ehcache-management/src/main/java/org/ehcache/management/cluster/ClusteringManagementServiceConfiguration.java similarity index 90% rename from management/src/main/java/org/ehcache/management/cluster/ClusteringManagementServiceConfiguration.java rename to ehcache-management/src/main/java/org/ehcache/management/cluster/ClusteringManagementServiceConfiguration.java index a20d72cde9..a18ab82e93 100644 --- a/management/src/main/java/org/ehcache/management/cluster/ClusteringManagementServiceConfiguration.java +++ b/ehcache-management/src/main/java/org/ehcache/management/cluster/ClusteringManagementServiceConfiguration.java @@ -20,7 +20,7 @@ /** * Configuration interface for a {@link ClusteringManagementService}. */ -public interface ClusteringManagementServiceConfiguration extends ServiceCreationConfiguration { +public interface ClusteringManagementServiceConfiguration extends ServiceCreationConfiguration { /** * @return The alias of the executor used to run management call queries. diff --git a/ehcache-management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java b/ehcache-management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java new file mode 100644 index 0000000000..22d2d14bd8 --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java @@ -0,0 +1,192 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.cluster; + +import org.ehcache.Cache; +import org.ehcache.Status; +import org.ehcache.clustered.client.service.ClientEntityFactory; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.client.service.EntityService; +import org.ehcache.core.events.CacheManagerListener; +import org.ehcache.core.spi.service.CacheManagerProviderService; +import org.ehcache.core.spi.service.ExecutionService; +import org.ehcache.core.spi.store.InternalCacheManager; +import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.management.CollectorService; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.registry.DefaultCollectorService; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.LoggerFactory; +import org.terracotta.exception.EntityNotFoundException; +import org.terracotta.management.entity.nms.agent.client.DefaultNmsAgentService; +import org.terracotta.management.entity.nms.agent.client.NmsAgentEntity; +import org.terracotta.management.entity.nms.agent.client.NmsAgentService; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.notification.ContextualNotification; +import org.terracotta.management.model.stats.ContextualStatistics; + +import java.util.Collection; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; + +import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; + +@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, TimeSourceService.class, ManagementRegistryService.class, EntityService.class, ClusteringService.class}) +public class DefaultClusteringManagementService implements ClusteringManagementService, CacheManagerListener, CollectorService.Collector { + + private final ClusteringManagementServiceConfiguration configuration; + + private volatile ManagementRegistryService managementRegistryService; + private volatile CollectorService collectorService; + private volatile NmsAgentService nmsAgentService; + private volatile ClientEntityFactory nmsAgentFactory; + private volatile InternalCacheManager cacheManager; + private volatile ExecutorService managementCallExecutor; + private volatile ClusteringService clusteringService; + + public DefaultClusteringManagementService() { + this(new DefaultClusteringManagementServiceConfiguration()); + } + + public DefaultClusteringManagementService(ClusteringManagementServiceConfiguration configuration) { + this.configuration = configuration == null ? new DefaultClusteringManagementServiceConfiguration() : configuration; + } + + @Override + public void start(ServiceProvider serviceProvider) { + // register this service BEFORE any other one so that the NMS entity gets created first in stateTransition() before + // the other services are called + this.cacheManager = serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); + this.cacheManager.registerListener(this); + + this.clusteringService = serviceProvider.getService(ClusteringService.class); + this.managementRegistryService = serviceProvider.getService(ManagementRegistryService.class); + // get an ordered executor to keep ordering of management call requests + this.managementCallExecutor = serviceProvider.getService(ExecutionService.class).getOrderedExecutor( + configuration.getManagementCallExecutorAlias(), + new ArrayBlockingQueue<>(configuration.getManagementCallQueueSize())); + + this.collectorService = new DefaultCollectorService(this); + this.collectorService.start(serviceProvider); + + EntityService entityService = serviceProvider.getService(EntityService.class); + this.nmsAgentFactory = entityService.newClientEntityFactory("NmsAgent", NmsAgentEntity.class, 1, null); + } + + @Override + public void stop() { + if (collectorService != null) { + collectorService.stop(); + collectorService = null; + } + + if (managementCallExecutor != null) { + shutdownNow(managementCallExecutor); + managementCallExecutor = null; + } + + // nullify so that no further actions are done with them (see null-checks below) + if (nmsAgentService != null) { + nmsAgentService.close(); + nmsAgentService = null; + } + + managementRegistryService = null; + } + + @Override + public void cacheAdded(String alias, Cache cache) { + } + + @Override + public void cacheRemoved(String alias, Cache cache) { + } + + @Override + public void stateTransition(Status from, Status to) { + // we are only interested when cache manager is initializing (but at the end of the initialization) + switch (to) { + + case AVAILABLE: { + nmsAgentService = createNmsAgentService(); + nmsAgentService.sendStates(); + nmsAgentService.setTags(managementRegistryService.getConfiguration().getTags()); + break; + } + + case UNINITIALIZED: { + this.cacheManager.deregisterListener(this); + break; + } + + case MAINTENANCE: + // in case we need management capabilities in maintenance mode + break; + + default: + throw new AssertionError("Unsupported state: " + to); + } + } + + private NmsAgentService createNmsAgentService() { + // root context will contain: instanceId=... and cacheManagerName=... + final Context rootContext = managementRegistryService.getConfiguration().getContext(); + DefaultNmsAgentService nmsAgentService = new DefaultNmsAgentService(rootContext, () -> { + try { + return nmsAgentFactory.retrieve(); + } catch (EntityNotFoundException e) { + // should never occur because entity is permanent + throw new AssertionError("Entity " + NmsAgentEntity.class.getSimpleName() + " not found", e.getCause()); + } + }); + + nmsAgentService.setOperationTimeout(configuration.getManagementCallTimeoutSec(), TimeUnit.SECONDS); + nmsAgentService.setManagementRegistry(managementRegistryService); + + // setup the executor that will handle the management call requests received from the server. We log failures. + nmsAgentService.setManagementCallExecutor(new LoggingExecutor( + managementCallExecutor, + LoggerFactory.getLogger(getClass().getName() + ".managementCallExecutor"))); + + // when Ehcache reconnects, we resend to the server the management states + clusteringService.addConnectionRecoveryListener(() -> { + nmsAgentService.flushEntity(); + nmsAgentService.sendStates(); + }); + + return nmsAgentService; + } + + @Override + public void onNotification(ContextualNotification notification) { + NmsAgentService service = nmsAgentService; + if (service != null && clusteringService.isConnected()) { + service.pushNotification(notification); + } + } + + @Override + public void onStatistics(Collection statistics) { + NmsAgentService service = nmsAgentService; + if (service != null && clusteringService.isConnected()) { + service.pushStatistics(statistics); + } + } + +} diff --git a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementServiceConfiguration.java b/ehcache-management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementServiceConfiguration.java similarity index 97% rename from management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementServiceConfiguration.java rename to ehcache-management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementServiceConfiguration.java index 95c460f4b1..e502abcfeb 100644 --- a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementServiceConfiguration.java +++ b/ehcache-management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementServiceConfiguration.java @@ -15,7 +15,7 @@ */ package org.ehcache.management.cluster; -public class DefaultClusteringManagementServiceConfiguration implements ClusteringManagementServiceConfiguration { +public class DefaultClusteringManagementServiceConfiguration implements ClusteringManagementServiceConfiguration { private String managementCallExecutorAlias = "managementCallExecutor"; private int managementCallQueueSize = 1024; diff --git a/management/src/main/java/org/ehcache/management/cluster/LoggingExecutor.java b/ehcache-management/src/main/java/org/ehcache/management/cluster/LoggingExecutor.java similarity index 100% rename from management/src/main/java/org/ehcache/management/cluster/LoggingExecutor.java rename to ehcache-management/src/main/java/org/ehcache/management/cluster/LoggingExecutor.java diff --git a/management/src/main/java/org/ehcache/management/providers/CacheBinding.java b/ehcache-management/src/main/java/org/ehcache/management/providers/CacheBinding.java similarity index 81% rename from management/src/main/java/org/ehcache/management/providers/CacheBinding.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/CacheBinding.java index 1364aecbf7..d03f86b367 100644 --- a/management/src/main/java/org/ehcache/management/providers/CacheBinding.java +++ b/ehcache-management/src/main/java/org/ehcache/management/providers/CacheBinding.java @@ -19,28 +19,29 @@ import org.terracotta.management.registry.Named; import org.terracotta.management.registry.RequiredContext; +import java.util.Objects; + /** * Class representing an association between an object and an alias, name, identifier */ -@RequiredContext({@Named("cacheManagerName"), @Named("cacheName")}) +@RequiredContext({@Named("instanceId"), @Named("cacheManagerName"), @Named("cacheName")}) public final class CacheBinding { private final String alias; - private final Cache cache; + private final Cache cache; public CacheBinding(String alias, Cache cache) { - if (alias == null) throw new NullPointerException(); - if (cache == null) throw new NullPointerException(); - this.alias = alias; - this.cache = cache; + this.alias = Objects.requireNonNull(alias); + this.cache = Objects.requireNonNull(cache); } public String getAlias() { return alias; } - public Cache getCache() { - return cache; + @SuppressWarnings("unchecked") + public Cache getCache() { + return (Cache) cache; } @Override diff --git a/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java b/ehcache-management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java similarity index 100% rename from management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java diff --git a/management/src/main/java/org/ehcache/management/providers/EhcacheStatisticCollectorProvider.java b/ehcache-management/src/main/java/org/ehcache/management/providers/EhcacheStatisticCollectorProvider.java similarity index 94% rename from management/src/main/java/org/ehcache/management/providers/EhcacheStatisticCollectorProvider.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/EhcacheStatisticCollectorProvider.java index 9f2dee44e1..dc159e82ef 100644 --- a/management/src/main/java/org/ehcache/management/providers/EhcacheStatisticCollectorProvider.java +++ b/ehcache-management/src/main/java/org/ehcache/management/providers/EhcacheStatisticCollectorProvider.java @@ -20,7 +20,7 @@ import org.terracotta.management.registry.RequiredContext; import org.terracotta.management.registry.collect.StatisticCollectorProvider; -@RequiredContext(@Named("cacheManagerName")) +@RequiredContext({@Named("instanceId"), @Named("cacheManagerName")}) public class EhcacheStatisticCollectorProvider extends StatisticCollectorProvider { public EhcacheStatisticCollectorProvider(ManagementRegistryServiceConfiguration configuration) { super(configuration.getContext()); diff --git a/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java b/ehcache-management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java similarity index 97% rename from management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java index 1ab7966668..e43cdcdce5 100644 --- a/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java +++ b/ehcache-management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java @@ -15,7 +15,7 @@ */ package org.ehcache.management.providers; -import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.core.util.ClassLoading; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.terracotta.management.model.capabilities.descriptors.Descriptor; import org.terracotta.management.model.context.Context; diff --git a/management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionProvider.java b/ehcache-management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionProvider.java similarity index 94% rename from management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionProvider.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionProvider.java index 32bb963b2a..97234095a6 100644 --- a/management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionProvider.java +++ b/ehcache-management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionProvider.java @@ -23,7 +23,7 @@ import org.terracotta.management.registry.ExposedObject; @Named("ActionsCapability") -@RequiredContext({@Named("cacheManagerName"), @Named("cacheName")}) +@RequiredContext({@Named("instanceId"), @Named("cacheManagerName"), @Named("cacheName")}) public class EhcacheActionProvider extends AbstractActionManagementProvider { private final ManagementRegistryServiceConfiguration registryServiceConfiguration; diff --git a/management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionWrapper.java b/ehcache-management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionWrapper.java similarity index 100% rename from management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionWrapper.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/actions/EhcacheActionWrapper.java diff --git a/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java b/ehcache-management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java similarity index 85% rename from management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java index 2f96f69ce0..c34705d2a3 100644 --- a/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java +++ b/ehcache-management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java @@ -29,13 +29,15 @@ import java.util.Collection; @Named("SettingsCapability") -@RequiredContext({@Named("cacheManagerName")}) +@RequiredContext({@Named("instanceId"), @Named("cacheManagerName")}) public class EhcacheSettingsProvider extends CacheBindingManagementProvider { + private final ManagementRegistryServiceConfiguration configuration; private final CacheManager cacheManager; public EhcacheSettingsProvider(ManagementRegistryServiceConfiguration configuration, CacheManager cacheManager) { super(configuration); + this.configuration = configuration; this.cacheManager = cacheManager; } @@ -54,9 +56,11 @@ public Collection getDescriptors() { private Descriptor cacheManagerSettings() { return new Settings() .set("cacheManagerDescription", ((HumanReadable)cacheManager.getRuntimeConfiguration()).readableString()) - .set("status", cacheManager.getStatus()) + // NEVER DO THAT: This might block cm.close() because getStatus() is blocking. + //.set("status", cacheManager.getStatus()) + .set("instanceId", configuration.getInstanceId()) .set("managementContext", new Settings(registryConfiguration.getContext())) - .set("tags", registryConfiguration.getTags().toArray(new String[registryConfiguration.getTags().size()])); + .set("tags", registryConfiguration.getTags().toArray(new String[0])); } } diff --git a/management/src/main/java/org/ehcache/management/providers/settings/ExposedCacheSettings.java b/ehcache-management/src/main/java/org/ehcache/management/providers/settings/ExposedCacheSettings.java similarity index 98% rename from management/src/main/java/org/ehcache/management/providers/settings/ExposedCacheSettings.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/settings/ExposedCacheSettings.java index d736d2a0cf..83f871ca66 100644 --- a/management/src/main/java/org/ehcache/management/providers/settings/ExposedCacheSettings.java +++ b/ehcache-management/src/main/java/org/ehcache/management/providers/settings/ExposedCacheSettings.java @@ -47,7 +47,7 @@ class ExposedCacheSettings extends ExposedCacheBinding { public Collection getDescriptors() { final CacheConfiguration cacheConfig = cacheBinding.getCache().getRuntimeConfiguration(); List> resourceTypes = new ArrayList<>(cacheConfig.getResourcePools().getResourceTypeSet()); - Collections.sort(resourceTypes, RESOURCE_TYPE_COMPARATOR); + resourceTypes.sort(RESOURCE_TYPE_COMPARATOR); Map> map = new LinkedHashMap<>(); for (ResourceType type : resourceTypes) { map.put(type.toString(), type); diff --git a/management/src/main/java/org/ehcache/management/providers/settings/Reflect.java b/ehcache-management/src/main/java/org/ehcache/management/providers/settings/Reflect.java similarity index 100% rename from management/src/main/java/org/ehcache/management/providers/settings/Reflect.java rename to ehcache-management/src/main/java/org/ehcache/management/providers/settings/Reflect.java diff --git a/ehcache-management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java b/ehcache-management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java new file mode 100644 index 0000000000..561df8f700 --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java @@ -0,0 +1,78 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.management.ExtendedStatisticsService; +import org.ehcache.management.ManagementRegistryServiceConfiguration; +import org.ehcache.management.providers.CacheBinding; +import org.ehcache.management.providers.CacheBindingManagementProvider; +import org.ehcache.management.providers.ExposedCacheBinding; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.registry.Named; +import org.terracotta.management.registry.collect.StatisticProvider; + +import java.io.Serializable; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; + +import static java.util.stream.Collectors.toList; + +@Named("StatisticsCapability") +@StatisticProvider +public class EhcacheStatisticsProvider extends CacheBindingManagementProvider { + + private final ExtendedStatisticsService statisticsService; + private final TimeSource timeSource; + + public EhcacheStatisticsProvider(ManagementRegistryServiceConfiguration configuration, ExtendedStatisticsService statisticsService, TimeSource timeSource) { + super(configuration); + this.statisticsService = Objects.requireNonNull(statisticsService); + this.timeSource = Objects.requireNonNull(timeSource); + } + + @Override + protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { + return new StandardEhcacheStatistics(registryConfiguration, cacheBinding, statisticsService, timeSource); + } + + @Override + public final Collection getDescriptors() { + // To keep ordering because these objects end up in an immutable + // topology so this is easier for testing to compare with json payloads + return super.getDescriptors() + .stream() + .map(d -> (StatisticDescriptor) d) + .sorted(STATISTIC_DESCRIPTOR_COMPARATOR) + .collect(toList()); + } + + @Override + public Map> collectStatistics(Context context, Collection statisticNames, long since) { + StandardEhcacheStatistics exposedObject = (StandardEhcacheStatistics) findExposedObject(context); + if (exposedObject == null) { + return Collections.emptyMap(); + } + return exposedObject.collectStatistics(statisticNames, since); + } + +} diff --git a/ehcache-management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/ehcache-management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java new file mode 100644 index 0000000000..ac27889b52 --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -0,0 +1,69 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import org.ehcache.Cache; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.management.registry.LatencyHistogramConfiguration; +import org.ehcache.management.ExtendedStatisticsService; +import org.ehcache.management.ManagementRegistryServiceConfiguration; +import org.ehcache.management.providers.CacheBinding; +import org.ehcache.management.providers.ExposedCacheBinding; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.Statistic; + +import java.io.Serializable; +import java.util.Collection; +import java.util.Map; + +public class StandardEhcacheStatistics extends ExposedCacheBinding { + + private final String cacheAlias; + private final ExtendedStatisticsService statisticsService; + + StandardEhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, ExtendedStatisticsService statisticsService, TimeSource timeSource) { + super(registryConfiguration, cacheBinding); + this.cacheAlias = cacheBinding.getAlias(); + this.statisticsService = statisticsService; + + statisticsService.createCacheRegistry(this.cacheAlias, cacheBinding.getCache(), timeSource::getTimeMillis); + + statisticsService.registerCacheStatistics(this.cacheAlias); + + LatencyHistogramConfiguration latencyHistogramConfiguration = registryConfiguration.getLatencyHistogramConfiguration(); + + // We want some latency statistics as well, so let's register them + registerDerivedStatistics(cacheBinding.getCache(), "get", CacheOperationOutcomes.GetOutcome.HIT, "Cache:GetHitLatency", latencyHistogramConfiguration); + registerDerivedStatistics(cacheBinding.getCache(),"get", CacheOperationOutcomes.GetOutcome.MISS, "Cache:GetMissLatency", latencyHistogramConfiguration); + registerDerivedStatistics(cacheBinding.getCache(),"put", CacheOperationOutcomes.PutOutcome.PUT, "Cache:PutLatency", latencyHistogramConfiguration); + registerDerivedStatistics(cacheBinding.getCache(),"remove", CacheOperationOutcomes.RemoveOutcome.SUCCESS, "Cache:RemoveLatency", latencyHistogramConfiguration); + } + + private , K, V> void registerDerivedStatistics(Cache cache, String statName, T outcome, String derivedName, LatencyHistogramConfiguration configuration) { + this.statisticsService.registerDerivedStatistics(this.cacheAlias, cache , statName, outcome, derivedName, configuration); + } + + @Override + public Collection getDescriptors() { + return statisticsService.getCacheDescriptors(cacheAlias); + } + + Map> collectStatistics(Collection statisticNames, long since) { + return this.statisticsService.collectStatistics(this.cacheAlias, statisticNames, since); + } + +} diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java similarity index 85% rename from management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java rename to ehcache-management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java index 89662baa9d..8444d78192 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java +++ b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java @@ -21,7 +21,9 @@ import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.core.spi.store.InternalCacheManager; +import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.core.internal.statistics.StatsUtils; import org.ehcache.management.CollectorService; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.ManagementRegistryServiceConfiguration; @@ -29,11 +31,8 @@ import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.terracotta.management.model.notification.ContextualNotification; -import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.registry.collect.DefaultStatisticCollector; -import org.terracotta.management.registry.collect.StatisticCollector; -import java.util.Collection; import java.util.concurrent.ScheduledExecutorService; import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; @@ -44,6 +43,7 @@ public class DefaultCollectorService implements CollectorService, CacheManagerLi private enum EhcacheNotification { CACHE_ADDED, CACHE_REMOVED, + CACHE_CLEARED, CACHE_MANAGER_AVAILABLE, CACHE_MANAGER_MAINTENANCE, CACHE_MANAGER_CLOSED, @@ -73,10 +73,13 @@ public synchronized void start(ServiceProvider serviceProvider) { cacheManager = serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); scheduledExecutorService = serviceProvider.getService(ExecutionService.class).getScheduledExecutor(configuration.getCollectorExecutorAlias()); + TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource(); + statisticCollector = new DefaultStatisticCollector( managementRegistry, scheduledExecutorService, - collector::onStatistics); + collector::onStatistics, + timeSource::getTimeMillis); cacheManager.registerListener(this); } @@ -87,12 +90,19 @@ public synchronized void stop() { // so deregisterListener is done in the stateTransition listener //cacheManager.deregisterListener(this); + collector.onNotification( + new ContextualNotification( + configuration.getContext(), + EhcacheNotification.CACHE_MANAGER_CLOSED.name())); + statisticCollector.stopStatisticCollector(); shutdownNow(scheduledExecutorService); } @Override public void cacheAdded(String alias, Cache cache) { + registerClearNotification(alias, cache); + collector.onNotification( new ContextualNotification( configuration.getContext().with("cacheName", alias), @@ -107,6 +117,17 @@ public void cacheRemoved(String alias, Cache cache) { EhcacheNotification.CACHE_REMOVED.name())); } + private void cacheCleared(String alias) { + collector.onNotification( + new ContextualNotification( + configuration.getContext().with("cacheName", alias), + EhcacheNotification.CACHE_CLEARED.name())); + } + + private void registerClearNotification(String alias, Cache cache) { + StatsUtils.registerClearNotification(alias, cache, this::cacheCleared); + } + @Override public void stateTransition(Status from, Status to) { switch (to) { @@ -130,11 +151,6 @@ public void stateTransition(Status from, Status to) { break; case UNINITIALIZED: - collector.onNotification( - new ContextualNotification( - configuration.getContext(), - EhcacheNotification.CACHE_MANAGER_CLOSED.name())); - // deregister me - should not be in stop() - see other comments cacheManager.deregisterListener(this); break; diff --git a/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryConfiguration.java b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryConfiguration.java new file mode 100644 index 0000000000..0bdb03c729 --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryConfiguration.java @@ -0,0 +1,138 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.registry; + +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.ManagementRegistryServiceConfiguration; +import org.terracotta.management.model.context.Context; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Objects; +import java.util.TreeSet; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicLong; + +public class DefaultManagementRegistryConfiguration implements ManagementRegistryServiceConfiguration { + + private static final AtomicLong COUNTER = new AtomicLong(); + + private final Collection tags = new TreeSet<>(); + private final String instanceId = UUID.randomUUID().toString(); + private Context context = Context.empty().with("instanceId", instanceId); + private String collectorExecutorAlias = "collectorExecutor"; + private LatencyHistogramConfiguration latencyHistogramConfiguration = LatencyHistogramConfiguration.DEFAULT; + + public DefaultManagementRegistryConfiguration() { + setCacheManagerAlias("cache-manager-" + COUNTER.getAndIncrement()); + } + + public DefaultManagementRegistryConfiguration setCacheManagerAlias(String alias) { + return setContext(Context.create("cacheManagerName", alias)); + } + + public DefaultManagementRegistryConfiguration setContext(Context context) { + if (!this.context.contains("cacheManagerName") && !context.contains("cacheManagerName")) { + throw new IllegalArgumentException("'cacheManagerName' is missing from context"); + } + if (context.contains("instanceId") && !Objects.equals(context.get("instanceId"), instanceId)) { + throw new IllegalArgumentException("Cannot override instanceId in context " + this.context + " by " + context); + } + this.context = this.context.with(context); + return this; + } + + public DefaultManagementRegistryConfiguration setCollectorExecutorAlias(String collectorExecutorAlias) { + this.collectorExecutorAlias = collectorExecutorAlias; + return this; + } + + public DefaultManagementRegistryConfiguration addTags(String... tags) { + this.tags.addAll(Arrays.asList(tags)); + return this; + } + + public DefaultManagementRegistryConfiguration addTag(String tag) { + return addTags(tag); + } + + @Override + public Context getContext() { + return context; + } + + public String getCacheManagerAlias() { + return getContext().get("cacheManagerName"); + } + + @Override + public String getCollectorExecutorAlias() { + return this.collectorExecutorAlias; + } + + @Override + public Collection getTags() { + return tags; + } + + @Override + public String getInstanceId() { + return instanceId; + } + + @Override + public LatencyHistogramConfiguration getLatencyHistogramConfiguration() { + return latencyHistogramConfiguration; + } + + public DefaultManagementRegistryConfiguration setLatencyHistogramConfiguration(LatencyHistogramConfiguration latencyHistogramConfiguration) { + this.latencyHistogramConfiguration = Objects.requireNonNull(latencyHistogramConfiguration); + return this; + } + + @Override + public Class getServiceType() { + return ManagementRegistryService.class; + } + + @Override + public String toString() { + return "DefaultManagementRegistryConfiguration{" + "context=" + context + + ", tags=" + tags + + ", collectorExecutorAlias='" + collectorExecutorAlias + '\'' + + ", instanceId='" + instanceId + '\'' + + ", latencyHistogramConfiguration='" + latencyHistogramConfiguration + '\'' + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DefaultManagementRegistryConfiguration that = (DefaultManagementRegistryConfiguration) o; + return Objects.equals(tags, that.tags) && + Objects.equals(instanceId, that.instanceId) && + Objects.equals(context, that.context) && + Objects.equals(collectorExecutorAlias, that.collectorExecutorAlias) && + Objects.equals(latencyHistogramConfiguration, that.latencyHistogramConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash(tags, instanceId, context, collectorExecutorAlias); + } + +} diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryFactory.java b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryFactory.java similarity index 88% rename from management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryFactory.java rename to ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryFactory.java index 287e2595cb..8f2ca11868 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryFactory.java +++ b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryFactory.java @@ -23,15 +23,15 @@ public class DefaultManagementRegistryFactory implements ServiceFactory { @Override - public ManagementRegistryService create(ServiceCreationConfiguration configuration) { + public ManagementRegistryService create(ServiceCreationConfiguration configuration) { return configuration instanceof ManagementRegistryServiceConfiguration ? new DefaultManagementRegistryService((ManagementRegistryServiceConfiguration) configuration) : new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration()); } @Override - public Class getServiceType() { - return ManagementRegistryService.class; + public Class getServiceType() { + return DefaultManagementRegistryService.class; } } diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java similarity index 79% rename from management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java rename to ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java index 6049ad891f..d72208de54 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java +++ b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java @@ -20,8 +20,10 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.core.events.CacheManagerListener; import org.ehcache.core.spi.service.CacheManagerProviderService; -import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.core.spi.store.InternalCacheManager; +import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.management.ExtendedStatisticsService; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.ehcache.management.cluster.Clustering; @@ -32,31 +34,35 @@ import org.ehcache.management.providers.actions.EhcacheActionProvider; import org.ehcache.management.providers.settings.EhcacheSettingsProvider; import org.ehcache.management.providers.statistics.EhcacheStatisticsProvider; +import org.ehcache.spi.service.OptionalServiceDependencies; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; +import org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; import org.terracotta.management.registry.DefaultManagementRegistry; -import org.terracotta.management.registry.ManagementProvider; -import org.terracotta.statistics.StatisticsManager; import java.util.ArrayList; import java.util.Collection; import java.util.Map; -@ServiceDependencies({CacheManagerProviderService.class, StatisticsService.class}) +@ServiceDependencies({CacheManagerProviderService.class, ExtendedStatisticsService.class, TimeSourceService.class, ExecutionService.class}) +@OptionalServiceDependencies({ + "org.ehcache.clustered.client.service.EntityService", + "org.ehcache.clustered.client.service.ClusteringService"}) public class DefaultManagementRegistryService extends DefaultManagementRegistry implements ManagementRegistryService, CacheManagerListener { private final ManagementRegistryServiceConfiguration configuration; private volatile InternalCacheManager cacheManager; private volatile ClusteringManagementService clusteringManagementService; + private volatile boolean clusteringManagementServiceAutoStarted; + private volatile ExtendedStatisticsService statisticsService; public DefaultManagementRegistryService() { this(new DefaultManagementRegistryConfiguration()); } public DefaultManagementRegistryService(ManagementRegistryServiceConfiguration configuration) { - super(null); // context container creation is overriden here this.configuration = configuration == null ? new DefaultManagementRegistryConfiguration() : configuration; } @@ -64,11 +70,12 @@ public DefaultManagementRegistryService(ManagementRegistryServiceConfiguration c public void start(final ServiceProvider serviceProvider) { this.cacheManager = serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); - StatisticsService statisticsService = serviceProvider.getService(StatisticsService.class); + this.statisticsService = serviceProvider.getService(ExtendedStatisticsService.class); + TimeSourceService timeSourceService = serviceProvider.getService(TimeSourceService.class); // initialize management capabilities (stats, action calls, etc) addManagementProvider(new EhcacheActionProvider(getConfiguration())); - addManagementProvider(new EhcacheStatisticsProvider(getConfiguration(), statisticsService)); + addManagementProvider(new EhcacheStatisticsProvider(getConfiguration(), statisticsService, timeSourceService.getTimeSource())); addManagementProvider(new EhcacheStatisticCollectorProvider(getConfiguration())); addManagementProvider(new EhcacheSettingsProvider(getConfiguration(), cacheManager)); @@ -80,22 +87,25 @@ public void start(final ServiceProvider serviceProvider) { if (this.clusteringManagementService == null && Clustering.isAvailable(serviceProvider)) { this.clusteringManagementService = Clustering.newClusteringManagementService(new DefaultClusteringManagementServiceConfiguration()); this.clusteringManagementService.start(serviceProvider); + this.clusteringManagementServiceAutoStarted = true; + } else { + this.clusteringManagementServiceAutoStarted = false; } } @Override public void stop() { - if (this.clusteringManagementService != null) { + if (this.clusteringManagementService != null && this.clusteringManagementServiceAutoStarted) { this.clusteringManagementService.stop(); - this.clusteringManagementService = null; } + this.clusteringManagementService = null; super.close(); } @Override public void cacheAdded(String alias, Cache cache) { - StatisticsManager.associate(cache).withParent(cacheManager); + statisticsService.registerWithParent(cache, cacheManager); register(new CacheBinding(alias, cache)); } @@ -104,7 +114,7 @@ public void cacheAdded(String alias, Cache cache) { public void cacheRemoved(String alias, Cache cache) { unregister(new CacheBinding(alias, cache)); - StatisticsManager.dissociate(cache).fromParent(cacheManager); + statisticsService.deRegisterFromParent(cache, cacheManager); } @Override @@ -151,4 +161,9 @@ public ContextContainer getContextContainer() { return new ContextContainer("cacheManagerName", getConfiguration().getContext().get("cacheManagerName"), cacheCtx); } + @Override + public Context getContext() { + // contains instanceId + cacheManagerName keys + return configuration.getContext(); + } } diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java b/ehcache-management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java similarity index 100% rename from management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java rename to ehcache-management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java diff --git a/ehcache-management/src/main/java/org/ehcache/management/registry/LatencyHistogramConfiguration.java b/ehcache-management/src/main/java/org/ehcache/management/registry/LatencyHistogramConfiguration.java new file mode 100644 index 0000000000..687660173b --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/registry/LatencyHistogramConfiguration.java @@ -0,0 +1,92 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.registry; + +import java.time.Duration; +import java.util.Objects; + +/** + * Configuration of all latency histograms. + */ +public class LatencyHistogramConfiguration { + + public static final double DEFAULT_PHI = 0.63; + public static final int DEFAULT_BUCKET_COUNT = 20; + public static final Duration DEFAULT_WINDOW = Duration.ofMinutes(1) ; + + public static final LatencyHistogramConfiguration DEFAULT = new LatencyHistogramConfiguration(DEFAULT_PHI, DEFAULT_BUCKET_COUNT, DEFAULT_WINDOW); + + private final double phi; + private final int bucketCount; + private final Duration window; + + /** + * Default constructor. + * + * @param phi histogram bucket bias factor + * @param bucketCount number of buckets + * @param window sliding window size + */ + public LatencyHistogramConfiguration(double phi, int bucketCount, Duration window) { + this.phi = phi; + this.bucketCount = bucketCount; + this.window = Objects.requireNonNull(window); + } + + public double getPhi() { + return phi; + } + + public int getBucketCount() { + return bucketCount; + } + + public Duration getWindow() { + return window; + } + + @Override + public String toString() { + return "LatencyHistogramConfiguration{" + + "phi=" + phi + + ", bucketCount=" + bucketCount + + ", window=" + window + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + LatencyHistogramConfiguration that = (LatencyHistogramConfiguration) o; + + if (that.phi != phi) return false; + if (bucketCount != that.bucketCount) return false; + return window.equals(that.window); + } + + @Override + public int hashCode() { + int result; + long temp; + temp = Double.doubleToLongBits(phi); + result = (int) (temp ^ (temp >>> 32)); + result = 31 * result + bucketCount; + result = 31 * result + window.hashCode(); + return result; + } +} diff --git a/ehcache-management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java b/ehcache-management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java new file mode 100644 index 0000000000..12b3e412db --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java @@ -0,0 +1,127 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.registry; + +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.BaseConfigParser; +import org.ehcache.xml.CacheManagerServiceConfigurationParser; +import org.ehcache.xml.JaxbParsers; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.w3c.dom.Document; +import org.w3c.dom.Element; + +import java.io.IOException; +import java.net.URI; +import java.net.URL; + +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +public class ManagementRegistryServiceConfigurationParser extends BaseConfigParser implements CacheManagerServiceConfigurationParser { + + private static final String NAMESPACE = "http://www.ehcache.org/v3/management"; + private static final URI NAMESPACE_URI = URI.create(NAMESPACE); + private static final URL XML_SCHEMA = ManagementRegistryServiceConfigurationParser.class.getResource("/ehcache-management-ext.xsd"); + private static final String MANAGEMENT_NAMESPACE_PREFIX = "mgm:"; + private static final String MANAGEMENT_ELEMENT_NAME = "management"; + private static final String CACHE_MANAGER_ATTRIBUTE_NAME = "cache-manager-alias"; + private static final String COLLECTOR_EXECUTOR_ATTRIBUTE_NAME = "collector-executor-alias"; + private static final String TAGS_NAME = "tags"; + private static final String TAG_NAME = "tag"; + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public URI getNamespace() { + return NAMESPACE_URI; + } + + @Override + public ServiceCreationConfiguration parseServiceCreationConfiguration(Element fragment, ClassLoader classLoader) { + if ("management".equals(fragment.getLocalName())) { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration(); + + // ATTR: cache-manager-alias + if (fragment.hasAttribute("cache-manager-alias")) { + registryConfiguration.setCacheManagerAlias(attr(fragment, "cache-manager-alias")); + } + + // ATTR: collector-executor-alias + if (fragment.hasAttribute("collector-executor-alias")) { + registryConfiguration.setCollectorExecutorAlias(attr(fragment, "collector-executor-alias")); + } + + // tags + for (Element tags : NodeListIterable.elements(fragment, NAMESPACE, "tags")) { + // tag + for (Element tag : NodeListIterable.elements(tags, NAMESPACE, "tag")) { + String val = JaxbParsers.parsePropertyOrString(tag.getTextContent()); + if (!val.isEmpty()) { + registryConfiguration.addTag(val); + } + } + } + + return registryConfiguration; + + } else { + throw new XmlConfigurationException(String.format( + "XML configuration element <%s> in <%s> is not supported", + fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); + } + } + + private static String attr(Element element, String name) { + String s = element.getAttribute(name); + return s == null || s.equals("") ? null : s; + } + + @Override + public Class getServiceType() { + return ManagementRegistryService.class; + } + + @Override + public Element unparseServiceCreationConfiguration(ServiceCreationConfiguration serviceCreationConfiguration) { + return unparseConfig(serviceCreationConfiguration); + } + + @Override + protected Element createRootElement(Document doc, DefaultManagementRegistryConfiguration defaultManagementRegistryConfiguration) { + Element rootElement = doc.createElementNS(NAMESPACE,MANAGEMENT_NAMESPACE_PREFIX + MANAGEMENT_ELEMENT_NAME); + rootElement.setAttribute(CACHE_MANAGER_ATTRIBUTE_NAME, defaultManagementRegistryConfiguration.getCacheManagerAlias()); + rootElement.setAttribute(COLLECTOR_EXECUTOR_ATTRIBUTE_NAME, defaultManagementRegistryConfiguration.getCollectorExecutorAlias()); + processManagementTags(doc, rootElement, defaultManagementRegistryConfiguration); + return rootElement; + } + + private void processManagementTags(Document doc, Element parent, DefaultManagementRegistryConfiguration defaultManagementRegistryConfiguration) { + if (!defaultManagementRegistryConfiguration.getTags().isEmpty()) { + Element tagsName = doc.createElement(MANAGEMENT_NAMESPACE_PREFIX + TAGS_NAME); + for (String tag : defaultManagementRegistryConfiguration.getTags()) { + Element tagName = doc.createElement(MANAGEMENT_NAMESPACE_PREFIX + TAG_NAME); + tagName.setTextContent(tag); + tagsName.appendChild(tagName); + } + parent.appendChild(tagsName); + } + } + +} diff --git a/management/src/main/java/org/ehcache/management/registry/NodeListIterable.java b/ehcache-management/src/main/java/org/ehcache/management/registry/NodeListIterable.java similarity index 100% rename from management/src/main/java/org/ehcache/management/registry/NodeListIterable.java rename to ehcache-management/src/main/java/org/ehcache/management/registry/NodeListIterable.java diff --git a/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultCacheStatistics.java b/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultCacheStatistics.java new file mode 100644 index 0000000000..6ebb39b9d2 --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultCacheStatistics.java @@ -0,0 +1,247 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.management.statistics; + +import org.ehcache.core.InternalCache; +import org.ehcache.core.internal.statistics.DefaultTierStatistics; +import org.ehcache.core.internal.statistics.DelegatingOperationStatistic; +import org.ehcache.core.statistics.BulkOps; +import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.PutOutcome; +import org.ehcache.core.statistics.CacheStatistics; +import org.ehcache.core.statistics.ChainedOperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.ehcache.core.statistics.TierStatistics; +import org.ehcache.core.statistics.ValueStatistic; +import org.terracotta.statistics.ValueStatistics; + +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import static org.ehcache.core.internal.statistics.StatsUtils.findLowestTier; +import static org.ehcache.core.internal.statistics.StatsUtils.findOperationStatisticOnChildren; +import static org.ehcache.core.internal.statistics.StatsUtils.findTiers; +import static org.ehcache.core.statistics.CacheOperationOutcomes.ConditionalRemoveOutcome; +import static org.ehcache.core.statistics.CacheOperationOutcomes.PutIfAbsentOutcome; +import static org.ehcache.core.statistics.CacheOperationOutcomes.RemoveOutcome; +import static org.ehcache.core.statistics.CacheOperationOutcomes.ReplaceOutcome; +import static org.ehcache.core.statistics.SuppliedValueStatistic.counter; + +/** + * Contains usage statistics relative to a given cache. + */ +public class DefaultCacheStatistics implements CacheStatistics { + + private volatile CompensatingCounters compensatingCounters = CompensatingCounters.empty(); + + private final org.terracotta.statistics.OperationStatistic get; + private final org.terracotta.statistics.OperationStatistic put; + private final org.terracotta.statistics.OperationStatistic remove; + private final org.terracotta.statistics.OperationStatistic putIfAbsent; + private final org.terracotta.statistics.OperationStatistic replace; + private final org.terracotta.statistics.OperationStatistic conditionalRemove; + + private final InternalCache cache; + + private final Map tierStatistics; + private final TierStatistics lowestTier; + + private final Map> knownStatistics; + + public DefaultCacheStatistics(InternalCache cache) { + this.cache = cache; + + get = findOperationStatisticOnChildren(cache, GetOutcome.class, "get"); + put = findOperationStatisticOnChildren(cache, PutOutcome.class, "put"); + remove = findOperationStatisticOnChildren(cache, RemoveOutcome.class, "remove"); + putIfAbsent = findOperationStatisticOnChildren(cache, PutIfAbsentOutcome.class, "putIfAbsent"); + replace = findOperationStatisticOnChildren(cache, ReplaceOutcome.class, "replace"); + conditionalRemove = findOperationStatisticOnChildren(cache, ConditionalRemoveOutcome.class, "conditionalRemove"); + + String[] tierNames = findTiers(cache); + + String lowestTierName = findLowestTier(tierNames); + TierStatistics lowestTier = null; + + tierStatistics = new HashMap<>(tierNames.length); + for (String tierName : tierNames) { + DefaultTierStatistics tierStatistics = new DefaultTierStatistics(cache, tierName); + this.tierStatistics.put(tierName, tierStatistics); + if (lowestTierName.equals(tierName)) { + lowestTier = tierStatistics; + } + } + this.lowestTier = lowestTier; + + knownStatistics = createKnownStatistics(); + } + + @Override + public , S extends ChainedOperationObserver> void registerDerivedStatistic(Class outcomeClass, String statName, S derivedStatistic) { + OperationStatistic stat = new DelegatingOperationStatistic<>(findOperationStatisticOnChildren(cache, outcomeClass, statName)); + stat.addDerivedStatistic(derivedStatistic); + } + + private Map> createKnownStatistics() { + Map> knownStatistics = new HashMap<>(30); + knownStatistics.put("Cache:HitCount", ValueStatistics.counter(this::getCacheHits)); + knownStatistics.put("Cache:MissCount", ValueStatistics.counter(this::getCacheMisses)); + knownStatistics.put("Cache:PutCount", ValueStatistics.counter(this::getCachePuts)); + knownStatistics.put("Cache:RemovalCount", ValueStatistics.counter(this::getCacheRemovals)); + knownStatistics.put("Cache:EvictionCount", ValueStatistics.counter(this::getCacheEvictions)); + knownStatistics.put("Cache:ExpirationCount", ValueStatistics.counter(this::getCacheExpirations)); + + for (DefaultTierStatistics tier : tierStatistics.values()) { + knownStatistics.putAll(tier.getKnownStatistics()); + } + + return Collections.unmodifiableMap(knownStatistics); + } + + public Map> getKnownStatistics() { + return knownStatistics; + } + + public Map getTierStatistics() { + return Collections.unmodifiableMap(tierStatistics); + } + + @Override + public void clear() { + compensatingCounters = compensatingCounters.snapshot(this); + for (TierStatistics t : tierStatistics.values()) { + t.clear(); + } + } + + @Override + public long getCacheHits() { + return normalize(getHits() - compensatingCounters.cacheHits); + } + + @Override + public float getCacheHitPercentage() { + long cacheHits = getCacheHits(); + return normalize((float) cacheHits / (cacheHits + getCacheMisses())) * 100.0f; + } + + @Override + public long getCacheMisses() { + return normalize(getMisses() - compensatingCounters.cacheMisses); + } + + @Override + public float getCacheMissPercentage() { + long cacheMisses = getCacheMisses(); + return normalize((float) cacheMisses / (getCacheHits() + cacheMisses)) * 100.0f; + } + + @Override + public long getCacheGets() { + return normalize(getHits() + getMisses() - compensatingCounters.cacheGets); + } + + @Override + public long getCachePuts() { + return normalize(getBulkCount(BulkOps.PUT_ALL) + + put.sum(EnumSet.of(PutOutcome.PUT)) + + putIfAbsent.sum(EnumSet.of(PutIfAbsentOutcome.PUT)) + + replace.sum(EnumSet.of(ReplaceOutcome.HIT)) - + compensatingCounters.cachePuts); + } + + @Override + public long getCacheRemovals() { + return normalize(getBulkCount(BulkOps.REMOVE_ALL) + + remove.sum(EnumSet.of(RemoveOutcome.SUCCESS)) + + conditionalRemove.sum(EnumSet.of(ConditionalRemoveOutcome.SUCCESS)) - + compensatingCounters.cacheRemovals); + } + + @Override + public long getCacheEvictions() { + return normalize(lowestTier.getEvictions()); + } + + @Override + public long getCacheExpirations() { + return normalize(lowestTier.getExpirations()); + } + + private long getMisses() { + return getBulkCount(BulkOps.GET_ALL_MISS) + + get.sum(EnumSet.of(GetOutcome.MISS)) + + putIfAbsent.sum(EnumSet.of(PutIfAbsentOutcome.PUT)) + + replace.sum(EnumSet.of(ReplaceOutcome.MISS_NOT_PRESENT)) + + conditionalRemove.sum(EnumSet.of(ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); + } + + private long getHits() { + return getBulkCount(BulkOps.GET_ALL_HITS) + + get.sum(EnumSet.of(GetOutcome.HIT)) + + putIfAbsent.sum(EnumSet.of(PutIfAbsentOutcome.HIT)) + + replace.sum(EnumSet.of(ReplaceOutcome.HIT, ReplaceOutcome.MISS_PRESENT)) + + conditionalRemove.sum(EnumSet.of(ConditionalRemoveOutcome.SUCCESS, ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); + } + + private long getBulkCount(BulkOps bulkOps) { + return cache.getBulkMethodEntries().get(bulkOps).longValue(); + } + + private static long normalize(long value) { + return Math.max(0, value); + } + + private static float normalize(float value) { + if (Float.isNaN(value)) { + return 0.0f; + } + return Math.min(1.0f, Math.max(0.0f, value)); + } + + private static class CompensatingCounters { + final long cacheHits; + final long cacheMisses; + final long cacheGets; + final long cachePuts; + final long cacheRemovals; + + private CompensatingCounters(long cacheHits, long cacheMisses, long cacheGets, long cachePuts, long cacheRemovals) { + this.cacheHits = cacheHits; + this.cacheMisses = cacheMisses; + this.cacheGets = cacheGets; + this.cachePuts = cachePuts; + this.cacheRemovals = cacheRemovals; + } + + static CompensatingCounters empty() { + return new CompensatingCounters(0, 0, 0, 0, 0); + } + + CompensatingCounters snapshot(DefaultCacheStatistics statistics) { + return new CompensatingCounters( + cacheHits + statistics.getHits(), + cacheMisses + statistics.getMisses(), + cacheGets + statistics.getCacheGets(), + cachePuts + statistics.getCachePuts(), + cacheRemovals + statistics.getCacheRemovals()); + } + } + +} diff --git a/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultExtendedStatisticsService.java b/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultExtendedStatisticsService.java new file mode 100644 index 0000000000..a42566a425 --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultExtendedStatisticsService.java @@ -0,0 +1,241 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.management.statistics; + +import org.ehcache.Cache; +import org.ehcache.Status; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.core.InternalCache; +import org.ehcache.core.events.CacheManagerListener; +import org.ehcache.core.spi.service.CacheManagerProviderService; +import org.ehcache.core.spi.store.InternalCacheManager; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.statistics.CacheStatistics; +import org.ehcache.core.statistics.StatisticType; +import org.ehcache.core.statistics.OperationObserver; +import org.ehcache.core.statistics.ZeroOperationStatistic; +import org.ehcache.management.ExtendedStatisticsService; +import org.ehcache.management.registry.LatencyHistogramConfiguration; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.model.stats.StatisticRegistry; +import org.terracotta.statistics.MappedOperationStatistic; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.StatisticsManager; +import org.terracotta.statistics.derived.OperationResultFilter; +import org.terracotta.statistics.derived.latency.DefaultLatencyHistogramStatistic; + +import java.io.Serializable; +import java.util.Collection; +import java.util.EnumSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.function.LongSupplier; +import java.util.function.Supplier; + +import static org.ehcache.core.internal.statistics.StatsUtils.findOperationStatisticOnChildren; +import static org.terracotta.statistics.StatisticBuilder.operation; + +/** + * Default implementation using the statistics calculated by the observers set on the caches. + */ +@ServiceDependencies(CacheManagerProviderService.class) +public class DefaultExtendedStatisticsService implements ExtendedStatisticsService, CacheManagerListener { + + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultExtendedStatisticsService.class); + + private final ConcurrentMap cacheStatistics = new ConcurrentHashMap<>(); + private final ConcurrentMap statisticRegistries = new ConcurrentHashMap<>(); + + private volatile InternalCacheManager cacheManager; + private volatile boolean started = false; + + public CacheStatistics getCacheStatistics(String cacheName) { + CacheStatistics stats = cacheStatistics.get(cacheName); + if (stats == null) { + throw new IllegalArgumentException("Unknown cache: " + cacheName); + } + return stats; + } + + @Override + public void registerWithParent(Object toAssociate, Object parent) { + StatisticsManager.associate(toAssociate).withParent(parent); + } + + @Override + public , T extends Enum> org.ehcache.core.statistics.OperationStatistic registerStoreStatistics(Store store, String targetName, int tierHeight, String tag, Map> translation, String statisticName) { + + Class outcomeType = getOutcomeType(translation); + + // If the original stat doesn't exist, we do not need to translate it + if (StatsUtils.hasOperationStat(store, outcomeType, targetName)) { + + MappedOperationStatistic operationStatistic = new MappedOperationStatistic<>(store, translation, statisticName, tierHeight, targetName, tag); + StatisticsManager.associate(operationStatistic).withParent(store); + return new DelegatedMappedOperationStatistics<>(operationStatistic); + } + return ZeroOperationStatistic.get(); + } + + /** + * From the Map of translation, we extract one of the items to get the declaring class of the enum. + * + * @param translation translation map + * @param type of the outcome + * @param type of the possible translations + * @return the outcome type + */ + private static , T extends Enum> Class getOutcomeType(Map> translation) { + Map.Entry> first = translation.entrySet().iterator().next(); + return first.getValue().iterator().next().getDeclaringClass(); + } + + @Override + public void deRegisterFromParent(Object toDisassociate, Object parent) { + StatisticsManager.dissociate(toDisassociate).fromParent(parent); + } + + @Override + public void cleanForNode(Object node) { + StatisticsManager.nodeFor(node).clean(); + } + + @Override + public void createCacheRegistry(String cacheName, Cache cache, LongSupplier timeSource) { + statisticRegistries.put(cacheName, new StatisticRegistry(cache, timeSource)); + } + + @Override + public void registerCacheStatistics(String cacheName) { + cacheStatistics.get(cacheName).getKnownStatistics().forEach(statisticRegistries.get(cacheName)::registerStatistic); + } + + @Override + public Collection getCacheDescriptors(String cacheName) { + return statisticRegistries.get(cacheName).getDescriptors(); + } + + @Override + public , K, V> void registerDerivedStatistics(String cacheName, Cache cache, String statName, T outcome, String derivedName, LatencyHistogramConfiguration configuration) { + DefaultLatencyHistogramStatistic histogram = new DefaultLatencyHistogramStatistic(configuration.getPhi(), configuration.getBucketCount(), configuration.getWindow()); + + @SuppressWarnings("unchecked") + Class outcomeClass = (Class) outcome.getClass(); + OperationStatistic stat = findOperationStatisticOnChildren(cache, outcomeClass, statName); + stat.addDerivedStatistic(new OperationResultFilter<>(EnumSet.of(outcome), histogram)); + + statisticRegistries.get(cacheName).registerStatistic(derivedName + "#50", histogram.medianStatistic()); + statisticRegistries.get(cacheName).registerStatistic(derivedName + "#95", histogram.percentileStatistic(0.95)); + statisticRegistries.get(cacheName).registerStatistic(derivedName + "#99", histogram.percentileStatistic(0.99)); + statisticRegistries.get(cacheName).registerStatistic(derivedName + "#100", histogram.maximumStatistic()); + } + + @Override + public Map> collectStatistics(String cacheName, Collection statisticNames, long since) { + return StatisticRegistry.collect(statisticRegistries.get(cacheName), statisticNames, since); + } + + @Override + public void registerStatistic(Object context, String name, StatisticType type, Set tags, Supplier valueSupplier) { + StatisticsManager.createPassThroughStatistic(context, name, tags, convert(type), valueSupplier); + } + + @Override + public > OperationObserver createOperationStatistics(String name, Class outcome, String tag, Object context) { + return new DelegatingOperationObserver<>(operation(outcome).named(name).of(context).tag(tag).build()); + } + + public boolean isStarted() { + return started; + } + + @Override + public void start(ServiceProvider serviceProvider) { + LOGGER.debug("Starting service"); + + CacheManagerProviderService cacheManagerProviderService = serviceProvider.getService(CacheManagerProviderService.class); + cacheManager = cacheManagerProviderService.getCacheManager(); + cacheManager.registerListener(this); + started = true; + } + + @Override + public void stop() { + LOGGER.debug("Stopping service"); + cacheManager.deregisterListener(this); + cacheStatistics.clear(); + started = false; + } + + @Override + public void stateTransition(Status from, Status to) { + LOGGER.debug("Moving from " + from + " to " + to); + switch (to) { + case AVAILABLE: + registerAllCaches(); + break; + case UNINITIALIZED: + cacheManager.deregisterListener(this); + cacheStatistics.clear(); + break; + case MAINTENANCE: + throw new IllegalStateException("Should not be started in maintenance mode"); + default: + throw new AssertionError("Unsupported state: " + to); + } + } + + private void registerAllCaches() { + for (Map.Entry> entry : cacheManager.getRuntimeConfiguration().getCacheConfigurations().entrySet()) { + String alias = entry.getKey(); + CacheConfiguration configuration = entry.getValue(); + Cache cache = cacheManager.getCache(alias, configuration.getKeyType(), configuration.getValueType()); + cacheAdded(alias, cache); + } + } + + @Override + public void cacheAdded(String alias, Cache cache) { + LOGGER.debug("Cache added " + alias); + cacheStatistics.put(alias, new DefaultCacheStatistics((InternalCache) cache)); + } + + @Override + public void cacheRemoved(String alias, Cache cache) { + LOGGER.debug("Cache removed " + alias); + cacheStatistics.remove(alias); + } + + private static org.terracotta.statistics.StatisticType convert(StatisticType type) { + switch (type) { + case COUNTER: + return org.terracotta.statistics.StatisticType.COUNTER; + case GAUGE: + return org.terracotta.statistics.StatisticType.GAUGE; + default: + throw new IllegalArgumentException("Untranslatable statistic type : " + type); + } + } +} diff --git a/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultExtendedStatisticsServiceFactory.java b/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultExtendedStatisticsServiceFactory.java new file mode 100644 index 0000000000..ae43c72ffb --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultExtendedStatisticsServiceFactory.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.management.statistics; + +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.management.ExtendedStatisticsService; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +public class DefaultExtendedStatisticsServiceFactory implements ServiceFactory { + + @Override + public int rank() { + return 10; + } + + @Override + public ExtendedStatisticsService create(ServiceCreationConfiguration configuration) { + return new DefaultExtendedStatisticsService(); + } + + @Override + public Class getServiceType() { + return DefaultExtendedStatisticsService.class; + } +} diff --git a/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultTierStatistics.java b/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultTierStatistics.java new file mode 100755 index 0000000000..742ee19cd6 --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/statistics/DefaultTierStatistics.java @@ -0,0 +1,247 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.management.statistics; + +import org.ehcache.Cache; +import org.ehcache.core.internal.statistics.StatsUtils; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.core.statistics.TierOperationOutcomes; +import org.ehcache.core.statistics.TierStatistics; +import org.ehcache.core.statistics.ValueStatistic; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.ValueStatistics; +import org.terracotta.statistics.ZeroOperationStatistic; + +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.function.Supplier; + +import static org.ehcache.core.internal.statistics.StatsUtils.findStatisticOnDescendants; +import static org.ehcache.core.statistics.SuppliedValueStatistic.counter; +import static org.ehcache.core.statistics.SuppliedValueStatistic.gauge; + +/** + * Contains usage statistics relative to a given tier. + */ +public class DefaultTierStatistics implements TierStatistics { + + private volatile CompensatingCounters compensatingCounters = CompensatingCounters.empty(); + + private final Map> knownStatistics; + + private final OperationStatistic get; + private final OperationStatistic put; + private final OperationStatistic putIfAbsent; + private final OperationStatistic replace; + private final OperationStatistic conditionalReplace; + private final OperationStatistic remove; + private final OperationStatistic conditionalRemove; + private final OperationStatistic eviction; + private final OperationStatistic expiration; + private final OperationStatistic compute; + private final OperationStatistic computeIfAbsent; + + //Ehcache default to -1 if unavailable, but the management layer needs optional or null + // (since -1 can be a normal value for a stat). + private final Optional> mapping; + private final Optional> allocatedMemory; + private final Optional> occupiedMemory; + + public DefaultTierStatistics(Cache cache, String tierName) { + + get = findOperationStatistic(cache, tierName, "tier", "get"); + put = findOperationStatistic(cache, tierName, "put"); + putIfAbsent = findOperationStatistic(cache, tierName, "putIfAbsent"); + replace = findOperationStatistic(cache, tierName, "replace"); + conditionalReplace = findOperationStatistic(cache, tierName, "conditionalReplace"); + remove = findOperationStatistic(cache, tierName, "remove"); + conditionalRemove = findOperationStatistic(cache, tierName, "conditionalRemove"); + eviction = findOperationStatistic(cache, tierName, "tier", "eviction"); + expiration = findOperationStatistic(cache, tierName, "expiration"); + compute = findOperationStatistic(cache, tierName, "compute"); + computeIfAbsent = findOperationStatistic(cache, tierName, "computeIfAbsent"); + + mapping = findValueStatistics(cache, tierName, "mappings"); + allocatedMemory = findValueStatistics(cache, tierName, "allocatedMemory"); + occupiedMemory = findValueStatistics(cache, tierName, "occupiedMemory"); + + Map> knownStatistics = createKnownStatistics(tierName); + this.knownStatistics = Collections.unmodifiableMap(knownStatistics); + } + + private Map> createKnownStatistics(String tierName) { + Map> knownStatistics = new HashMap<>(7); + addIfPresent(knownStatistics, tierName + ":HitCount", get, this::getHits); + addIfPresent(knownStatistics, tierName + ":MissCount", get, this::getMisses); + addIfPresent(knownStatistics, tierName + ":PutCount", put, this::getPuts); + addIfPresent(knownStatistics, tierName + ":RemovalCount", remove, this::getRemovals); + + // These two a special because they are used by the cache so they should always be there + knownStatistics.put(tierName + ":EvictionCount", ValueStatistics.counter(this::getEvictions)); + knownStatistics.put(tierName + ":ExpirationCount", ValueStatistics.counter(this::getExpirations)); + + mapping.ifPresent(longValueStatistic -> knownStatistics.put(tierName + ":MappingCount", ValueStatistics.gauge(this::getMappings))); + allocatedMemory.ifPresent(longValueStatistic -> knownStatistics.put(tierName + ":AllocatedByteSize", ValueStatistics.gauge(this::getAllocatedByteSize))); + occupiedMemory.ifPresent(longValueStatistic -> knownStatistics.put(tierName + ":OccupiedByteSize", ValueStatistics.gauge(this::getOccupiedByteSize))); + return knownStatistics; + } + + /** + * Add the statistic as a known statistic only if the reference statistic is available. We consider that the reference statistic can only be + * an instance of {@code ZeroOperationStatistic} when statistics are disabled. + * + * @param knownStatistics map of known statistics + * @param name the name of the statistic to add + * @param reference the reference statistic that should be available for the statistic to be added + * @param valueSupplier the supplier that will provide the current value for the statistic + * @param type of the supplied value + */ + private static void addIfPresent(Map> knownStatistics, String name, OperationStatistic reference, Supplier valueSupplier) { + if(!(reference instanceof ZeroOperationStatistic)) { + knownStatistics.put(name, ValueStatistics.counter(valueSupplier)); + } + } + + public Map> getKnownStatistics() { + return knownStatistics; + } + + private > OperationStatistic findOperationStatistic(Cache cache, String tierName, String tag, String stat) { + return StatsUtils.>findStatisticOnDescendants(cache, tierName, tag, stat).orElse(ZeroOperationStatistic.get()); + } + + private > OperationStatistic findOperationStatistic(Cache cache, String tierName, String stat) { + return StatsUtils.>findStatisticOnDescendants(cache, tierName, stat).orElse(ZeroOperationStatistic.get()); + } + + private Optional> findValueStatistics(Cache cache, String tierName, String statName) { + return findStatisticOnDescendants(cache, tierName, statName); + } + + /** + * Reset the values for this tier. However, note that {@code mapping, allocatedMemory, occupiedMemory} + * but be reset since it doesn't make sense. + */ + @Override + public void clear() { + compensatingCounters = compensatingCounters.snapshot(this); + } + + @Override + public long getHits() { + return get.sum(EnumSet.of(TierOperationOutcomes.GetOutcome.HIT)) + + putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.HIT)) + + replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.REPLACED)) + + compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.HIT)) + + computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.HIT)) + + conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED)) + + conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED)) - + compensatingCounters.hits; + } + + @Override + public long getMisses() { + return get.sum(EnumSet.of(TierOperationOutcomes.GetOutcome.MISS)) + + putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)) + + replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.MISS)) + + computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.NOOP)) + + conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS)) + + conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS)) - + compensatingCounters.misses; + } + + @Override + public long getPuts() { + return put.sum(EnumSet.of(StoreOperationOutcomes.PutOutcome.PUT)) + + putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)) + + compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.PUT)) + + computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.PUT)) + + replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.REPLACED)) + + conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED)) - + compensatingCounters.puts; + } + + @Override + public long getRemovals() { + return remove.sum(EnumSet.of(StoreOperationOutcomes.RemoveOutcome.REMOVED)) + + compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.REMOVED)) + + conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED)) - + compensatingCounters.removals; + } + + @Override + public long getEvictions() { + return eviction.sum(EnumSet.of(TierOperationOutcomes.EvictionOutcome.SUCCESS)) - + compensatingCounters.evictions; + } + + @Override + public long getExpirations() { + return expiration.sum() - compensatingCounters.expirations; + } + + @Override + public long getMappings() { + return mapping.map(ValueStatistic::value).orElse(-1L); + } + + @Override + public long getAllocatedByteSize() { + return allocatedMemory.map(ValueStatistic::value).orElse(-1L); + } + + @Override + public long getOccupiedByteSize() { + return occupiedMemory.map(ValueStatistic::value).orElse(-1L); + } + + private static class CompensatingCounters { + final long hits; + final long misses; + final long puts; + final long removals; + final long evictions; + final long expirations; + + private CompensatingCounters(long hits, long misses, long puts, long removals, long evictions, long expirations) { + this.hits = hits; + this.misses = misses; + this.puts = puts; + this.removals = removals; + this.evictions = evictions; + this.expirations = expirations; + } + + static CompensatingCounters empty() { + return new CompensatingCounters(0, 0, 0, 0, 0, 0); + } + + CompensatingCounters snapshot(DefaultTierStatistics statistics) { + return new CompensatingCounters( + statistics.getHits() + hits, + statistics.getMisses() + misses, + statistics.getPuts() + puts, + statistics.getRemovals() + removals, + statistics.getEvictions() + evictions, + statistics.getExpirations() + expirations + ); + } + } +} diff --git a/ehcache-management/src/main/java/org/ehcache/management/statistics/DelegatedMappedOperationStatistics.java b/ehcache-management/src/main/java/org/ehcache/management/statistics/DelegatedMappedOperationStatistics.java new file mode 100644 index 0000000000..cfbb3be33d --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/statistics/DelegatedMappedOperationStatistics.java @@ -0,0 +1,109 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.statistics; + +import org.ehcache.core.statistics.ChainedOperationObserver; +import org.ehcache.core.statistics.OperationStatistic; +import org.terracotta.statistics.MappedOperationStatistic; + +import java.util.Collection; +import java.util.Set; +import java.util.stream.Collectors; + +public class DelegatedMappedOperationStatistics, D extends Enum> implements OperationStatistic { + + private final MappedOperationStatistic delegate; + + public DelegatedMappedOperationStatistics(MappedOperationStatistic operationStatistic) { + this.delegate = operationStatistic; + } + + @Override + public Class type() { + return delegate.type(); + } + + @Override + public long count(D type) { + return delegate.count(type); + } + + @Override + public long sum(Set types) { + return delegate.sum(types); + } + + @Override + public long sum() { + return delegate.sum(); + } + + @Override + public void begin() { + delegate.begin(); + } + + @Override + public void end(D result) { + delegate.end(result); + } + + @Override + public void addDerivedStatistic(ChainedOperationObserver derived) { + delegate.addDerivedStatistic(convert(derived)); + } + + @Override + public void removeDerivedStatistic(ChainedOperationObserver derived) { + delegate.removeDerivedStatistic(convert(derived)); + } + + @Override + public Collection> getDerivedStatistics() { + Collection> derivedStatistics = delegate.getDerivedStatistics(); + return derivedStatistics.stream().map(this::revert).collect(Collectors.toSet()); + } + + private ChainedOperationObserver revert(org.terracotta.statistics.observer.ChainedOperationObserver observer) { + return new ChainedOperationObserver() { + @Override + public void begin(long time) { + observer.begin(time); + } + + @Override + public void end(long time, long latency, D result) { + observer.end(time, latency, result); + } + }; + } + + private org.terracotta.statistics.observer.ChainedOperationObserver convert(ChainedOperationObserver observer) { + return new org.terracotta.statistics.observer.ChainedOperationObserver() { + @Override + public void begin(long time) { + observer.begin(time); + } + + @Override + public void end(long time, long latency, D result) { + observer.end(time, latency, result); + } + }; + } + + +} diff --git a/ehcache-management/src/main/java/org/ehcache/management/statistics/DelegatingOperationObserver.java b/ehcache-management/src/main/java/org/ehcache/management/statistics/DelegatingOperationObserver.java new file mode 100644 index 0000000000..773b28d11a --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/statistics/DelegatingOperationObserver.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.statistics; + +import org.ehcache.core.statistics.OperationObserver; + +public class DelegatingOperationObserver> implements OperationObserver { + + private final org.terracotta.statistics.observer.OperationObserver observer; + + public DelegatingOperationObserver(org.terracotta.statistics.observer.OperationObserver operationObserver) { + this.observer = operationObserver; + } + + @Override + public void begin() { + this.observer.begin(); + } + + @Override + public void end(T result) { + this.observer.end(result); + } +} diff --git a/ehcache-management/src/main/java/org/ehcache/management/statistics/StatsUtils.java b/ehcache-management/src/main/java/org/ehcache/management/statistics/StatsUtils.java new file mode 100644 index 0000000000..39f313ce5f --- /dev/null +++ b/ehcache-management/src/main/java/org/ehcache/management/statistics/StatsUtils.java @@ -0,0 +1,270 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.management.statistics; + +import org.ehcache.Cache; +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.terracotta.context.ContextManager; +import org.terracotta.context.TreeNode; +import org.terracotta.context.query.Matcher; +import org.terracotta.context.query.Matchers; +import org.terracotta.context.query.Query; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.derived.OperationResultFilter; + +import java.util.Collections; +import java.util.EnumSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.Consumer; + +import static org.terracotta.context.query.Matchers.attributes; +import static org.terracotta.context.query.Matchers.context; +import static org.terracotta.context.query.Matchers.hasAttribute; +import static org.terracotta.context.query.Matchers.identifier; +import static org.terracotta.context.query.Matchers.subclassOf; +import static org.terracotta.context.query.QueryBuilder.queryBuilder; + +/** + * Class allowing to query cache and tier statistics + */ +public final class StatsUtils { + + private StatsUtils() {} + + public static Matcher> hasTag(final String tag) { + return hasAttribute("tags", new Matcher>() { + @Override + protected boolean matchesSafely(Set object) { + return object.contains(tag); + } + }); + } + + public static Matcher> hasProperty(final String key, final String value) { + return hasAttribute("properties", new Matcher>() { + @Override + protected boolean matchesSafely(Map properties) { + Object val = properties.get(key); + return val != null && value.equals(val); + } + }); + } + + /** + * Search for a statistic on the descendant of the context that matches the tag and statistic name. + * + * @param context the context of the query + * @param discriminator a filter on the discriminator property + * @param tag the tag we are looking for + * @param statName statistic name + * @param type of the statistic that will be returned + * @return the wanted statistic or null if no such statistic is found + * @throws RuntimeException when more than one matching statistic is found + */ + public static Optional findStatisticOnDescendants(Object context, String discriminator, String tag, String statName) { + + @SuppressWarnings("unchecked") + Set statResult = queryBuilder() + .descendants() + .filter(context(attributes(Matchers.allOf( + hasAttribute("name", statName), + hasProperty("discriminator", discriminator), + hasTag(tag))))) + .build().execute(Collections.singleton(ContextManager.nodeFor(context))); + + if (statResult.size() > 1) { + throw new RuntimeException("One stat expected for " + statName + " but found " + statResult.size()); + } + + if (statResult.size() == 1) { + @SuppressWarnings("unchecked") + T result = (T) statResult.iterator().next().getContext().attributes().get("this"); + return Optional.ofNullable(result); + } + + // No such stat in this context + return Optional.empty(); + } + + /** + * Search for a statistic on the descendant of the context that matches the tag and statistic name. + * + * @param context the context of the query + * @param tag the tag we are looking for + * @param statName statistic name + * @param type of the statistic that will be returned + * @return the wanted statistic or null if no such statistic is found + * @throws RuntimeException when more than one matching statistic is found + */ + public static Optional findStatisticOnDescendants(Object context, String tag, String statName) { + + @SuppressWarnings("unchecked") + Set statResult = queryBuilder() + .descendants() + .filter(context(attributes(Matchers.allOf( + hasAttribute("name", statName), + hasTag(tag))))) + .build().execute(Collections.singleton(ContextManager.nodeFor(context))); + + if (statResult.size() > 1) { + throw new RuntimeException("One stat expected for " + statName + " but found " + statResult.size()); + } + + if (statResult.size() == 1) { + @SuppressWarnings("unchecked") + T result = (T) statResult.iterator().next().getContext().attributes().get("this"); + return Optional.ofNullable(result); + } + + // No such stat in this context + return Optional.empty(); + } + + /** + * Find an operation statistic attached (as a children) to this context that matches the statistic name and type + * + * @param context the context of the query + * @param type type of the operation statistic + * @param statName statistic name + * @param type of the operation statistic content + * @return the operation statistic searched for + * @throws RuntimeException if 0 or more than 1 result is found + */ + public static > OperationStatistic findOperationStatisticOnChildren(Object context, Class type, String statName) { + @SuppressWarnings("unchecked") + Query query = queryBuilder() + .children() + .filter(context(attributes(Matchers.allOf(hasAttribute("name", statName), hasAttribute("type", type))))) + .build(); + + Set result = query.execute(Collections.singleton(ContextManager.nodeFor(context))); + if (result.size() > 1) { + throw new RuntimeException("result must be unique"); + } + if (result.isEmpty()) { + throw new RuntimeException("result must not be null"); + } + @SuppressWarnings("unchecked") + OperationStatistic statistic = (OperationStatistic) result.iterator().next().getContext().attributes().get("this"); + return statistic; + } + + /** + * Find the list of tiers of a cache. We assume a lot of things here. + *

                                        + *
                                      • The "eviction" statistic is available on the tier
                                      • + *
                                      • That the tiers have only one tag attribute
                                      • + *
                                      • That this tag contains the tier name
                                      • + *
                                      • That the only descendants having an "eviction" statistic are the tiers
                                      • + *
                                      + * + * @param cache the context for looking for tiers + * @return an array of tier names + * @throws RuntimeException if not tiers are found or if tiers have multiple tags + */ + public static String[] findTiers(Cache cache) { + // Here I'm randomly taking the eviction observer because it exists on all tiers + @SuppressWarnings("unchecked") + Query statQuery = queryBuilder() + .descendants() + .filter(context(attributes(Matchers.allOf(hasAttribute("name", "eviction"), hasAttribute("type", StoreOperationOutcomes.EvictionOutcome.class))))) + .build(); + + Set statResult = statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); + + if (statResult.isEmpty()) { + throw new RuntimeException("Failed to find tiers using the eviction observer, valid result Set sizes must 1 or more"); + } + + String[] tiers = new String[statResult.size()]; + + int i = 0; + for (TreeNode treeNode : statResult) { + Set tags = (Set) treeNode.getContext().attributes().get("tags"); + if (tags.size() != 1) { + throw new RuntimeException("We expect tiers to have only one tag"); + } + + String storeType = tags.iterator().next().toString(); + tiers[i++] = storeType; + } + return tiers; + } + + /** + * Find the lowest tier from a list of tier. We assume a lot of things here that the tiers depth + * magically matches the alphabetical order. + * + * @param tiers all tiers + * @return the lowest tier + */ + public static String findLowestTier(String[] tiers) { + //if only 1 store then you don't need to find the lowest tier + if (tiers.length == 1) { + return tiers[0]; + } + + //we expect at least one tier + if (tiers.length == 0) { + throw new RuntimeException("No existing tier"); + } + + // We rely here on the alphabetical order matching the depth order so from highest to lowest we have + // OnHeap, OffHeap, Disk, Clustered + String lowestTier = tiers[0]; + for (int i = 1; i < tiers.length; i++) { + if (tiers[i].compareTo(lowestTier) < 0) { + lowestTier = tiers[i]; + } + } + + return lowestTier; + } + + public static > boolean hasOperationStat(Object rootNode, Class statisticType, String statName) { + Query q = queryBuilder().descendants() + .filter(context(identifier(subclassOf(OperationStatistic.class)))) + .filter(context(attributes(Matchers.allOf( + hasAttribute("name", statName), + hasAttribute("this", new Matcher>() { + @Override + protected boolean matchesSafely(OperationStatistic object) { + return object.type().equals(statisticType); + } + }) + )))) + .build(); + + Set result = q.execute(Collections.singleton(ContextManager.nodeFor(rootNode))); + + if (result.size() > 1) { + throw new RuntimeException("a zero or a single stat was expected; found " + result.size()); + } + + return !result.isEmpty(); + } + + public static void registerClearNotification(String alias, Cache cache, Consumer cacheClear) { + OperationStatistic clear = StatsUtils.findOperationStatisticOnChildren(cache, + CacheOperationOutcomes.ClearOutcome.class, "clear"); + clear.addDerivedStatistic(new OperationResultFilter<>(EnumSet.of(CacheOperationOutcomes.ClearOutcome.SUCCESS), + (time, latency) -> cacheClear.accept(alias))); + } +} diff --git a/ehcache-management/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/ehcache-management/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory new file mode 100644 index 0000000000..fb6fb41c9f --- /dev/null +++ b/ehcache-management/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory @@ -0,0 +1,2 @@ +org.ehcache.management.registry.DefaultManagementRegistryFactory +org.ehcache.management.statistics.DefaultExtendedStatisticsServiceFactory diff --git a/management/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser b/ehcache-management/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser similarity index 100% rename from management/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser rename to ehcache-management/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser diff --git a/management/src/main/resources/ehcache-management-ext.xsd b/ehcache-management/src/main/resources/ehcache-management-ext.xsd similarity index 93% rename from management/src/main/resources/ehcache-management-ext.xsd rename to ehcache-management/src/main/resources/ehcache-management-ext.xsd index 13d8130b11..927129c9c9 100644 --- a/management/src/main/resources/ehcache-management-ext.xsd +++ b/ehcache-management/src/main/resources/ehcache-management-ext.xsd @@ -34,7 +34,7 @@ - + diff --git a/management/src/test/java/org/ehcache/docs/ManagementTest.java b/ehcache-management/src/test/java/org/ehcache/docs/ManagementTest.java similarity index 87% rename from management/src/test/java/org/ehcache/docs/ManagementTest.java rename to ehcache-management/src/test/java/org/ehcache/docs/ManagementTest.java index 60840f9bac..53bebb923f 100644 --- a/management/src/test/java/org/ehcache/docs/ManagementTest.java +++ b/ehcache-management/src/test/java/org/ehcache/docs/ManagementTest.java @@ -29,7 +29,6 @@ import org.ehcache.management.registry.DefaultManagementRegistryService; import org.ehcache.management.registry.DefaultSharedManagementService; import org.hamcrest.Matchers; -import org.junit.Assert; import org.junit.Test; import org.terracotta.management.model.call.Parameter; import org.terracotta.management.model.capabilities.Capability; @@ -45,6 +44,8 @@ import java.util.Collection; import java.util.Iterator; +import static org.hamcrest.MatcherAssert.assertThat; + public class ManagementTest { @Test @@ -94,7 +95,7 @@ public void usingManagementRegistry() throws Exception { ContextualStatistics statisticsContext = counters.getResult(context); - Assert.assertThat(counters.size(), Matchers.is(1)); + assertThat(counters.size(), Matchers.is(1)); } finally { if(cacheManager != null) cacheManager.close(); @@ -118,30 +119,33 @@ public void capabilitiesAndContexts() throws Exception { Collection capabilities = managementRegistry.getCapabilities(); // <1> - Assert.assertThat(capabilities.isEmpty(), Matchers.is(false)); + assertThat(capabilities.isEmpty(), Matchers.is(false)); Capability capability = capabilities.iterator().next(); String capabilityName = capability.getName(); // <2> Collection capabilityDescriptions = capability.getDescriptors(); // <3> - Assert.assertThat(capabilityDescriptions.isEmpty(), Matchers.is(false)); + assertThat(capabilityDescriptions.isEmpty(), Matchers.is(false)); CapabilityContext capabilityContext = capability.getCapabilityContext(); Collection attributes = capabilityContext.getAttributes(); // <4> - Assert.assertThat(attributes.size(), Matchers.is(2)); + assertThat(attributes.size(), Matchers.is(3)); Iterator iterator = attributes.iterator(); CapabilityContext.Attribute attribute1 = iterator.next(); - Assert.assertThat(attribute1.getName(), Matchers.equalTo("cacheManagerName")); // <5> - Assert.assertThat(attribute1.isRequired(), Matchers.is(true)); + assertThat(attribute1.getName(), Matchers.equalTo("instanceId")); // <5> + assertThat(attribute1.isRequired(), Matchers.is(true)); CapabilityContext.Attribute attribute2 = iterator.next(); - Assert.assertThat(attribute2.getName(), Matchers.equalTo("cacheName")); // <6> - Assert.assertThat(attribute2.isRequired(), Matchers.is(true)); + assertThat(attribute2.getName(), Matchers.equalTo("cacheManagerName")); // <5> + assertThat(attribute2.isRequired(), Matchers.is(true)); + CapabilityContext.Attribute attribute3 = iterator.next(); + assertThat(attribute3.getName(), Matchers.equalTo("cacheName")); // <6> + assertThat(attribute3.isRequired(), Matchers.is(true)); ContextContainer contextContainer = managementRegistry.getContextContainer(); // <7> - Assert.assertThat(contextContainer.getName(), Matchers.equalTo("cacheManagerName")); // <8> - Assert.assertThat(contextContainer.getValue(), Matchers.startsWith("cache-manager-")); + assertThat(contextContainer.getName(), Matchers.equalTo("cacheManagerName")); // <8> + assertThat(contextContainer.getValue(), Matchers.startsWith("cache-manager-")); Collection subContexts = contextContainer.getSubContexts(); - Assert.assertThat(subContexts.size(), Matchers.is(1)); + assertThat(subContexts.size(), Matchers.is(1)); ContextContainer subContextContainer = subContexts.iterator().next(); - Assert.assertThat(subContextContainer.getName(), Matchers.equalTo("cacheName")); // <9> - Assert.assertThat(subContextContainer.getValue(), Matchers.equalTo("aCache")); + assertThat(subContextContainer.getName(), Matchers.equalTo("cacheName")); // <9> + assertThat(subContextContainer.getValue(), Matchers.equalTo("aCache")); } finally { if(cacheManager != null) cacheManager.close(); @@ -175,7 +179,7 @@ public void actionCall() throws Exception { .build() .execute(); - Assert.assertThat(aCache.get(0L), Matchers.is(Matchers.nullValue())); // <4> + assertThat(aCache.get(0L), Matchers.is(Matchers.nullValue())); // <4> } finally { if(cacheManager != null) cacheManager.close(); @@ -231,7 +235,7 @@ public void managingMultipleCacheManagers() throws Exception { ContextualStatistics statisticsContext1 = counters.getResult(context1); - Number counterContext1 = statisticsContext1.getStatistic("Cache:MissCount"); + Long counterContext1 = statisticsContext1.getLatestSampleValue("Cache:MissCount").get(); // miss count is a sampled stat, for example its values could be [0,1,2]. // In the present case, only the last value is important to us , the cache was eventually missed 2 times diff --git a/ehcache-management/src/test/java/org/ehcache/management/ManagementRegistryServiceConfigurationParserIT.java b/ehcache-management/src/test/java/org/ehcache/management/ManagementRegistryServiceConfigurationParserIT.java new file mode 100644 index 0000000000..4c33101298 --- /dev/null +++ b/ehcache-management/src/test/java/org/ehcache/management/ManagementRegistryServiceConfigurationParserIT.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management; + +import org.ehcache.config.Configuration; +import org.ehcache.xml.XmlConfiguration; +import org.junit.Test; + +import javax.xml.namespace.QName; +import java.net.URL; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.xmlunit.diff.ElementSelectors.byNameAndText; +import static org.xmlunit.diff.ElementSelectors.selectorForElementNamed; + +/** + * ManagementRegistryServiceConfigurationParserIT + */ +public class ManagementRegistryServiceConfigurationParserIT { + + @Test + public void testManagementRegistryXmlTranslationToString() { + URL resource = ManagementRegistryServiceConfigurationParserIT.class.getResource("/ehcache-management.xml"); + Configuration config = new XmlConfiguration(resource); + XmlConfiguration xmlConfig = new XmlConfiguration(config); + assertThat(xmlConfig.toString(), isSameConfigurationAs(resource, selectorForElementNamed(new QName("http://www.ehcache.org/v3/management", "tag"), byNameAndText))); + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java b/ehcache-management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java similarity index 84% rename from management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java rename to ehcache-management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java index 5ab4f508f2..cdf495f79a 100644 --- a/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java +++ b/ehcache-management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java @@ -16,12 +16,12 @@ package org.ehcache.management.providers.actions; import org.ehcache.config.CacheRuntimeConfiguration; -import org.ehcache.core.EhcacheWithLoaderWriter; +import org.ehcache.core.Ehcache; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.ehcache.management.providers.CacheBinding; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; -import org.hamcrest.Matcher; import org.junit.Test; +import org.mockito.Mockito; import org.terracotta.management.model.call.Parameter; import org.terracotta.management.model.capabilities.context.CapabilityContext; import org.terracotta.management.model.capabilities.descriptors.CallDescriptor; @@ -39,33 +39,33 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class EhcacheActionProviderTest { - Context cmContext = Context.create("cacheManagerName", "myCacheManagerName"); - Context cmContext_0 = Context.create("cacheManagerName", "cache-manager-0"); - ManagementRegistryServiceConfiguration cmConfig = new DefaultManagementRegistryConfiguration().setContext(cmContext); - ManagementRegistryServiceConfiguration cmConfig_0 = new DefaultManagementRegistryConfiguration().setContext(cmContext_0); + ManagementRegistryServiceConfiguration cmConfig = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManagerName"); + ManagementRegistryServiceConfiguration cmConfig_0 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("cache-manager-0"); + + Context cmContext = cmConfig.getContext(); + Context cmContext_0 = cmConfig_0.getContext(); @Test @SuppressWarnings("unchecked") public void testDescriptions() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig); - ehcacheActionProvider.register(new CacheBinding("myCacheName1", mock(EhcacheWithLoaderWriter.class))); - ehcacheActionProvider.register(new CacheBinding("myCacheName2", mock(EhcacheWithLoaderWriter.class))); + ehcacheActionProvider.register(new CacheBinding("myCacheName1", mock(Ehcache.class))); + ehcacheActionProvider.register(new CacheBinding("myCacheName2", mock(Ehcache.class))); Collection descriptions = ehcacheActionProvider.getDescriptors(); assertThat(descriptions.size(), is(4)); - assertThat(descriptions, (Matcher) containsInAnyOrder( + assertThat(descriptions, containsInAnyOrder( new CallDescriptor("remove", "void", Collections.singletonList(new CallDescriptor.Parameter("key", "java.lang.Object"))), new CallDescriptor("get", "java.lang.Object", Collections.singletonList(new CallDescriptor.Parameter("key", "java.lang.Object"))), new CallDescriptor("put", "void", Arrays.asList(new CallDescriptor.Parameter("key", "java.lang.Object"), new CallDescriptor.Parameter("value", "java.lang.Object"))), - new CallDescriptor("clear", "void", Collections.emptyList()) + new CallDescriptor("clear", "void", Collections.emptyList()) )); } @@ -73,15 +73,18 @@ public void testDescriptions() throws Exception { public void testCapabilityContext() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig); - ehcacheActionProvider.register(new CacheBinding("myCacheName1", mock(EhcacheWithLoaderWriter.class))); - ehcacheActionProvider.register(new CacheBinding("myCacheName2", mock(EhcacheWithLoaderWriter.class))); + ehcacheActionProvider.register(new CacheBinding("myCacheName1", mock(Ehcache.class))); + ehcacheActionProvider.register(new CacheBinding("myCacheName2", mock(Ehcache.class))); CapabilityContext capabilityContext = ehcacheActionProvider.getCapabilityContext(); - assertThat(capabilityContext.getAttributes().size(), is(2)); + assertThat(capabilityContext.getAttributes().size(), is(3)); Iterator iterator = capabilityContext.getAttributes().iterator(); CapabilityContext.Attribute next = iterator.next(); + assertThat(next.getName(), equalTo("instanceId")); + assertThat(next.isRequired(), is(true)); + next = iterator.next(); assertThat(next.getName(), equalTo("cacheManagerName")); assertThat(next.isRequired(), is(true)); next = iterator.next(); @@ -94,7 +97,7 @@ public void testCollectStatistics() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig); try { - ehcacheActionProvider.collectStatistics(null, null); + ehcacheActionProvider.collectStatistics(null, null, 0); fail("expected UnsupportedOperationException"); } catch (UnsupportedOperationException uoe) { // expected @@ -105,13 +108,12 @@ public void testCollectStatistics() throws Exception { public void testCallAction_happyPathNoParam() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); - CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); + Ehcache ehcache = mock(Ehcache.class); + CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); when(cacheRuntimeConfiguration.getClassLoader()).thenReturn(ClassLoader.getSystemClassLoader()); when(ehcache.getRuntimeConfiguration()).thenReturn(cacheRuntimeConfiguration); ehcacheActionProvider.register(new CacheBinding("cache-0", ehcache)); - Context context = cmContext_0.with("cacheName", "cache-0"); ehcacheActionProvider.callAction(context, "clear", Void.class); @@ -124,7 +126,7 @@ public void testCallAction_happyPathWithParams() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); @SuppressWarnings("unchecked") - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); + Ehcache ehcache = mock(Ehcache.class); @SuppressWarnings("unchecked") CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); when(cacheRuntimeConfiguration.getClassLoader()).thenReturn(ClassLoader.getSystemClassLoader()); @@ -144,7 +146,7 @@ public void testCallAction_happyPathWithParams() throws Exception { public void testCallAction_noSuchCache() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); + Ehcache ehcache = mock(Ehcache.class); ehcacheActionProvider.register(new CacheBinding("cache-0", ehcache)); Context context = cmContext_0.with("cacheName", "cache-1"); @@ -163,7 +165,7 @@ public void testCallAction_noSuchCache() throws Exception { public void testCallAction_noSuchCacheManager() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); + Ehcache ehcache = mock(Ehcache.class); ehcacheActionProvider.register(new CacheBinding("cache-0", ehcache)); Context context = Context.empty() @@ -184,8 +186,8 @@ public void testCallAction_noSuchCacheManager() throws Exception { public void testCallAction_noSuchMethodName() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); - CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); + Ehcache ehcache = mock(Ehcache.class); + CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); when(cacheRuntimeConfiguration.getClassLoader()).thenReturn(ClassLoader.getSystemClassLoader()); when(ehcache.getRuntimeConfiguration()).thenReturn(cacheRuntimeConfiguration); ehcacheActionProvider.register(new CacheBinding("cache-0", ehcache)); @@ -205,7 +207,7 @@ public void testCallAction_noSuchMethod() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); @SuppressWarnings("unchecked") - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); + Ehcache ehcache = mock(Ehcache.class); @SuppressWarnings("unchecked") CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); when(cacheRuntimeConfiguration.getClassLoader()).thenReturn(ClassLoader.getSystemClassLoader()); @@ -226,4 +228,8 @@ public void testCallAction_noSuchMethod() throws Exception { verify(ehcache, times(0)).get(null); } + @SuppressWarnings("unchecked") + private static T mock(Class clazz) { + return Mockito.mock((Class) clazz); + } } diff --git a/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java b/ehcache-management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java similarity index 81% rename from management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java rename to ehcache-management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java index af358598f5..0761bf46ea 100644 --- a/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java +++ b/ehcache-management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java @@ -21,10 +21,9 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.management.SharedManagementService; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; @@ -41,9 +40,9 @@ import java.io.FileNotFoundException; import java.io.IOException; +import java.time.Duration; import java.util.Collection; import java.util.Scanner; -import java.util.concurrent.TimeUnit; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.junit.Assert.assertEquals; @@ -77,7 +76,7 @@ public void test_standalone_ehcache() throws IOException { .heap(10, EntryUnit.ENTRIES) .offheap(1, MemoryUnit.MB) .disk(2, MemoryUnit.MB, true)) - .withExpiry(Expirations.noExpiration()) + .withExpiry(ExpiryPolicyBuilder.noExpiration()) .build(); CacheConfiguration cacheConfiguration2 = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, @@ -85,25 +84,31 @@ public void test_standalone_ehcache() throws IOException { .heap(10, EntryUnit.ENTRIES) .offheap(1, MemoryUnit.MB) .disk(2, MemoryUnit.MB, true)) - .withExpiry(Expirations.timeToIdleExpiration(Duration.of(2, TimeUnit.HOURS))) + .withExpiry(ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofHours(2))) .build(); // ehcache cache manager + DefaultManagementRegistryConfiguration serviceConfiguration = new DefaultManagementRegistryConfiguration() + .setCacheManagerAlias("my-cm-1") + .addTag("boo") + .addTags("foo", "baz"); + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .using(sharedManagementService) .using(new DefaultPersistenceConfiguration(ROOT.newFolder("test_standalone_ehcache"))) - .using(new DefaultManagementRegistryConfiguration() - .setCacheManagerAlias("my-cm-1") - .addTag("boo") - .addTags("foo", "baz")) + .using(serviceConfiguration) .withCache("cache-1", cacheConfiguration1) .withCache("cache-2", cacheConfiguration2) .build(false); cacheManager.init(); - String expected = read("/settings-capability.json"); - String actual = mapper.writeValueAsString(getSettingsCapability()).replaceAll("\\\"cacheManagerDescription\\\":\\\".*\\\",\\\"status\\\"", "\\\"cacheManagerDescription\\\":\\\"\\\",\\\"status\\\""); + String expected = read("/settings-capability.json") + .replaceAll("instance-id", serviceConfiguration.getInstanceId()); + String actual = mapper.writeValueAsString(getSettingsCapability()) + .replaceAll("\\\"cacheManagerDescription\\\":\\\".*\\\",\\\"instanceId\\\"", "\\\"cacheManagerDescription\\\":\\\"\\\",\\\"instanceId\\\"") + .replaceAll("\\\"instanceId\\\":\\\".*\\\",\\\"managementContext\\\"", "\\\"instanceId\\\":\\\"UUID\\\",\\\"managementContext\\\"") + .replaceAll("\\\"instanceId\\\":\\\".*\\\",\\\"cacheManagerName\\\"", "\\\"instanceId\\\":\\\"UUID\\\",\\\"cacheManagerName\\\""); // assertThat for formatted string comparison: ide support is bad assertEquals(expected, actual); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java similarity index 84% rename from management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java rename to ehcache-management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java index 239c97cc95..67fbc958b0 100644 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java +++ b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java @@ -15,12 +15,14 @@ */ package org.ehcache.management.providers.statistics; -import org.ehcache.core.EhcacheWithLoaderWriter; -import org.ehcache.core.spi.service.StatisticsService; -import org.ehcache.impl.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.Ehcache; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.core.spi.time.TimeSource; +import org.ehcache.management.ExtendedStatisticsService; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.ehcache.management.providers.CacheBinding; import org.ehcache.management.providers.ExposedCacheBinding; +import org.ehcache.management.statistics.DefaultExtendedStatisticsService; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.hamcrest.Matcher; import org.junit.After; @@ -47,10 +49,11 @@ public class EhcacheStatisticsProviderTest { ScheduledExecutorService executor = Executors.newScheduledThreadPool(1); - StatisticsService statisticsService = new DefaultStatisticsService(); + ExtendedStatisticsService statisticsService = new DefaultExtendedStatisticsService(); Context cmContext_0 = Context.create("cacheManagerName", "cache-manager-0"); ManagementRegistryServiceConfiguration cmConfig_0 = new DefaultManagementRegistryConfiguration() .setContext(cmContext_0); + TimeSource timeSource = SystemTimeSource.INSTANCE; @After public void tearDown() throws Exception { @@ -60,7 +63,7 @@ public void tearDown() throws Exception { @Test @SuppressWarnings("unchecked") public void testDescriptions() throws Exception { - EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, statisticsService) { + EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, statisticsService, timeSource) { @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { StandardEhcacheStatistics mock = mock(StandardEhcacheStatistics.class); @@ -73,7 +76,7 @@ protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { } }; - ehcacheStatisticsProvider.register(new CacheBinding("cache-0", mock(EhcacheWithLoaderWriter.class))); + ehcacheStatisticsProvider.register(new CacheBinding("cache-0", mock(Ehcache.class))); Collection descriptions = ehcacheStatisticsProvider.getDescriptors(); assertThat(descriptions.size(), is(3)); @@ -86,7 +89,7 @@ protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { @Test public void testCapabilityContext() throws Exception { - EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, statisticsService) { + EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, statisticsService, timeSource) { @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { return mock(StandardEhcacheStatistics.class); @@ -94,14 +97,17 @@ protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { }; - ehcacheStatisticsProvider.register(new CacheBinding("cache-0", mock(EhcacheWithLoaderWriter.class))); + ehcacheStatisticsProvider.register(new CacheBinding("cache-0", mock(Ehcache.class))); CapabilityContext capabilityContext = ehcacheStatisticsProvider.getCapabilityContext(); - assertThat(capabilityContext.getAttributes().size(), is(2)); + assertThat(capabilityContext.getAttributes().size(), is(3)); Iterator iterator = capabilityContext.getAttributes().iterator(); CapabilityContext.Attribute next = iterator.next(); + assertThat(next.getName(), equalTo("instanceId")); + assertThat(next.isRequired(), is(true)); + next = iterator.next(); assertThat(next.getName(), equalTo("cacheManagerName")); assertThat(next.isRequired(), is(true)); next = iterator.next(); @@ -111,7 +117,7 @@ protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { @Test public void testCallAction() throws Exception { - EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, statisticsService); + EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, statisticsService, timeSource); try { ehcacheStatisticsProvider.callAction(null, null); diff --git a/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/LatencyHistogramConfigurationTest.java b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/LatencyHistogramConfigurationTest.java new file mode 100644 index 0000000000..75c97ea12e --- /dev/null +++ b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/LatencyHistogramConfigurationTest.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import org.ehcache.management.registry.LatencyHistogramConfiguration; +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +public class LatencyHistogramConfigurationTest { + + @Test + public void test() { + LatencyHistogramConfiguration conf = LatencyHistogramConfiguration.DEFAULT; + assertThat(conf.getPhi()).isEqualTo(LatencyHistogramConfiguration.DEFAULT_PHI); + assertThat(conf.getBucketCount()).isEqualTo(LatencyHistogramConfiguration.DEFAULT_BUCKET_COUNT); + assertThat(conf.getWindow()).isEqualTo(LatencyHistogramConfiguration.DEFAULT_WINDOW); + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhCacheStatisticsQueryTest.java b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StandardEhCacheStatisticsQueryTest.java similarity index 78% rename from management/src/test/java/org/ehcache/management/providers/statistics/StandardEhCacheStatisticsQueryTest.java rename to ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StandardEhCacheStatisticsQueryTest.java index 3b626f822e..ff3e70125b 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhCacheStatisticsQueryTest.java +++ b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StandardEhCacheStatisticsQueryTest.java @@ -24,16 +24,15 @@ import org.ehcache.CacheManager; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.hamcrest.Matchers; -import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -51,7 +50,7 @@ import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.MB; import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; @RunWith(Parameterized.class) public class StandardEhCacheStatisticsQueryTest { @@ -95,24 +94,21 @@ public StandardEhCacheStatisticsQueryTest(Builder resou } @Test - public void test() throws InterruptedException, IOException { + public void test() throws IOException { - CacheManager cacheManager = null; + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - try { - - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) - .withEvictionAdvisor((key, value) -> key.equals(2L)) - .build(); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor((key, value) -> key.equals(2L)) + .withService(new StoreStatisticsConfiguration(true)) // explicitly enable statistics to make sure they are there even when using only one tier + .build(); - cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("myCache", cacheConfiguration) - .using(managementRegistry) - .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) - .build(true); + try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true)) { Context context = StatsUtil.createContext(managementRegistry); @@ -135,14 +131,9 @@ public void test() throws InterruptedException, IOException { } long cacheHitCount = getAndAssertExpectedValueFromCounter("Cache:HitCount", context, managementRegistry, cacheExpectedValue); - Assert.assertThat(tierHitCountSum, is(cacheHitCount)); + assertThat(tierHitCountSum, is(cacheHitCount)); } - finally { - if(cacheManager != null) { - cacheManager.close(); - } - } } /* @@ -150,7 +141,7 @@ public void test() throws InterruptedException, IOException { This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations change, the stats value isn't accessible or if you enter the wrong expectedResult. */ - public static long getAndAssertExpectedValueFromCounter(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { + public long getAndAssertExpectedValueFromCounter(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") .queryStatistics(singletonList(statName)) @@ -161,11 +152,11 @@ public static long getAndAssertExpectedValueFromCounter(String statName, Context ContextualStatistics statisticsContext = counters.getResult(context); - assertThat(counters.size(), Matchers.is(1)); + assertThat(statName + " for " + resources.getResourceTypeSet(), counters.size(), Matchers.is(1)); - Long counter = (Long) statisticsContext.getStatistic(statName); + Long counter = statisticsContext.getLatestSampleValue(statName).get(); - assertThat(counter, Matchers.is(expectedResult)); + assertThat(statName + " for " + resources.getResourceTypeSet(), counter, Matchers.is(expectedResult)); return counter; } diff --git a/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java new file mode 100755 index 0000000000..ce8d1749ae --- /dev/null +++ b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java @@ -0,0 +1,240 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.management.registry.LatencyHistogramConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.derived.OperationResultFilter; +import org.terracotta.statistics.derived.latency.LatencyHistogramStatistic; +import org.terracotta.statistics.observer.ChainedOperationObserver; +import org.terracotta.utilities.test.rules.TestRetryer; + +import java.time.Duration; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import static java.time.Duration.ofMillis; +import static java.util.Arrays.asList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.core.internal.statistics.StatsUtils.findOperationStatisticOnChildren; +import static org.terracotta.utilities.test.rules.TestRetryer.tryValues; + +public class StandardEhcacheStatisticsTest { + + @ClassRule @Rule + public static final TestRetryer TIME_BASE = tryValues(1, 2, 4, 8, 16, 32).map(i -> ofMillis(50).multipliedBy(i)); + + private CacheManager cacheManager; + private Cache cache; + private ManagementRegistryService managementRegistry; + private Context context; + + private Duration latency = Duration.ZERO; + private final Map systemOfRecords = new HashMap<>(); + + @Before + public void before() { + + // We need a loaderWriter to easily test latencies, to simulate a latency when loading from a SOR. + CacheLoaderWriter loaderWriter = new CacheLoaderWriter() { + @Override + public String load(Long key) throws Exception { + minimumSleep(latency); // latency simulation + return systemOfRecords.get(key); + } + + @Override + public void write(Long key, String value) { + minimumSleep(latency); // latency simulation + systemOfRecords.put(key, value); + } + + @Override + public void delete(Long key) { + minimumSleep(latency); // latency simulation + systemOfRecords.remove(key); + } + }; + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(1, EntryUnit.ENTRIES) + .offheap(10, MemoryUnit.MB)) + .withLoaderWriter(loaderWriter) + .build(); + + LatencyHistogramConfiguration latencyHistogramConfiguration = new LatencyHistogramConfiguration( + LatencyHistogramConfiguration.DEFAULT_PHI, + LatencyHistogramConfiguration.DEFAULT_BUCKET_COUNT, + TIME_BASE.get().multipliedBy(8L) + ); + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration() + .setCacheManagerAlias("myCacheManager3") + .setLatencyHistogramConfiguration(latencyHistogramConfiguration); + managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("cCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + cache = cacheManager.getCache("cCache", Long.class, String.class); + + context = StatsUtil.createContext(managementRegistry); + } + + @After + public void after() { + if(cacheManager != null) { + cacheManager.close(); + } + } + + @Test + public void statTest() throws InterruptedException { + cache.get(1L); // miss + cache.put(1L, "one"); // put + cache.get(1L); // hit + cache.remove(1L); // removal + + assertThat(getStatistic("Cache:MissCount")).isEqualTo(1L); + assertThat(getStatistic("Cache:HitCount")).isEqualTo(1L); + assertThat(getStatistic("Cache:PutCount")).isEqualTo(1L); + assertThat(getStatistic("Cache:RemovalCount")).isEqualTo(1L); + + for (String statistic : asList("GetMiss", "GetHit", "Put", "Remove")) { + long last = 0L; + for (String percentile : asList("50", "95", "99", "100")) { + long value = getStatistic("Cache:" + statistic + "Latency#" + percentile); + assertThat(value).isGreaterThanOrEqualTo(last); + last = value; + } + } + } + + private long getStatistic(String statName) { + ContextualStatistics latency = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Collections.singletonList(statName)) + .on(context) + .build() + .execute() + .getSingleResult(); + + assertThat(latency.size()).isEqualTo(1); + return latency.getLatestSampleValue(statName).get(); + } + + @Test + public void getCacheGetHitMissLatencies() { + + Consumer verifier = histogram -> { + assertThat(histogram.count()).isEqualTo(0L); + + latency = TIME_BASE.get().multipliedBy(2L); + cache.get(1L); + + latency = TIME_BASE.get().multipliedBy(1L); + cache.get(2L); + + assertThat(histogram.count()).isEqualTo(2L); + assertThat(histogram.maximum()).isGreaterThanOrEqualTo(TIME_BASE.get().multipliedBy(2L).toNanos()); + + minimumSleep(TIME_BASE.get().multipliedBy(10)); + + latency = TIME_BASE.get().multipliedBy(1L); + cache.get(3L); + + latency = TIME_BASE.get().multipliedBy(3L); + cache.get(4L); + + assertThat(histogram.count()).isEqualTo(2L); + assertThat(histogram.maximum()).isGreaterThanOrEqualTo(TIME_BASE.get().multipliedBy(3L).toNanos()); + }; + + verifier.accept(getHistogram(CacheOperationOutcomes.GetOutcome.MISS, "get")); + + systemOfRecords.put(1L, "a"); + systemOfRecords.put(2L, "b"); + systemOfRecords.put(3L, "c"); + systemOfRecords.put(4L, "d"); + systemOfRecords.put(5L, "e"); + + verifier.accept(getHistogram(CacheOperationOutcomes.GetOutcome.HIT, "get")); + } + + @SuppressWarnings("unchecked") + private > LatencyHistogramStatistic getHistogram(T type, String statName) { + OperationStatistic stat = findOperationStatisticOnChildren(cache, (Class) type.getClass(), statName); + Collection> derivedStatistics = stat.getDerivedStatistics(); + + LatencyHistogramStatistic histogram = (LatencyHistogramStatistic) derivedStatistics + .stream() + .map(s -> (OperationResultFilter) s) + .filter(s -> s.getTargets().contains(type)) + .map(s -> s.getDerivedStatistics().iterator().next()) + .findAny() + .get(); + + return histogram; + } + + // Java does not provide a guarantee that Thread.sleep will actually sleep long enough. + // In fact, on Windows, it does not sleep for long enough. + // This method keeps sleeping until the full time has passed. + // + // Using System.nanoTime (accurate to 1 micro-second or better) in lieu of System.currentTimeMillis (on Windows + // accurate to ~16ms), the inaccuracy of which compounds when invoked multiple times, as in this method. + + private void minimumSleep(Duration sleep) { + long end = System.nanoTime() + sleep.toNanos(); + while (true) { + long nanosLeft = end - System.nanoTime(); + + if (nanosLeft <= 0) { + break; + } + + try { + TimeUnit.NANOSECONDS.sleep(nanosLeft); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + } + } +} diff --git a/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java new file mode 100755 index 0000000000..bc9629e138 --- /dev/null +++ b/ehcache-management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import org.ehcache.management.ManagementRegistryService; +import org.hamcrest.Matchers; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.context.ContextContainer; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.registry.ResultSet; +import org.terracotta.management.registry.StatisticQuery; + +import static java.util.Collections.singletonList; +import static org.hamcrest.MatcherAssert.assertThat; + +public class StatsUtil { + + public static Context createContext(ManagementRegistryService managementRegistry) { + ContextContainer cacheManagerCtx = managementRegistry.getContextContainer(); + ContextContainer firstCacheCtx = cacheManagerCtx.getSubContexts().iterator().next(); + return managementRegistry.getConfiguration().getContext() + .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); + } +} diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java b/ehcache-management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java similarity index 90% rename from management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java rename to ehcache-management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java index dfa5f2d331..551a47e2a8 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java +++ b/ehcache-management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java @@ -41,15 +41,15 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; public class DefaultCollectorServiceTest { - @Test(timeout = 6000) + @Test public void test_collector() throws Exception { final Queue messages = new ConcurrentLinkedQueue<>(); - final List notifs = new ArrayList<>(6); - final CountDownLatch num = new CountDownLatch(5); + final List notifs = new ArrayList<>(7); + final CountDownLatch num = new CountDownLatch(6); CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, newResourcePoolsBuilder() @@ -95,20 +95,21 @@ void onEvent(Object event) { .call("startStatisticCollector", new Parameter(1L, long.class.getName()), new Parameter(TimeUnit.SECONDS, TimeUnit.class.getName())) - .on(Context.create("cacheManagerName", "my-cm-1")) + .on(managementRegistry.getConfiguration().getContext()) .build() .execute() .getSingleResult(); Cache cache = cacheManager.createCache("my-cache", cacheConfiguration); cache.put("key", "val"); + cache.clear(); - num.await(); + num.await(10, TimeUnit.SECONDS); cacheManager.removeCache("my-cache"); cacheManager.close(); - assertThat(notifs, equalTo(Arrays.asList("CACHE_MANAGER_AVAILABLE", "CACHE_MANAGER_CLOSED", "CACHE_MANAGER_AVAILABLE", "CACHE_ADDED", "CACHE_REMOVED", "CACHE_MANAGER_CLOSED"))); - assertThat(messages.size(), equalTo(7)); + assertThat(notifs, equalTo(Arrays.asList("CACHE_MANAGER_AVAILABLE", "CACHE_MANAGER_CLOSED", "CACHE_MANAGER_AVAILABLE", "CACHE_ADDED", "CACHE_CLEARED", "CACHE_REMOVED", "CACHE_MANAGER_CLOSED"))); + assertThat(messages.size(), equalTo(8)); } } diff --git a/ehcache-management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/ehcache-management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java new file mode 100644 index 0000000000..9245abef53 --- /dev/null +++ b/ehcache-management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -0,0 +1,463 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.registry; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.concurrent.ExecutionException; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.terracotta.management.model.call.ContextualReturn; +import org.terracotta.management.model.capabilities.Capability; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.context.ContextContainer; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.registry.ResultSet; +import org.terracotta.management.registry.StatisticQuery.Builder; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; + +@SuppressWarnings("try") +public class DefaultManagementRegistryServiceTest { + + private static final Collection ONHEAP_DESCRIPTORS = new ArrayList<>(); + private static final Collection ONHEAP_NO_STATS_DESCRIPTORS = new ArrayList<>(); + private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList<>(); + private static final Collection DISK_DESCRIPTORS = new ArrayList<>(); + private static final Collection CACHE_DESCRIPTORS = new ArrayList<>(); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Test + public void testCanGetContext() { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true)) { + + ContextContainer contextContainer = managementRegistry.getContextContainer(); + assertThat(contextContainer.getName()).isEqualTo("cacheManagerName"); + assertThat(contextContainer.getValue()).isEqualTo("myCM"); + assertThat(contextContainer.getSubContexts()).hasSize(1); + + ContextContainer subcontext = contextContainer.getSubContexts().iterator().next(); + assertThat(subcontext.getName()).isEqualTo("cacheName"); + assertThat(subcontext.getValue()).isEqualTo("aCache"); + } + } + + @Test + public void descriptorOnHeapTest() { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .withService(new StoreStatisticsConfiguration(true)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true)) { + + List capabilities = new ArrayList<>(managementRegistry.getCapabilities()); + assertThat(capabilities).hasSize(4); + assertThat(capabilities.get(0).getName()).isEqualTo("ActionsCapability"); + assertThat(capabilities.get(1).getName()).isEqualTo("SettingsCapability"); + assertThat(capabilities.get(2).getName()).isEqualTo("StatisticCollectorCapability"); + assertThat(capabilities.get(3).getName()).isEqualTo("StatisticsCapability"); + + assertThat(capabilities.get(0).getDescriptors()).hasSize(4); + + Collection descriptors = capabilities.get(3).getDescriptors(); + Collection allDescriptors = new ArrayList<>(); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors).containsOnlyElementsOf(allDescriptors); + } + } + + @Test + public void descriptorOnHeapTest_withoutStats() { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true)) { + + List capabilities = new ArrayList<>(managementRegistry.getCapabilities()); + assertThat(managementRegistry.getCapabilities()).hasSize(4); + assertThat(capabilities.get(0).getName()).isEqualTo("ActionsCapability"); + assertThat(capabilities.get(1).getName()).isEqualTo("SettingsCapability"); + assertThat(capabilities.get(2).getName()).isEqualTo("StatisticCollectorCapability"); + assertThat(capabilities.get(3).getName()).isEqualTo("StatisticsCapability"); + + assertThat(capabilities.get(0).getDescriptors()).hasSize(4); + + Collection descriptors = capabilities.get(3).getDescriptors(); + Collection allDescriptors = new ArrayList<>(); + allDescriptors.addAll(ONHEAP_NO_STATS_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors).containsOnlyElementsOf(allDescriptors); + } + } + + @Test + public void descriptorOffHeapTest() { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(5, MB).offheap(10, MB)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true)) { + + List capabilities = new ArrayList<>(managementRegistry.getCapabilities()); + assertThat(capabilities).hasSize(4); + assertThat(capabilities.get(0).getName()).isEqualTo("ActionsCapability"); + assertThat(capabilities.get(1).getName()).isEqualTo("SettingsCapability"); + assertThat(capabilities.get(2).getName()).isEqualTo("StatisticCollectorCapability"); + assertThat(capabilities.get(3).getName()).isEqualTo("StatisticsCapability"); + + assertThat(capabilities.get(0).getDescriptors()).hasSize(4); + + Collection descriptors = capabilities.get(3).getDescriptors(); + Collection allDescriptors = new ArrayList<>(); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(OFFHEAP_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + allDescriptors.add(new StatisticDescriptor("OnHeap:OccupiedByteSize" , "GAUGE")); + + assertThat(descriptors).containsOnlyElementsOf(allDescriptors); + } + } + + @Test + public void descriptorDiskStoreTest() throws Exception { + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + try(PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(getStoragePath() + File.separator + "myData")) + .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .disk(10, MemoryUnit.MB, true)) + ) + .using(managementRegistry) + .build(true)) { + + List capabilities = new ArrayList<>(managementRegistry.getCapabilities()); + assertThat(capabilities).hasSize(4); + assertThat(capabilities.get(0).getName()).isEqualTo("ActionsCapability"); + assertThat(capabilities.get(1).getName()).isEqualTo("SettingsCapability"); + assertThat(capabilities.get(2).getName()).isEqualTo("StatisticCollectorCapability"); + assertThat(capabilities.get(3).getName()).isEqualTo("StatisticsCapability"); + + + assertThat(capabilities.get(0).getDescriptors()).hasSize(4); + + Collection descriptors = capabilities.get(3).getDescriptors(); + Collection allDescriptors = new ArrayList<>(); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(DISK_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors).containsOnlyElementsOf(allDescriptors); + } + } + + private String getStoragePath() throws IOException { + return diskPath.newFolder().getAbsolutePath(); + } + + @Test + public void testCanGetCapabilities() { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .withService(new StoreStatisticsConfiguration(true)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true)) { + + assertThat(managementRegistry.getCapabilities()).hasSize(4); + List capabilities = new ArrayList<>(managementRegistry.getCapabilities()); + assertThat(capabilities.get(0).getName()).isEqualTo("ActionsCapability"); + assertThat(capabilities.get(1).getName()).isEqualTo("SettingsCapability"); + assertThat(capabilities.get(2).getName()).isEqualTo("StatisticCollectorCapability"); + assertThat(capabilities.get(3).getName()).isEqualTo("StatisticsCapability"); + + assertThat(capabilities.get(0).getDescriptors()).hasSize(4); + assertThat(capabilities.get(3).getDescriptors()).hasSize(ONHEAP_DESCRIPTORS.size() + CACHE_DESCRIPTORS.size()); + + assertThat(capabilities.get(0).getCapabilityContext().getAttributes()).hasSize(3); + assertThat(capabilities.get(3).getCapabilityContext().getAttributes()).hasSize(3); + } + } + + @Test + public void testCanGetStats() { + String queryStatisticName = "Cache:HitCount"; + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache1", cacheConfiguration) + .withCache("aCache2", cacheConfiguration) + .using(managementRegistry) + .build(true)) { + + Context context1 = managementRegistry.getConfiguration().getContext() + .with("cacheName", "aCache1"); + + Context context2 = managementRegistry.getConfiguration().getContext() + .with("cacheName", "aCache2"); + + Cache cache1 = cacheManager.getCache("aCache1", Long.class, String.class); + Cache cache2 = cacheManager.getCache("aCache2", Long.class, String.class); + + cache1.put(1L, "one"); + cache2.put(3L, "three"); + + cache1.get(1L); + cache1.get(2L); + cache2.get(3L); + cache2.get(4L); + + Builder builder1 = managementRegistry.withCapability("StatisticsCapability") + .queryStatistic(queryStatisticName) + .on(context1); + + ContextualStatistics counters = getResultSet(builder1, context1, null, queryStatisticName).getResult(context1); + Number counterHistory1 = counters.getLatestSampleValue(queryStatisticName).get(); + + assertThat(counters.size()).isEqualTo(1); + assertThat(counterHistory1.longValue()).isEqualTo(1L); + + Builder builder2 = managementRegistry.withCapability("StatisticsCapability") + .queryStatistic(queryStatisticName) + .on(context1) + .on(context2); + ResultSet allCounters = getResultSet(builder2, context1, context2, queryStatisticName); + + assertThat(allCounters.size()).isEqualTo(2); + assertThat(allCounters.getResult(context1).size()).isEqualTo(1); + assertThat(allCounters.getResult(context2).size()).isEqualTo(1); + + assertThat(allCounters.getResult(context1).getLatestSampleValue(queryStatisticName).get()).isEqualTo(1L); + assertThat(allCounters.getResult(context2).getLatestSampleValue(queryStatisticName).get()).isEqualTo(1L); + } + } + + private static ResultSet getResultSet(Builder builder, Context context1, Context context2, String statisticsName) { + ResultSet counters = null; + + while(!Thread.currentThread().isInterrupted()) //wait till Counter history(s) is initialized and contains values. + { + counters = builder.build().execute(); + + ContextualStatistics statisticsContext1 = counters.getResult(context1); + Number counterContext1 = statisticsContext1.getLatestSampleValue(statisticsName).get(); + + if(context2 != null) + { + ContextualStatistics statisticsContext2 = counters.getResult(context2); + Number counterHistoryContext2 = statisticsContext2.getLatestSampleValue(statisticsName).get(); + + if(counterHistoryContext2.longValue() > 0 && + counterContext1.longValue() > 0) + { + break; + } + } + else + { + if(counterContext1.longValue() > 0) + { + break; + } + } + } + + return counters; + } + + @Test + public void testCall() throws ExecutionException { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache1", cacheConfiguration) + .withCache("aCache2", cacheConfiguration) + .using(managementRegistry) + .build(true)) { + + Context context = managementRegistry.getConfiguration().getContext() + .with("cacheName", "aCache1"); + + cacheManager.getCache("aCache1", Long.class, String.class).put(1L, "1"); + + assertThat(cacheManager.getCache("aCache1", Long.class, String.class).get(1L)).isEqualTo("1"); + + ContextualReturn result = managementRegistry.withCapability("ActionsCapability") + .call("clear") + .on(context) + .build() + .execute() + .getSingleResult(); + + assertThat(result.hasExecuted()).isTrue(); + assertThat(result.getValue()).isNull(); + + assertThat(cacheManager.getCache("aCache1", Long.class, String.class).get(1L)).isNull(); + } + } + + @Test + public void testCallOnInexistignContext() throws ExecutionException { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache1", cacheConfiguration) + .withCache("aCache2", cacheConfiguration) + .using(managementRegistry) + .build(true)) { + + Context inexisting = managementRegistry.getConfiguration().getContext() + .with("cacheName", "aCache3"); + + ResultSet> results = managementRegistry.withCapability("ActionsCapability") + .call("clear") + .on(inexisting) + .build() + .execute(); + + assertThat(results.size()).isEqualTo(1); + assertThat(results.getSingleResult().hasExecuted()).isFalse(); + + assertThatThrownBy(() -> results.getSingleResult().getValue()).isInstanceOf(NoSuchElementException.class); + } + } + + @BeforeClass + public static void loadStatsUtil() { + ONHEAP_NO_STATS_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , "COUNTER")); + ONHEAP_NO_STATS_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:ExpirationCount" , "COUNTER")); + ONHEAP_NO_STATS_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , "GAUGE")); + + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , "COUNTER")); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:ExpirationCount" , "COUNTER")); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , "COUNTER")); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , "GAUGE")); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , "COUNTER")); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:PutCount" , "COUNTER")); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:RemovalCount" , "COUNTER")); + + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", "COUNTER")); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", "GAUGE")); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedByteSize", "GAUGE")); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", "GAUGE")); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", "COUNTER")); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:ExpirationCount", "COUNTER")); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", "COUNTER")); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:PutCount", "COUNTER")); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:RemovalCount", "COUNTER")); + + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", "GAUGE")); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedByteSize", "GAUGE")); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", "COUNTER")); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", "COUNTER")); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:ExpirationCount", "COUNTER")); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", "COUNTER")); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", "GAUGE")); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:PutCount", "COUNTER")); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:RemovalCount", "COUNTER")); + + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", "COUNTER")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", "COUNTER")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:PutCount", "COUNTER")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:RemovalCount", "COUNTER")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:EvictionCount", "COUNTER")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ExpirationCount", "COUNTER")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:GetHitLatency#100", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:GetHitLatency#50", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:GetHitLatency#95", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:GetHitLatency#99", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:GetMissLatency#100", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:GetMissLatency#50", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:GetMissLatency#95", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:GetMissLatency#99", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:PutLatency#100", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:PutLatency#50", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:PutLatency#95", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:PutLatency#99", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:RemoveLatency#100", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:RemoveLatency#50", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:RemoveLatency#95", "GAUGE")); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:RemoveLatency#99", "GAUGE")); + } +} diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java b/ehcache-management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java similarity index 88% rename from management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java rename to ehcache-management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java index ce8f83e953..516564259a 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java +++ b/ehcache-management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java @@ -51,10 +51,10 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.isIn; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @RunWith(JUnit4.class) @@ -158,15 +158,9 @@ public void testStats() { String statisticName = "Cache:MissCount"; List contextList = Arrays.asList( - Context.empty() - .with("cacheManagerName", "myCM1") - .with("cacheName", "aCache1"), - Context.empty() - .with("cacheManagerName", "myCM2") - .with("cacheName", "aCache2"), - Context.empty() - .with("cacheManagerName", "myCM2") - .with("cacheName", "aCache3")); + config1.getContext().with("cacheName", "aCache1"), + config2.getContext().with("cacheName", "aCache2"), + config2.getContext().with("cacheName", "aCache3")); cacheManager1.getCache("aCache1", Long.class, String.class).get(1L); cacheManager2.getCache("aCache2", Long.class, String.class).get(2L); @@ -184,9 +178,9 @@ public void testStats() { assertThat(allCounters.getResult(contextList.get(2)).size(), equalTo(1)); - assertThat(allCounters.getResult(contextList.get(0)).getStatistic(statisticName).longValue(), equalTo(1L)); - assertThat(allCounters.getResult(contextList.get(1)).getStatistic(statisticName).longValue(), equalTo(1L)); - assertThat(allCounters.getResult(contextList.get(2)).getStatistic(statisticName).longValue(), equalTo(1L)); + assertThat(allCounters.getResult(contextList.get(0)).getLatestSampleValue(statisticName).get(), equalTo(1L)); + assertThat(allCounters.getResult(contextList.get(1)).getLatestSampleValue(statisticName).get(), equalTo(1L)); + assertThat(allCounters.getResult(contextList.get(2)).getLatestSampleValue(statisticName).get(), equalTo(1L)); } @@ -197,9 +191,9 @@ private static ResultSet getResultSet(StatisticQuery.Build while(!Thread.currentThread().isInterrupted()) { counters = builder.build().execute(); - if(counters.getResult(contextList.get(0)).getStatistic(statisticsName).longValue()> 0 && - counters.getResult(contextList.get(1)).getStatistic(statisticsName).longValue() > 0 && - counters.getResult(contextList.get(2)).getStatistic(statisticsName).longValue() > 0) { + if(counters.getResult(contextList.get(0)).getLatestSampleValue(statisticsName).get()> 0 && + counters.getResult(contextList.get(1)).getLatestSampleValue(statisticsName).get() > 0 && + counters.getResult(contextList.get(2)).getLatestSampleValue(statisticsName).get() > 0) { break; } } @@ -210,15 +204,9 @@ private static ResultSet getResultSet(StatisticQuery.Build @Test public void testCall() throws ExecutionException { List contextList = Arrays.asList( - Context.empty() - .with("cacheManagerName", "myCM1") - .with("cacheName", "aCache1"), - Context.empty() - .with("cacheManagerName", "myCM1") - .with("cacheName", "aCache4"), - Context.empty() - .with("cacheManagerName", "myCM2") - .with("cacheName", "aCache2"), + config1.getContext().with("cacheName", "aCache1"), + config1.getContext().with("cacheName", "aCache4"), + config2.getContext().with("cacheName", "aCache2"), Context.empty() .with("cacheManagerName", "myCM55") .with("cacheName", "aCache55")); diff --git a/ehcache-management/src/test/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParserTest.java b/ehcache-management/src/test/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParserTest.java new file mode 100644 index 0000000000..afb4751a85 --- /dev/null +++ b/ehcache-management/src/test/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParserTest.java @@ -0,0 +1,93 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.registry; + +import org.hamcrest.Matchers; +import org.junit.Test; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.xml.sax.InputSource; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; + +import java.io.IOException; +import java.io.StringReader; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * ManagementRegistryServiceConfigurationParserTest + */ +public class ManagementRegistryServiceConfigurationParserTest { + + @Test + public void testParseTagInsideProperty() throws ParserConfigurationException, IOException, SAXException { + String property = ManagementRegistryServiceConfigurationParserTest.class.getName() + ":tag"; + String inputString = "" + + "tag1${" + property + "}"; + + ManagementRegistryServiceConfigurationParser configParser = new ManagementRegistryServiceConfigurationParser(); + + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setNamespaceAware(true); + Element node = documentBuilderFactory.newDocumentBuilder() + .parse(new InputSource(new StringReader(inputString))).getDocumentElement(); + + System.setProperty(property, "tag2"); + try { + DefaultManagementRegistryConfiguration configuration = + (DefaultManagementRegistryConfiguration) configParser.parseServiceCreationConfiguration(node, null); + + assertThat(configuration.getTags(), Matchers.hasItems("tag1", "tag2")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testTranslateServiceCreationConfiguration() { + ManagementRegistryServiceConfigurationParser configTranslator = new ManagementRegistryServiceConfigurationParser(); + + DefaultManagementRegistryConfiguration defaultManagementRegistryConfiguration = + new DefaultManagementRegistryConfiguration().setCacheManagerAlias("my-cache-alias"). + setCollectorExecutorAlias("my-executor").addTag("tag1").addTag("tag2"); + + Node retElement = configTranslator.unparseServiceCreationConfiguration(defaultManagementRegistryConfiguration); + String inputString = "" + + "tag1tag2"; + assertThat(retElement, isSameConfigurationAs(inputString)); + } + + @Test + public void testTranslateServiceCreationConfigurationWithoutTags() { + ManagementRegistryServiceConfigurationParser configTranslator = new ManagementRegistryServiceConfigurationParser(); + + DefaultManagementRegistryConfiguration defaultManagementRegistryConfiguration = + new DefaultManagementRegistryConfiguration().setCacheManagerAlias("my-cache-alias"). + setCollectorExecutorAlias("my-executor"); + + Node retElement = configTranslator.unparseServiceCreationConfiguration(defaultManagementRegistryConfiguration); + String inputString = ""; + assertThat(retElement, isSameConfigurationAs(inputString)); + } + +} diff --git a/ehcache-management/src/test/java/org/ehcache/management/registry/XmlConfigTest.java b/ehcache-management/src/test/java/org/ehcache/management/registry/XmlConfigTest.java new file mode 100644 index 0000000000..48e2a097ab --- /dev/null +++ b/ehcache-management/src/test/java/org/ehcache/management/registry/XmlConfigTest.java @@ -0,0 +1,112 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.registry; + +import org.ehcache.CacheManager; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.util.Arrays; +import java.util.Collection; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.MatcherAssert.assertThat; + +@RunWith(Parameterized.class) +public class XmlConfigTest { + + @Parameterized.Parameters + public static Collection data() { + return Arrays.asList(new Object[][]{ + { + "ehcache-management-1.xml", + new DefaultManagementRegistryConfiguration() + }, + { + "ehcache-management-2.xml", + new DefaultManagementRegistryConfiguration() + .setCacheManagerAlias("my-cache-manager-name") + .addTags("webapp-name", "jboss-1", "server-node-1") + }, + { + "ehcache-management-3.xml", + new DefaultManagementRegistryConfiguration() + .setCacheManagerAlias("my-cache-manager-name") + .addTags("webapp-name", "jboss-1", "server-node-1") + .setCollectorExecutorAlias("my-collectorExecutorAlias") + }, + { + "ehcache-management-4.xml", + new DefaultManagementRegistryConfiguration() + .setCacheManagerAlias("my-cache-manager-name") + .addTags("webapp-name", "jboss-1", "server-node-1") + }, + { + "ehcache-management-5.xml", + new DefaultManagementRegistryConfiguration() + .setCacheManagerAlias("my-cache-manager-name") + .addTags("webapp-name", "jboss-1", "server-node-1") + } + }); + } + + private final String xml; + private final DefaultManagementRegistryConfiguration expectedConfiguration; + + public XmlConfigTest(String xml, DefaultManagementRegistryConfiguration expectedConfiguration) { + this.xml = xml; + this.expectedConfiguration = expectedConfiguration; + } + + @Test + public void test_config_loaded() throws Exception { + CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(new XmlConfiguration(getClass().getClassLoader().getResource(xml))); + myCacheManager.init(); + try { + DefaultManagementRegistryConfiguration registryConfiguration = null; + + for (ServiceCreationConfiguration configuration : myCacheManager.getRuntimeConfiguration().getServiceCreationConfigurations()) { + if (configuration instanceof DefaultManagementRegistryConfiguration) { + registryConfiguration = (DefaultManagementRegistryConfiguration) configuration; + break; + } + } + + assertThat(registryConfiguration, is(not(nullValue()))); + + // 1st test: CM alia not set, so generated + if (xml.endsWith("-1.xml")) { + expectedConfiguration.setCacheManagerAlias(registryConfiguration.getContext().get("cacheManagerName")); + } + + assertThat(registryConfiguration.getCacheManagerAlias(), equalTo(expectedConfiguration.getCacheManagerAlias())); + assertThat(registryConfiguration.getCollectorExecutorAlias(), equalTo(expectedConfiguration.getCollectorExecutorAlias())); + assertThat(registryConfiguration.getContext().without("instanceId"), equalTo(expectedConfiguration.getContext().without("instanceId"))); + assertThat(registryConfiguration.getTags(), equalTo(expectedConfiguration.getTags())); + + } finally { + myCacheManager.close(); + } + } + +} diff --git a/ehcache-management/src/test/resources/ehcache-management-1.xml b/ehcache-management/src/test/resources/ehcache-management-1.xml new file mode 100644 index 0000000000..3b6276efd1 --- /dev/null +++ b/ehcache-management/src/test/resources/ehcache-management-1.xml @@ -0,0 +1,16 @@ + + + + + + + + + java.lang.String + java.lang.String + 20 + + + diff --git a/ehcache-management/src/test/resources/ehcache-management-2.xml b/ehcache-management/src/test/resources/ehcache-management-2.xml new file mode 100644 index 0000000000..fd7702f853 --- /dev/null +++ b/ehcache-management/src/test/resources/ehcache-management-2.xml @@ -0,0 +1,21 @@ + + + + + + webapp-name + jboss-1 + server-node-1 + + + + + + java.lang.String + java.lang.String + 20 + + + diff --git a/ehcache-management/src/test/resources/ehcache-management-3.xml b/ehcache-management/src/test/resources/ehcache-management-3.xml new file mode 100644 index 0000000000..4be6c11bf9 --- /dev/null +++ b/ehcache-management/src/test/resources/ehcache-management-3.xml @@ -0,0 +1,21 @@ + + + + + + webapp-name + jboss-1 + server-node-1 + + + + + + java.lang.String + java.lang.String + 20 + + + diff --git a/ehcache-management/src/test/resources/ehcache-management-4.xml b/ehcache-management/src/test/resources/ehcache-management-4.xml new file mode 100644 index 0000000000..fd7702f853 --- /dev/null +++ b/ehcache-management/src/test/resources/ehcache-management-4.xml @@ -0,0 +1,21 @@ + + + + + + webapp-name + jboss-1 + server-node-1 + + + + + + java.lang.String + java.lang.String + 20 + + + diff --git a/ehcache-management/src/test/resources/ehcache-management-5.xml b/ehcache-management/src/test/resources/ehcache-management-5.xml new file mode 100644 index 0000000000..fd7702f853 --- /dev/null +++ b/ehcache-management/src/test/resources/ehcache-management-5.xml @@ -0,0 +1,21 @@ + + + + + + webapp-name + jboss-1 + server-node-1 + + + + + + java.lang.String + java.lang.String + 20 + + + diff --git a/ehcache-management/src/test/resources/ehcache-management.xml b/ehcache-management/src/test/resources/ehcache-management.xml new file mode 100644 index 0000000000..c64d8b5530 --- /dev/null +++ b/ehcache-management/src/test/resources/ehcache-management.xml @@ -0,0 +1,42 @@ + + + + + + + + webapp-name + jboss-1 + server-node-1 + + + + + + java.lang.String + java.lang.String + + + + + 20 + + + + diff --git a/ehcache-management/src/test/resources/settings-capability.json b/ehcache-management/src/test/resources/settings-capability.json new file mode 100644 index 0000000000..f8702c8da7 --- /dev/null +++ b/ehcache-management/src/test/resources/settings-capability.json @@ -0,0 +1 @@ +{"name":"SettingsCapability","descriptors":[{"cacheName":"cache-1","keyType":"java.lang.String","valueType":"java.lang.String","resourcePools":{"heap":{"level":10000,"persistent":false,"type":"ENTRY","size":10,"unit":"entries"},"offheap":{"level":1000,"persistent":false,"type":"MEMORY","size":1,"unit":"MB"},"disk":{"level":100,"persistent":true,"type":"MEMORY","size":2,"unit":"MB"}}},{"cacheName":"cache-2","keyType":"java.lang.String","valueType":"java.lang.String","resourcePools":{"heap":{"level":10000,"persistent":false,"type":"ENTRY","size":10,"unit":"entries"},"offheap":{"level":1000,"persistent":false,"type":"MEMORY","size":1,"unit":"MB"},"disk":{"level":100,"persistent":true,"type":"MEMORY","size":2,"unit":"MB"}}},{"cacheManagerDescription":"","instanceId":"UUID","cacheManagerName":"my-cm-1"},"tags":["baz","boo","foo"]}],"capabilityContext":{"attributes":[{"name":"instanceId","required":true},{"name":"cacheManagerName","required":true},{"name":"cacheName","required":true}]}} diff --git a/ehcache-transactions/build.gradle b/ehcache-transactions/build.gradle new file mode 100644 index 0000000000..0584802e93 --- /dev/null +++ b/ehcache-transactions/build.gradle @@ -0,0 +1,75 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.public-module' + id 'org.ehcache.build.plugins.variant' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 transactions module' + description = 'The transactions module of Ehcache 3' + } +} + +variants { + variant('module') { + capability "org.ehcache:ehcache-transactions-provider:$version" + capability "org.ehcache:ehcache-transactions-modules:$version" + } +} + +configurations { + [apiElements, runtimeElements]*.outgoing { + capability "org.ehcache:ehcache-transactions-provider:$version" + capability "org.ehcache:ehcache-transactions:$version" + } +} + +dependencies { + api group: 'javax.transaction', name: 'jta', version: '1.1' + api project(':ehcache') + implementation(group: 'org.codehaus.btm', name: 'btm', version: '2.1.4') { + exclude group:'org.slf4j', module:'slf4j-api' + } + compileOnly 'org.osgi:org.osgi.service.component.annotations:1.3.0' + + testImplementation(project(':core-spi-test')) { + exclude group:'org.ehcache.modules' + } + testImplementation testFixtures(project(':ehcache-xml')) { + exclude group:'org.ehcache.modules', module:'ehcache-xml' + } + testImplementation "org.terracotta:statistics:$statisticVersion" + + moduleApi group: 'javax.transaction', name: 'jta', version: '1.1' + moduleApi project(':ehcache-core') + moduleImplementation project(':ehcache-impl') + moduleImplementation project(':ehcache-xml') + + moduleImplementation(group: 'org.codehaus.btm', name: 'btm', version: '2.1.4') { + exclude group:'org.slf4j', module:'slf4j-api' + } +} + +jar { + bnd ( + 'Bundle-SymbolicName': 'org.ehcache.transactions', + 'Export-Package': 'org.ehcache.transactions.xa.*', + 'Import-Package': 'bitronix.tm.*;resolution:=optional, javax.transaction.*;resolution:=optional, org.ehcache.xml.*;resolution:=optional, *', + ) +} diff --git a/ehcache-transactions/config/checkstyle-suppressions.xml b/ehcache-transactions/config/checkstyle-suppressions.xml new file mode 100644 index 0000000000..cb41d0baf7 --- /dev/null +++ b/ehcache-transactions/config/checkstyle-suppressions.xml @@ -0,0 +1,9 @@ + + + + + + + diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/EhcacheXAException.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/EhcacheXAException.java similarity index 93% rename from transactions/src/main/java/org/ehcache/transactions/xa/EhcacheXAException.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/EhcacheXAException.java index ee2eea8db5..e7a6d81ed8 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/EhcacheXAException.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/EhcacheXAException.java @@ -24,6 +24,8 @@ */ public class EhcacheXAException extends XAException { + private static final long serialVersionUID = 4369895735968757104L; + public EhcacheXAException(String msg, int errorCode) { super(msg); this.errorCode = errorCode; diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/XACacheException.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/XACacheException.java similarity index 78% rename from transactions/src/main/java/org/ehcache/transactions/xa/XACacheException.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/XACacheException.java index eaf5794309..e833f89fef 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/XACacheException.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/XACacheException.java @@ -15,12 +15,16 @@ */ package org.ehcache.transactions.xa; +import org.ehcache.transactions.xa.internal.XAStore; + /** - * The payload exception thrown by the cache when {@link XAStoreAccessException} is thrown. + * The payload exception thrown by the cache when an {@link XAStore} has issues retrieving the transaction context. * * @author Ludovic Orban */ public class XACacheException extends RuntimeException { + private static final long serialVersionUID = -6691335026252002011L; + public XACacheException(String message) { super(message); } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/configuration/XAStoreConfiguration.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/configuration/XAStoreConfiguration.java similarity index 83% rename from transactions/src/main/java/org/ehcache/transactions/xa/configuration/XAStoreConfiguration.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/configuration/XAStoreConfiguration.java index c18a72a256..7b30dd7647 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/configuration/XAStoreConfiguration.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/configuration/XAStoreConfiguration.java @@ -21,7 +21,7 @@ /** * @author Ludovic Orban */ -public class XAStoreConfiguration implements ServiceConfiguration { +public class XAStoreConfiguration implements ServiceConfiguration { private final String uniqueXAResourceId; @@ -37,4 +37,14 @@ public String getUniqueXAResourceId() { public Class getServiceType() { return XAStore.Provider.class; } + + @Override + public String derive() { + return getUniqueXAResourceId(); + } + + @Override + public XAStoreConfiguration build(String xaResourceId) { + return new XAStoreConfiguration(xaResourceId); + } } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/EhcacheXAResource.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/EhcacheXAResource.java similarity index 98% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/EhcacheXAResource.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/EhcacheXAResource.java index 774c66c65e..ad895866c3 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/EhcacheXAResource.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/EhcacheXAResource.java @@ -16,7 +16,7 @@ package org.ehcache.transactions.xa.internal; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.store.Store; import org.ehcache.transactions.xa.EhcacheXAException; import org.ehcache.transactions.xa.internal.journal.Journal; @@ -104,12 +104,12 @@ public void forget(Xid xid) throws XAException { } @Override - public int getTransactionTimeout() throws XAException { + public int getTransactionTimeout() { return transactionTimeoutInSeconds; } @Override - public boolean isSameRM(XAResource xaResource) throws XAException { + public boolean isSameRM(XAResource xaResource) { return xaResource == this; } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SerializableXid.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SerializableXid.java similarity index 97% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/SerializableXid.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SerializableXid.java index 9fd887700a..08dad98c26 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SerializableXid.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SerializableXid.java @@ -27,6 +27,7 @@ */ public class SerializableXid implements Xid, Serializable { + private static final long serialVersionUID = -508551479291939720L; private final int formatId; private final byte[] globalTransactionId; private final byte[] branchQualifier; diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLock.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLock.java similarity index 98% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLock.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLock.java index 8cc77d25c4..fc5bcfaace 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLock.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLock.java @@ -34,6 +34,7 @@ */ public class SoftLock implements Serializable { + private static final long serialVersionUID = -7281781343721739009L; private final TransactionId transactionId; private final V oldValue; private final byte[] oldValueSerialized; diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java similarity index 92% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java index 1922e88592..3064b950a2 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java @@ -17,7 +17,7 @@ package org.ehcache.transactions.xa.internal; import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.impl.internal.util.ByteBufferInputStream; +import org.ehcache.core.util.ByteBufferInputStream; import org.ehcache.spi.serialization.Serializer; import java.io.ByteArrayOutputStream; @@ -31,6 +31,8 @@ import java.util.HashMap; import java.util.Map; +import static org.ehcache.transactions.xa.internal.TypeUtil.uncheckedCast; + /** * The stateless {@link Serializer} used to serialize {@link SoftLock}s. * @@ -62,13 +64,12 @@ public ByteBuffer serialize(SoftLock object) { return ByteBuffer.wrap(bout.toByteArray()); } - @SuppressWarnings("unchecked") @Override public SoftLock read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { ByteBufferInputStream bin = new ByteBufferInputStream(entry); try { try (OIS ois = new OIS(bin, classLoader)) { - return (SoftLock) ois.readObject(); + return uncheckedCast(ois.readObject()); } } catch (IOException e) { throw new SerializerException(e); @@ -96,7 +97,7 @@ public OIS(InputStream in, ClassLoader classLoader) throws IOException { } @Override - protected Class resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException { + protected Class resolveClass(ObjectStreamClass desc) throws ClassNotFoundException { try { return Class.forName(desc.getName(), false, classLoader); } catch (ClassNotFoundException cnfe) { @@ -109,8 +110,8 @@ protected Class resolveClass(ObjectStreamClass desc) throws IOException, Clas } @Override - protected Class resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { - Class[] interfaceClasses = new Class[interfaces.length]; + protected Class resolveProxyClass(String[] interfaces) throws ClassNotFoundException { + Class[] interfaceClasses = new Class[interfaces.length]; for (int i = 0; i < interfaces.length; i++) { interfaceClasses[i] = Class.forName(interfaces[i], false, classLoader); } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedCopier.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedCopier.java similarity index 98% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedCopier.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedCopier.java index 0f97a926f2..359ce9a1a5 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedCopier.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedCopier.java @@ -36,7 +36,7 @@ public SoftLock copyForRead(SoftLock obj) { T oldValue = valueCopier.copyForRead(obj.getOldValue()); XAValueHolder valueHolder = obj.getNewValueHolder(); XAValueHolder newValueHolder = valueHolder == null ? null : new XAValueHolder<>(valueHolder, valueCopier.copyForRead(valueHolder - .value())); + .get())); return new SoftLock<>(obj.getTransactionId(), oldValue, newValueHolder); } @@ -45,7 +45,7 @@ public SoftLock copyForWrite(SoftLock obj) { T oldValue = valueCopier.copyForWrite(obj.getOldValue()); XAValueHolder valueHolder = obj.getNewValueHolder(); XAValueHolder newValueHolder = valueHolder == null ? null : new XAValueHolder<>(valueHolder, valueCopier.copyForWrite(valueHolder - .value())); + .get())); return new SoftLock<>(obj.getTransactionId(), oldValue, newValueHolder); } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/StatefulSoftLockValueCombinedSerializer.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/StatefulSoftLockValueCombinedSerializer.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/StatefulSoftLockValueCombinedSerializer.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/StatefulSoftLockValueCombinedSerializer.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/StoreEventSourceWrapper.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/StoreEventSourceWrapper.java similarity index 79% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/StoreEventSourceWrapper.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/StoreEventSourceWrapper.java index a85326b32d..cc8c0ef442 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/StoreEventSourceWrapper.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/StoreEventSourceWrapper.java @@ -16,19 +16,15 @@ package org.ehcache.transactions.xa.internal; -import org.ehcache.event.EventType; -import org.ehcache.impl.internal.events.StoreEventImpl; import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.event.EventType; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import static org.ehcache.impl.internal.events.StoreEvents.createEvent; -import static org.ehcache.impl.internal.events.StoreEvents.updateEvent; - /** * StoreEventSourceWrapper */ @@ -87,6 +83,11 @@ public void setEventOrdering(boolean ordering) { underlying.setEventOrdering(ordering); } + @Override + public void setSynchronous(boolean synchronous) throws IllegalArgumentException { + underlying.setSynchronous(synchronous); + } + @Override public boolean isEventOrdering() { return underlying.isEventOrdering(); @@ -105,23 +106,7 @@ private StoreEventListenerWrapper(StoreEventListener wrappedOne) { @Override public void onEvent(StoreEvent> event) { - StoreEvent eventToPropagate = null; - switch (event.getType()) { - case CREATED: - eventToPropagate = createEvent(event.getKey(), event.getNewValue().getOldValue()); - break; - case UPDATED: - eventToPropagate = updateEvent(event.getKey(), event.getOldValue().getOldValue(), event.getNewValue() - .getOldValue()); - break; - case REMOVED: - case EXPIRED: - case EVICTED: - eventToPropagate = new StoreEventImpl<>(event.getType(), event.getKey(), event.getOldValue() - .getOldValue(), null); - break; - } - wrappedOne.onEvent(eventToPropagate); + wrappedOne.onEvent(new XaEvent<>(event)); } @Override @@ -140,4 +125,43 @@ public int hashCode() { return wrappedOne.hashCode(); } } + + static class XaEvent implements StoreEvent { + + private final StoreEvent> delegate; + + XaEvent(StoreEvent> delegate) { + this.delegate = delegate; + } + + @Override + public EventType getType() { + return delegate.getType(); + } + + @Override + public K getKey() { + return delegate.getKey(); + } + + @Override + public V getNewValue() { + SoftLock newValue = delegate.getNewValue(); + if (newValue == null) { + return null; + } else { + return newValue.getOldValue(); + } + } + + @Override + public V getOldValue() { + SoftLock oldValue = delegate.getOldValue(); + if (oldValue == null) { + return null; + } else { + return oldValue.getOldValue(); + } + } + } } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/TransactionId.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/TransactionId.java similarity index 95% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/TransactionId.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/TransactionId.java index 00941be622..86243df075 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/TransactionId.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/TransactionId.java @@ -26,6 +26,7 @@ */ public class TransactionId implements Serializable { + private static final long serialVersionUID = 3283565495631790142L; private final SerializableXid serializableXid; public TransactionId(Xid xid) { diff --git a/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/TypeUtil.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/TypeUtil.java new file mode 100644 index 0000000000..92c635d904 --- /dev/null +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/TypeUtil.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.transactions.xa.internal; + +/** + * Holder for static helper methods related to types and casting. + */ +public final class TypeUtil { + + private TypeUtil() { + //static holder + } + + /** + * Performs a (warning suppressed) unchecked cast to an infered type {@code U}. + */ + @SuppressWarnings("unchecked") + public static U uncheckedCast(Object o) { + return (U) o; + } +} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java similarity index 81% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java index e451539d78..a9c506820a 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java @@ -17,21 +17,23 @@ package org.ehcache.transactions.xa.internal; import org.ehcache.Cache; -import org.ehcache.ValueSupplier; import org.ehcache.config.ResourceType; import org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.config.EvictionAdvisor; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.core.internal.store.StoreSupport; import org.ehcache.core.spi.service.DiskResourceService; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.core.spi.store.WrapperStore; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.core.store.StoreSupport; +import org.ehcache.impl.store.BaseStore; +import org.ehcache.spi.resilience.StoreAccessException; +import org.ehcache.expiry.ExpiryPolicy; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.spi.serialization.StatefulSerializer; +import org.ehcache.spi.service.OptionalServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.events.StoreEventSource; @@ -52,9 +54,9 @@ import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.context.ContextManager; import java.io.IOException; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -64,10 +66,12 @@ import java.util.List; import java.util.Map; import java.util.NoSuchElementException; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; @@ -78,17 +82,18 @@ import static org.ehcache.core.spi.service.ServiceUtils.findAmongst; import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; +import static org.ehcache.transactions.xa.internal.TypeUtil.uncheckedCast; /** * A {@link Store} implementation wrapping another {@link Store} driven by a JTA * {@link javax.transaction.TransactionManager} using the XA 2-phase commit protocol. */ -public class XAStore implements Store { +public class XAStore extends BaseStore implements WrapperStore { private static final Logger LOGGER = LoggerFactory.getLogger(XAStore.class); - private final Class keyType; - private final Class valueType; + + private static final Supplier REPLACE_EQUALS_TRUE = () -> Boolean.TRUE; + private final Store> underlyingStore; private final TransactionManagerWrapper transactionManagerWrapper; private final Map> xaResources = new ConcurrentHashMap<>(); @@ -96,13 +101,12 @@ public class XAStore implements Store { private final Journal journal; private final String uniqueXAResourceId; private final XATransactionContextFactory transactionContextFactory; - private final EhcacheXAResource recoveryXaResource; + private final EhcacheXAResource recoveryXaResource; private final StoreEventSourceWrapper eventSourceWrapper; public XAStore(Class keyType, Class valueType, Store> underlyingStore, TransactionManagerWrapper transactionManagerWrapper, - TimeSource timeSource, Journal journal, String uniqueXAResourceId) { - this.keyType = keyType; - this.valueType = valueType; + TimeSource timeSource, Journal journal, String uniqueXAResourceId, StatisticsService statisticsService) { + super(keyType, valueType, true, statisticsService); this.underlyingStore = underlyingStore; this.transactionManagerWrapper = transactionManagerWrapper; this.timeSource = timeSource; @@ -112,7 +116,6 @@ public XAStore(Class keyType, Class valueType, Store> under this.recoveryXaResource = new EhcacheXAResource<>(underlyingStore, journal, transactionContextFactory); this.eventSourceWrapper = new StoreEventSourceWrapper<>(underlyingStore.getStoreEventSource()); - ContextManager.associate(underlyingStore).withParent(this); } private static boolean isInDoubt(SoftLock softLock) { @@ -160,31 +163,6 @@ public void afterCompletion(int status) { } } - private static boolean eq(Object o1, Object o2) { - return (o1 == o2) || (o1 != null && o1.equals(o2)); - } - - private void checkKey(K keyObject) { - if (keyObject == null) { - throw new NullPointerException(); - } - if (!keyType.isAssignableFrom(keyObject.getClass())) { - throw new ClassCastException("Invalid key type, expected : " + keyType.getName() + " but was : " + keyObject.getClass().getName()); - } - } - - private void checkValue(V valueObject) { - if (valueObject == null) { - throw new NullPointerException(); - } - if (!valueType.isAssignableFrom(valueObject.getClass())) { - throw new ClassCastException("Invalid value type, expected : " + valueType.getName() + " but was : " + valueObject.getClass().getName()); - } - } - - private static final Supplier REPLACE_EQUALS_TRUE = () -> Boolean.TRUE; - - @Override public ValueHolder get(K key) throws StoreAccessException { checkKey(key); @@ -203,7 +181,7 @@ public ValueHolder get(K key) throws StoreAccessException { return null; } - SoftLock softLock = softLockValueHolder.value(); + SoftLock softLock = softLockValueHolder.get(); if (isInDoubt(softLock)) { currentContext.addCommand(key, new StoreEvictCommand<>(softLock.getOldValue())); return null; @@ -219,7 +197,7 @@ public boolean containsKey(K key) throws StoreAccessException { return getCurrentContext().newValueHolderOf(key) != null; } ValueHolder> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); - return softLockValueHolder != null && softLockValueHolder.value().getTransactionId() == null && softLockValueHolder.value().getOldValue() != null; + return softLockValueHolder != null && softLockValueHolder.get().getTransactionId() == null && softLockValueHolder.get().getOldValue() != null; } @Override @@ -235,7 +213,7 @@ public PutStatus put(K key, V value) throws StoreAccessException { ValueHolder> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); if (softLockValueHolder != null) { - SoftLock softLock = softLockValueHolder.value(); + SoftLock softLock = softLockValueHolder.get(); if (isInDoubt(softLock)) { currentContext.addCommand(key, new StoreEvictCommand<>(softLock.getOldValue())); } else { @@ -266,7 +244,7 @@ public boolean remove(K key) throws StoreAccessException { ValueHolder> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); boolean status = false; if (softLockValueHolder != null) { - SoftLock softLock = softLockValueHolder.value(); + SoftLock softLock = softLockValueHolder.get(); if (isInDoubt(softLock)) { currentContext.addCommand(key, new StoreEvictCommand<>(softLock.getOldValue())); } else { @@ -277,7 +255,7 @@ public boolean remove(K key) throws StoreAccessException { } @Override - public ValueHolder putIfAbsent(K key, V value) throws StoreAccessException { + public ValueHolder putIfAbsent(K key, V value, Consumer put) throws StoreAccessException { checkKey(key); checkValue(value); XATransactionContext currentContext = getCurrentContext(); @@ -294,7 +272,7 @@ public ValueHolder putIfAbsent(K key, V value) throws StoreAccessException { ValueHolder> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); if (softLockValueHolder != null) { - SoftLock softLock = softLockValueHolder.value(); + SoftLock softLock = softLockValueHolder.get(); if (isInDoubt(softLock)) { currentContext.addCommand(key, new StoreEvictCommand<>(softLock.getOldValue())); return null; @@ -327,7 +305,7 @@ public RemoveStatus remove(K key, V value) throws StoreAccessException { ValueHolder> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); if (softLockValueHolder != null) { - SoftLock softLock = softLockValueHolder.value(); + SoftLock softLock = softLockValueHolder.get(); if (isInDoubt(softLock)) { currentContext.addCommand(key, new StoreEvictCommand<>(softLock.getOldValue())); return RemoveStatus.KEY_MISSING; @@ -361,14 +339,14 @@ public ValueHolder replace(K key, V value) throws StoreAccessException { ValueHolder> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); if (softLockValueHolder != null) { - SoftLock softLock = softLockValueHolder.value(); + SoftLock softLock = softLockValueHolder.get(); if (isInDoubt(softLock)) { currentContext.addCommand(key, new StoreEvictCommand<>(softLock.getOldValue())); return null; } else { V oldValue = softLock.getOldValue(); currentContext.addCommand(key, new StorePutCommand<>(oldValue, new XAValueHolder<>(value, timeSource.getTimeMillis()))); - return new XAValueHolder<>(oldValue, softLockValueHolder.creationTime(XAValueHolder.NATIVE_TIME_UNIT)); + return new XAValueHolder<>(oldValue, softLockValueHolder.creationTime()); } } else { return null; @@ -396,7 +374,7 @@ public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessEx ValueHolder> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); if (softLockValueHolder != null) { - SoftLock softLock = softLockValueHolder.value(); + SoftLock softLock = softLockValueHolder.get(); V previousValue = softLock.getOldValue(); if (isInDoubt(softLock)) { currentContext.addCommand(key, new StoreEvictCommand<>(previousValue)); @@ -430,6 +408,11 @@ public Iterator>> iterator() { return new XAIterator(valueHolderMap, underlyingStore.iterator(), currentContext.getTransactionId()); } + @Override + protected String getStatisticsTag() { + return "XaStore"; + } + class XAIterator implements Iterator>> { private final java.util.Iterator>> iterator; @@ -476,10 +459,10 @@ public ValueHolder getValue() { if (!transactionContextFactory.isTouched(transactionId, next.getKey())) { ValueHolder> valueHolder = next.getValue(); - SoftLock softLock = valueHolder.value(); + SoftLock softLock = valueHolder.get(); final XAValueHolder xaValueHolder; if (softLock.getTransactionId() == transactionId) { - xaValueHolder = new XAValueHolder<>(valueHolder, softLock.getNewValueHolder().value()); + xaValueHolder = new XAValueHolder<>(valueHolder, softLock.getNewValueHolder().get()); } else if (isInDoubt(softLock)) { continue; } else { @@ -528,7 +511,7 @@ public Cache.Entry> next() throws StoreAccessException { } @Override - public ValueHolder compute(K key, BiFunction mappingFunction, Supplier replaceEqual) throws StoreAccessException { + public ValueHolder computeAndGet(K key, BiFunction mappingFunction, Supplier replaceEqual, Supplier invokeWriter) throws StoreAccessException { checkKey(key); XATransactionContext currentContext = getCurrentContext(); if (currentContext.touched(key)) { @@ -537,11 +520,11 @@ public ValueHolder compute(K key, BiFunction> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); - SoftLock softLock = softLockValueHolder == null ? null : softLockValueHolder.value(); + SoftLock softLock = softLockValueHolder == null ? null : softLockValueHolder.get(); V oldValue = softLock == null ? null : softLock.getOldValue(); V newValue = mappingFunction.apply(key, oldValue); XAValueHolder xaValueHolder = newValue == null ? null : new XAValueHolder<>(newValue, timeSource.getTimeMillis()); - if (eq(oldValue, newValue) && !replaceEqual.get()) { + if (Objects.equals(oldValue, newValue) && !replaceEqual.get()) { return xaValueHolder; } if (newValue != null) { @@ -564,8 +547,64 @@ public ValueHolder compute(K key, BiFunction compute(K key, BiFunction mappingFunction) throws StoreAccessException { - return compute(key, mappingFunction, REPLACE_EQUALS_TRUE); + public ValueHolder getAndCompute(K key, BiFunction mappingFunction) throws StoreAccessException { + checkKey(key); + XATransactionContext currentContext = getCurrentContext(); + if (currentContext.touched(key)) { + V computed = mappingFunction.apply(key, currentContext.newValueOf(key)); + XAValueHolder returnValueholder = null; + if (computed != null) { + checkValue(computed); + XAValueHolder xaValueHolder = new XAValueHolder<>(computed, timeSource.getTimeMillis()); + V returnValue = currentContext.newValueOf(key); + V oldValue = currentContext.oldValueOf(key); + if (returnValue != null) { + returnValueholder = new XAValueHolder<>(returnValue, timeSource.getTimeMillis()); + } + currentContext.addCommand(key, new StorePutCommand<>(oldValue, xaValueHolder)); + } else { + V returnValue = currentContext.newValueOf(key); + V oldValue = currentContext.oldValueOf(key); + if (returnValue != null) { + returnValueholder = new XAValueHolder<>(returnValue, timeSource.getTimeMillis()); + } + if (oldValue != null) { + currentContext.addCommand(key, new StoreRemoveCommand<>(oldValue)); + } else { + currentContext.removeCommand(key); + } + } + return returnValueholder; + } + + ValueHolder> softLockValueHolder = getSoftLockValueHolderFromUnderlyingStore(key); + + XAValueHolder oldValueHolder = null; + SoftLock softLock = softLockValueHolder == null ? null : softLockValueHolder.get(); + V oldValue = softLock == null ? null : softLock.getOldValue(); + V newValue = mappingFunction.apply(key, oldValue); + XAValueHolder xaValueHolder = newValue == null ? null : new XAValueHolder<>(newValue, timeSource.getTimeMillis()); + if (newValue != null) { + checkValue(newValue); + } + + if (softLock != null && isInDoubt(softLock)) { + currentContext.addCommand(key, new StoreEvictCommand<>(oldValue)); + } else { + if (xaValueHolder == null) { + if (oldValue != null) { + currentContext.addCommand(key, new StoreRemoveCommand<>(oldValue)); + } + } else { + currentContext.addCommand(key, new StorePutCommand<>(oldValue, xaValueHolder)); + } + } + + if (oldValue != null) { + oldValueHolder = new XAValueHolder<>(oldValue, timeSource.getTimeMillis()); + } + + return oldValueHolder; } @Override @@ -594,14 +633,14 @@ public ValueHolder computeIfAbsent(K key, final Function(softLockValueHolder.value().getOldValue())); - xaValueHolder = new XAValueHolder<>(softLockValueHolder, softLockValueHolder.value().getNewValueHolder().value()); + } else if (isInDoubt(softLockValueHolder.get())) { + currentContext.addCommand(key, new StoreEvictCommand<>(softLockValueHolder.get().getOldValue())); + xaValueHolder = new XAValueHolder<>(softLockValueHolder, softLockValueHolder.get().getNewValueHolder().get()); } else { if (updated) { xaValueHolder = currentContext.newValueHolderOf(key); } else { - xaValueHolder = new XAValueHolder<>(softLockValueHolder, softLockValueHolder.value().getOldValue()); + xaValueHolder = new XAValueHolder<>(softLockValueHolder, softLockValueHolder.get().getOldValue()); } } @@ -621,7 +660,7 @@ private ValueHolder updateCommandForKey(K key, BiFunction(newValue, timeSource.getTimeMillis()); - if (!(eq(oldValue, newValue) && !replaceEqual.get())) { + if (!(Objects.equals(oldValue, newValue) && !replaceEqual.get())) { currentContext.addCommand(key, new StorePutCommand<>(oldValue, xaValueHolder)); } } @@ -651,7 +690,7 @@ public Map> bulkCompute(Set keys, final Function< for (K key : keys) { checkKey(key); - final ValueHolder newValue = compute(key, (k, oldValue) -> { + final ValueHolder newValue = computeAndGet(key, (k, oldValue) -> { final Set> entrySet = Collections.singletonMap(k, oldValue).entrySet(); final Iterable> entries = remappingFunction.apply(entrySet); final java.util.Iterator> iterator = entries.iterator(); @@ -664,7 +703,7 @@ public Map> bulkCompute(Set keys, final Function< checkValue(value); } return value; - }, replaceEqual); + }, replaceEqual, () -> false); result.put(key, newValue); } return result; @@ -713,39 +752,29 @@ private static final class SoftLockValueCombinedSerializerLifecycleHelper { private static final class CreatedStoreRef { final Store.Provider storeProvider; - final SoftLockValueCombinedSerializerLifecycleHelper lifecycleHelper; + final SoftLockValueCombinedSerializerLifecycleHelper lifecycleHelper; - public CreatedStoreRef(final Store.Provider storeProvider, final SoftLockValueCombinedSerializerLifecycleHelper lifecycleHelper) { + public CreatedStoreRef(final Store.Provider storeProvider, final SoftLockValueCombinedSerializerLifecycleHelper lifecycleHelper) { this.storeProvider = storeProvider; this.lifecycleHelper = lifecycleHelper; } } @ServiceDependencies({TimeSourceService.class, JournalProvider.class, CopyProvider.class, TransactionManagerProvider.class}) - public static class Provider implements Store.Provider { + @OptionalServiceDependencies("org.ehcache.core.spi.service.StatisticsService") + public static class Provider implements WrapperStore.Provider { private volatile ServiceProvider serviceProvider; private volatile TransactionManagerProvider transactionManagerProvider; private final Map, CreatedStoreRef> createdStores = new ConcurrentWeakIdentityHashMap<>(); @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { - final XAStoreConfiguration xaServiceConfiguration = findSingletonAmongst(XAStoreConfiguration.class, serviceConfigs); - if (xaServiceConfiguration == null) { - // An XAStore must be configured for use - return 0; - } else { - if (this.transactionManagerProvider == null) { - throw new IllegalStateException("A TransactionManagerProvider is mandatory to use XA caches"); - } - } - - final Store.Provider candidateUnderlyingProvider = selectProvider(resourceTypes, serviceConfigs, xaServiceConfiguration); - return 1000 + candidateUnderlyingProvider.rank(resourceTypes, serviceConfigs); + public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { + throw new UnsupportedOperationException("Its a Wrapper store provider, does not support regular ranking"); } @Override - public Store createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public Store createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { Set supportedTypes = EnumSet.allOf(ResourceType.Core.class); Set> configuredTypes = storeConfig.getResourcePools().getResourceTypeSet(); @@ -761,12 +790,14 @@ public Store createStore(Configuration storeConfig, ServiceCo throw new IllegalStateException("XAStore.Provider.createStore called without XAStoreConfiguration"); } - final Store.Provider underlyingStoreProvider = - selectProvider(configuredTypes, Arrays.asList(serviceConfigs), xaServiceConfiguration); + List> serviceConfigList = Arrays.asList(serviceConfigs); + + Store.Provider underlyingStoreProvider = StoreSupport.selectStoreProvider(serviceProvider, + storeConfig.getResourcePools().getResourceTypeSet(), serviceConfigList); String uniqueXAResourceId = xaServiceConfiguration.getUniqueXAResourceId(); - List> underlyingServiceConfigs = new ArrayList<>(); - underlyingServiceConfigs.addAll(Arrays.asList(serviceConfigs)); + List> underlyingServiceConfigs = new ArrayList<>(serviceConfigList.size() + 5); // pad a bit because we add stuff + underlyingServiceConfigs.addAll(serviceConfigList); // eviction advisor EvictionAdvisor realEvictionAdvisor = storeConfig.getEvictionAdvisor(); @@ -778,18 +809,18 @@ public Store createStore(Configuration storeConfig, ServiceCo } // expiry - final Expiry configuredExpiry = storeConfig.getExpiry(); - Expiry> expiry = new Expiry>() { + final ExpiryPolicy configuredExpiry = storeConfig.getExpiry(); + ExpiryPolicy> expiry = new ExpiryPolicy>() { @Override public Duration getExpiryForCreation(K key, SoftLock softLock) { if (softLock.getTransactionId() != null) { // phase 1 prepare, create -> forever - return Duration.INFINITE; + return ExpiryPolicy.INFINITE; } else { // phase 2 commit, or during a TX's lifetime, create -> some time Duration duration; try { - duration = configuredExpiry.getExpiryForCreation(key, (V) softLock.getOldValue()); + duration = configuredExpiry.getExpiryForCreation(key, softLock.getOldValue()); } catch (RuntimeException re) { LOGGER.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); return Duration.ZERO; @@ -799,15 +830,15 @@ public Duration getExpiryForCreation(K key, SoftLock softLock) { } @Override - public Duration getExpiryForAccess(K key, final ValueSupplier> softLock) { - if (softLock.value().getTransactionId() != null) { + public Duration getExpiryForAccess(K key, Supplier> softLock) { + if (softLock.get().getTransactionId() != null) { // phase 1 prepare, access -> forever - return Duration.INFINITE; + return ExpiryPolicy.INFINITE; } else { // phase 2 commit, or during a TX's lifetime, access -> some time Duration duration; try { - duration = configuredExpiry.getExpiryForAccess(key, supplierOf(softLock.value().getOldValue())); + duration = configuredExpiry.getExpiryForAccess(key, () -> softLock.get().getOldValue()); } catch (RuntimeException re) { LOGGER.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); return Duration.ZERO; @@ -817,11 +848,11 @@ public Duration getExpiryForAccess(K key, final ValueSupplier> oldSoftLockSupplier, SoftLock newSoftLock) { - SoftLock oldSoftLock = oldSoftLockSupplier.value(); + public Duration getExpiryForUpdate(K key, Supplier> oldSoftLockSupplier, SoftLock newSoftLock) { + SoftLock oldSoftLock = oldSoftLockSupplier.get(); if (oldSoftLock.getTransactionId() == null) { // phase 1 prepare, update -> forever - return Duration.INFINITE; + return ExpiryPolicy.INFINITE; } else { // phase 2 commit, or during a TX's lifetime if (oldSoftLock.getOldValue() == null) { @@ -837,10 +868,10 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o } else { // there is an old value -> it's an UPDATE, update -> some time V value = oldSoftLock.getNewValueHolder() == null ? null : oldSoftLock - .getNewValueHolder().value(); + .getNewValueHolder().get(); Duration duration; try { - duration = configuredExpiry.getExpiryForUpdate(key, supplierOf(oldSoftLock.getOldValue()), value); + duration = configuredExpiry.getExpiryForUpdate(key, oldSoftLock::getOldValue, value); } catch (RuntimeException re) { LOGGER.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); return Duration.ZERO; @@ -852,17 +883,17 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o }; // get the PersistenceSpaceIdentifier if the cache is persistent, null otherwise - DiskResourceService.PersistenceSpaceIdentifier persistenceSpaceId = findSingletonAmongst(DiskResourceService.PersistenceSpaceIdentifier.class, (Object[]) serviceConfigs); + DiskResourceService.PersistenceSpaceIdentifier persistenceSpaceId = findSingletonAmongst(DiskResourceService.PersistenceSpaceIdentifier.class, (Object[]) serviceConfigs); // find the copiers - Collection copierConfigs = findAmongst(DefaultCopierConfiguration.class, underlyingServiceConfigs); - DefaultCopierConfiguration keyCopierConfig = null; - DefaultCopierConfiguration valueCopierConfig = null; - for (DefaultCopierConfiguration copierConfig : copierConfigs) { + Collection> copierConfigs = uncheckedCast(findAmongst(DefaultCopierConfiguration.class, underlyingServiceConfigs)); + DefaultCopierConfiguration keyCopierConfig = null; + DefaultCopierConfiguration> valueCopierConfig = null; + for (DefaultCopierConfiguration copierConfig : copierConfigs) { if (copierConfig.getType().equals(DefaultCopierConfiguration.Type.KEY)) { - keyCopierConfig = copierConfig; + keyCopierConfig = uncheckedCast(copierConfig); } else if (copierConfig.getType().equals(DefaultCopierConfiguration.Type.VALUE)) { - valueCopierConfig = copierConfig; + valueCopierConfig = uncheckedCast(copierConfig); } underlyingServiceConfigs.remove(copierConfig); } @@ -892,28 +923,33 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o AtomicReference> softLockSerializerRef = new AtomicReference<>(); SoftLockValueCombinedSerializer softLockValueCombinedSerializer; if (storeConfig.getValueSerializer() instanceof StatefulSerializer) { - softLockValueCombinedSerializer = new StatefulSoftLockValueCombinedSerializer(softLockSerializerRef, storeConfig.getValueSerializer()); + softLockValueCombinedSerializer = new StatefulSoftLockValueCombinedSerializer(softLockSerializerRef, storeConfig + .getValueSerializer()); } else { - softLockValueCombinedSerializer = new SoftLockValueCombinedSerializer<>(softLockSerializerRef, storeConfig + softLockValueCombinedSerializer = new SoftLockValueCombinedSerializer(softLockSerializerRef, storeConfig .getValueSerializer()); } // create the underlying store - @SuppressWarnings("unchecked") - Class> softLockClass = (Class) SoftLock.class; + Class> softLockClass = uncheckedCast(SoftLock.class); Store.Configuration> underlyingStoreConfig = new StoreConfigurationImpl<>(storeConfig.getKeyType(), softLockClass, evictionAdvisor, storeConfig.getClassLoader(), expiry, storeConfig.getResourcePools(), storeConfig.getDispatcherConcurrency(), storeConfig .getKeySerializer(), softLockValueCombinedSerializer); - Store> underlyingStore = underlyingStoreProvider.createStore(underlyingStoreConfig, underlyingServiceConfigs.toArray(new ServiceConfiguration[0])); + Store> underlyingStore = underlyingStoreProvider.createStore(underlyingStoreConfig, underlyingServiceConfigs.toArray(new ServiceConfiguration[0])); // create the XA store + StatisticsService statisticsService = serviceProvider.getService(StatisticsService.class); TransactionManagerWrapper transactionManagerWrapper = transactionManagerProvider.getTransactionManagerWrapper(); Store store = new XAStore<>(storeConfig.getKeyType(), storeConfig.getValueType(), underlyingStore, - transactionManagerWrapper, timeSource, journal, uniqueXAResourceId); + transactionManagerWrapper, timeSource, journal, uniqueXAResourceId, statisticsService); + + if (statisticsService != null) { + statisticsService.registerWithParent(underlyingStore, store); + } // create the softLockSerializer lifecycle helper SoftLockValueCombinedSerializerLifecycleHelper helper = - new SoftLockValueCombinedSerializerLifecycleHelper<>(softLockSerializerRef, storeConfig.getClassLoader()); + new SoftLockValueCombinedSerializerLifecycleHelper(softLockSerializerRef, storeConfig.getClassLoader()); createdStores.put(store, new CreatedStoreRef(underlyingStoreProvider, helper)); return store; @@ -946,7 +982,6 @@ public void releaseStore(Store resource) { } @Override - @SuppressWarnings("unchecked") public void initStore(Store resource) { CreatedStoreRef createdStoreRef = createdStores.get(resource); if (createdStoreRef == null) { @@ -959,7 +994,7 @@ public void initStore(Store resource) { XAStore xaStore = (XAStore) resource; underlyingStoreProvider.initStore(xaStore.underlyingStore); - helper.softLockSerializerRef.set(new SoftLockSerializer(helper.classLoader)); + helper.softLockSerializerRef.set(new SoftLockSerializer<>(helper.classLoader)); try { xaStore.journal.open(); } catch (IOException ioe) { @@ -983,12 +1018,18 @@ public void stop() { this.serviceProvider = null; } - private Store.Provider selectProvider(final Set> resourceTypes, - final Collection> serviceConfigs, - final XAStoreConfiguration xaConfig) { - List> configsWithoutXA = new ArrayList<>(serviceConfigs); - configsWithoutXA.remove(xaConfig); - return StoreSupport.selectStoreProvider(serviceProvider, resourceTypes, configsWithoutXA); + @Override + public int wrapperStoreRank(Collection> serviceConfigs) { + XAStoreConfiguration xaServiceConfiguration = findSingletonAmongst(XAStoreConfiguration.class, serviceConfigs); + if (xaServiceConfiguration == null) { + // An XAStore must be configured for use + return 0; + } else { + if (this.transactionManagerProvider == null) { + throw new IllegalStateException("A TransactionManagerProvider is mandatory to use XA caches"); + } + } + return 1; } } @@ -1005,5 +1046,4 @@ public boolean adviseAgainstEviction(K key, SoftLock softLock) { return isInDoubt(softLock) || wrappedEvictionAdvisor.adviseAgainstEviction(key, softLock.getOldValue()); } } - } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContext.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContext.java similarity index 97% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContext.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContext.java index 6c784b50e2..c4b92a9387 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContext.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContext.java @@ -16,7 +16,7 @@ package org.ehcache.transactions.xa.internal; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.RemoveStatus; @@ -123,7 +123,7 @@ public XAValueHolder newValueHolderOf(K key) { public V newValueOf(K key) { Command command = commands.get(key); XAValueHolder valueHolder = command == null ? null : command.getNewValueHolder(); - return valueHolder == null ? null : valueHolder.value(); + return valueHolder == null ? null : valueHolder.get(); } public int prepare() throws StoreAccessException, IllegalStateException, TransactionTimeoutException { @@ -186,7 +186,7 @@ public void commit(boolean recovering) throws StoreAccessException, IllegalState for (K key : keys) { SoftLock preparedSoftLock = getFromUnderlyingStore(key); XAValueHolder newValueHolder = preparedSoftLock == null ? null : preparedSoftLock.getNewValueHolder(); - SoftLock definitiveSoftLock = newValueHolder == null ? null : new SoftLock<>(null, newValueHolder.value(), null); + SoftLock definitiveSoftLock = newValueHolder == null ? null : new SoftLock<>(null, newValueHolder.get(), null); if (preparedSoftLock != null) { if (preparedSoftLock.getTransactionId() != null && !preparedSoftLock.getTransactionId().equals(transactionId)) { @@ -293,12 +293,12 @@ private boolean replaceInUnderlyingStore(K key, SoftLock preparedSoftLock, So } private Store.ValueHolder> putIfAbsentInUnderlyingStore(Map.Entry> entry, SoftLock newSoftLock) throws StoreAccessException { - return underlyingStore.putIfAbsent(entry.getKey(), newSoftLock); + return underlyingStore.putIfAbsent(entry.getKey(), newSoftLock, b -> {}); } private SoftLock getFromUnderlyingStore(K key) throws StoreAccessException { Store.ValueHolder> softLockValueHolder = underlyingStore.get(key); - return softLockValueHolder == null ? null : softLockValueHolder.value(); + return softLockValueHolder == null ? null : softLockValueHolder.get(); } private void evictFromUnderlyingStore(K key) throws StoreAccessException { @@ -306,6 +306,7 @@ private void evictFromUnderlyingStore(K key) throws StoreAccessException { } static class TransactionTimeoutException extends RuntimeException { + private static final long serialVersionUID = -4629992436523905812L; } } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContextFactory.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContextFactory.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContextFactory.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XATransactionContextFactory.java diff --git a/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java new file mode 100644 index 0000000000..db1f629b87 --- /dev/null +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java @@ -0,0 +1,142 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.internal; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import org.ehcache.core.spi.store.AbstractValueHolder; +import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.serialization.Serializer; + +import java.io.Serializable; +import java.nio.ByteBuffer; + +import static org.ehcache.transactions.xa.internal.TypeUtil.uncheckedCast; + +/** + * The {@link XAStore} {@link Store.ValueHolder} implementation. + * + * @author Ludovic Orban + */ +@SuppressFBWarnings("SE_NO_SUITABLE_CONSTRUCTOR") +@SuppressWarnings("serial") //this class has writeReplace/readResolve methods +public class XAValueHolder extends AbstractValueHolder implements Serializable { + + private final V value; + private final byte[] valueSerialized; + + public XAValueHolder(Store.ValueHolder> valueHolder, V value) { + super(-1, valueHolder.creationTime(), valueHolder.expirationTime()); + this.value = value; + this.valueSerialized = null; + } + + public XAValueHolder(V value, long creationTime) { + super(-1, creationTime, NO_EXPIRE); + if (value == null) { + throw new NullPointerException("null value"); + } + this.value = value; + this.valueSerialized = null; + } + + private XAValueHolder(XAValueHolder valueHolder, ByteBuffer serializedValue) { + super(-1, valueHolder.creationTime(), valueHolder.expirationTime()); + this.value = null; + this.valueSerialized = new byte[serializedValue.remaining()]; + serializedValue.get(this.valueSerialized); + } + + public XAValueHolder(XAValueHolder valueHolder, V value) { + super(-1, valueHolder.creationTime(), valueHolder.expirationTime()); + this.value = value; + this.valueSerialized = null; + } + + private XAValueHolder(long id, long creationTime, long lastAccessTime, long expirationTime, V value, byte[] valueSerialized) { + super(id, creationTime, expirationTime); + setLastAccessTime(lastAccessTime); + this.value = value; + this.valueSerialized = valueSerialized; + } + + protected XAValueHolder copyForSerialization(Serializer valueSerializer) { + ByteBuffer serializedValue = valueSerializer.serialize(value); + return new XAValueHolder<>(this, serializedValue); + } + + protected XAValueHolder copyAfterDeserialization(Serializer valueSerializer) throws ClassNotFoundException { + return new XAValueHolder<>(this, valueSerializer.read(ByteBuffer.wrap(valueSerialized))); + } + + @Override + public V get() { + return value; + } + + @Override + public int hashCode() { + int result = 1; + result = 31 * result + value.hashCode(); + result = 31 * result + super.hashCode(); + return result; + } + + @Override + public boolean equals(Object other) { + if (this == other) return true; + if (other == null || getClass() != other.getClass()) return false; + + XAValueHolder that = uncheckedCast(other); + + if (!super.equals(that)) return false; + return value.equals(that.value); + } + + private Object writeReplace() { + return new SerializedXAValueHolder<>(getId(), creationTime(), lastAccessTime(), expirationTime(), + get(), valueSerialized); + } + + /** + * Synthetic type used as serialized form of XAValueHolder + * + * @param the value type + */ + private static class SerializedXAValueHolder implements Serializable { + private static final long serialVersionUID = -9126450990666297321L; + private final long id; + private final long creationTime; + private final long lastAccessTime; + private final long expirationTime; + private final V value; + private final byte[] valueSerialized; + + SerializedXAValueHolder(long id, long creationTime, long lastAccessTime, long expirationTime, V value, byte[] valueSerialized) { + this.id = id; + this.creationTime = creationTime; + this.lastAccessTime = lastAccessTime; + this.expirationTime = expirationTime; + this.value = value; + this.valueSerialized = valueSerialized; + } + + private Object readResolve() { + return new XAValueHolder<>(id, creationTime, lastAccessTime, expirationTime, value, valueSerialized); + } + } + +} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/Command.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/Command.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/Command.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/Command.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StoreEvictCommand.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StoreEvictCommand.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StoreEvictCommand.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StoreEvictCommand.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StorePutCommand.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StorePutCommand.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StorePutCommand.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StorePutCommand.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StoreRemoveCommand.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StoreRemoveCommand.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StoreRemoveCommand.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/commands/StoreRemoveCommand.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/configuration/XAStoreProviderFactory.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/configuration/XAStoreProviderFactory.java similarity index 86% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/configuration/XAStoreProviderFactory.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/configuration/XAStoreProviderFactory.java index 3427f5fa24..dfd7b9ba2a 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/configuration/XAStoreProviderFactory.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/configuration/XAStoreProviderFactory.java @@ -19,18 +19,20 @@ import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.transactions.xa.internal.XAStore; +import org.osgi.service.component.annotations.Component; /** * @author Ludovic Orban */ +@Component public class XAStoreProviderFactory implements ServiceFactory { @Override - public XAStore.Provider create(ServiceCreationConfiguration configuration) { + public XAStore.Provider create(ServiceCreationConfiguration configuration) { return new XAStore.Provider(); } @Override - public Class getServiceType() { + public Class getServiceType() { return XAStore.Provider.class; } } diff --git a/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java new file mode 100644 index 0000000000..9b75bf375b --- /dev/null +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java @@ -0,0 +1,68 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.internal.journal; + +import org.ehcache.CachePersistenceException; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.spi.persistence.PersistableResourceService; +import org.ehcache.spi.service.OptionalServiceDependencies; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.spi.service.Service; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Ludovic Orban + */ +@OptionalServiceDependencies("org.ehcache.core.spi.service.DiskResourceService") +public class DefaultJournalProvider implements JournalProvider { + + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultJournalProvider.class); + + private volatile DiskResourceService diskResourceService; + + @Override + public void start(ServiceProvider serviceProvider) { + this.diskResourceService = serviceProvider.getService(DiskResourceService.class); + } + + @Override + public void stop() { + this.diskResourceService = null; + } + + @Override + public Journal getJournal(PersistableResourceService.PersistenceSpaceIdentifier persistentSpaceId, Serializer keySerializer) { + if (persistentSpaceId == null) { + LOGGER.info("Using transient XAStore journal"); + return new TransientJournal<>(); + } else if (diskResourceService == null) { + throw new AssertionError("Null diskResourceService with non-null persistentSpaceId [" + persistentSpaceId + "]"); + } else { + try { + LOGGER.info("Using persistent XAStore journal"); + FileBasedPersistenceContext persistenceContext = diskResourceService.createPersistenceContextWithin(persistentSpaceId, "XAJournal"); + return new PersistentJournal<>(persistenceContext.getDirectory(), keySerializer); + } catch (CachePersistenceException cpe) { + throw new RuntimeException(cpe); + } + } + } +} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProviderFactory.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProviderFactory.java similarity index 83% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProviderFactory.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProviderFactory.java index 3ae49dee0a..15f777336c 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProviderFactory.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProviderFactory.java @@ -18,19 +18,21 @@ import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.spi.service.ServiceFactory; +import org.osgi.service.component.annotations.Component; /** * @author Ludovic Orban */ +@Component public class DefaultJournalProviderFactory implements ServiceFactory { @Override - public JournalProvider create(ServiceCreationConfiguration configuration) { + public JournalProvider create(ServiceCreationConfiguration configuration) { return new DefaultJournalProvider(); } @Override - public Class getServiceType() { - return JournalProvider.class; + public Class getServiceType() { + return DefaultJournalProvider.class; } } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/Journal.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/Journal.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/Journal.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/Journal.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/JournalProvider.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/JournalProvider.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/JournalProvider.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/JournalProvider.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java similarity index 96% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java index 1d3e8830d4..76c82b8f51 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java @@ -34,6 +34,8 @@ import java.util.HashMap; import java.util.Map; +import static org.ehcache.transactions.xa.internal.TypeUtil.uncheckedCast; + /** * A persistent, but not durable {@link Journal} implementation. * This implementation will persist saved states during close and restore them during open. If close is not called, @@ -47,6 +49,7 @@ public class PersistentJournal extends TransientJournal { private static final String JOURNAL_FILENAME = "journal.data"; protected static class SerializableEntry implements Serializable { + private static final long serialVersionUID = -6586025792671381923L; final XAState state; final boolean heuristic; final Collection serializedKeys; @@ -95,8 +98,7 @@ public void open() throws IOException { boolean valid = ois.readBoolean(); states.clear(); if (valid) { - @SuppressWarnings("unchecked") - Map> readStates = (Map>) ois.readObject(); + Map> readStates = uncheckedCast(ois.readObject()); for (Map.Entry> entry : readStates.entrySet()) { SerializableEntry value = entry.getValue(); states.put(entry.getKey(), new Entry<>(value.state, value.heuristic, value.deserializeKeys(keySerializer))); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/TransientJournal.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/TransientJournal.java similarity index 93% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/TransientJournal.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/TransientJournal.java index 795964bfe6..bcca1387e0 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/TransientJournal.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/TransientJournal.java @@ -65,12 +65,12 @@ public Entry(XAState state, boolean heuristic, Collection keys) { @Override public void saveCommitted(TransactionId transactionId, boolean heuristicDecision) { - save(transactionId, XAState.COMMITTED, heuristicDecision, Collections.emptySet()); + save(transactionId, XAState.COMMITTED, heuristicDecision, Collections.emptySet()); } @Override public void saveRolledBack(TransactionId transactionId, boolean heuristicDecision) { - save(transactionId, XAState.ROLLED_BACK, heuristicDecision, Collections.emptySet()); + save(transactionId, XAState.ROLLED_BACK, heuristicDecision, Collections.emptySet()); } @Override @@ -82,12 +82,12 @@ private void save(TransactionId transactionId, XAState xaState, boolean heuristi if (!heuristicDecision) { // check for heuristics if (xaState == XAState.IN_DOUBT) { - Entry existing = states.putIfAbsent(transactionId, new Entry<>(xaState, false, inDoubtKeys)); + Entry existing = states.putIfAbsent(transactionId, new Entry<>(xaState, false, inDoubtKeys)); if (existing != null) { throw new IllegalStateException("A transaction cannot go back to in-doubt state"); } } else { - Entry entry = states.get(transactionId); + Entry entry = states.get(transactionId); if (entry != null && entry.heuristic) { throw new IllegalStateException("A heuristically terminated transaction cannot be normally terminated, it must be forgotten"); } @@ -97,7 +97,7 @@ private void save(TransactionId transactionId, XAState xaState, boolean heuristi if (xaState == XAState.IN_DOUBT) { throw new IllegalStateException("A transaction cannot enter in-doubt state heuristically"); } else { - Entry replaced = states.replace(transactionId, new Entry<>(xaState, true, Collections.emptySet())); + Entry replaced = states.replace(transactionId, new Entry<>(xaState, true, Collections.emptySet())); if (replaced == null) { throw new IllegalStateException("Only in-doubt transactions can be heuristically terminated"); } @@ -139,7 +139,7 @@ public boolean isHeuristicallyTerminated(TransactionId transactionId) { @Override public void forget(TransactionId transactionId) { - Entry entry = states.get(transactionId); + Entry entry = states.get(transactionId); if (entry != null) { if (entry.heuristic) { states.remove(transactionId); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/NullXAResourceRegistry.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/NullXAResourceRegistry.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/NullXAResourceRegistry.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/NullXAResourceRegistry.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java similarity index 86% rename from transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java index 5edb71e685..2cbf253513 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java @@ -21,12 +21,14 @@ import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProvider; import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider; +import org.osgi.service.component.annotations.Component; /** * {@link ServiceFactory} for the default {@link TransactionManagerProvider} * * @see LookupTransactionManagerProvider */ +@Component @ServiceFactory.RequiresConfiguration public class DefaultTransactionManagerProviderFactory implements ServiceFactory { @@ -34,7 +36,7 @@ public class DefaultTransactionManagerProviderFactory implements ServiceFactory< * {@inheritDoc} */ @Override - public TransactionManagerProvider create(ServiceCreationConfiguration configuration) { + public TransactionManagerProvider create(ServiceCreationConfiguration configuration) { return new LookupTransactionManagerProvider((LookupTransactionManagerProviderConfiguration) configuration); } @@ -42,7 +44,7 @@ public TransactionManagerProvider create(ServiceCreationConfiguration getServiceType() { - return TransactionManagerProvider.class; + public Class getServiceType() { + return LookupTransactionManagerProvider.class; } } diff --git a/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java new file mode 100644 index 0000000000..1a58b26816 --- /dev/null +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java @@ -0,0 +1,102 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.internal.xml; + +import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; +import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerLookup; +import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider; +import org.ehcache.xml.BaseConfigParser; +import org.ehcache.xml.CacheManagerServiceConfigurationParser; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.JaxbParsers; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.osgi.service.component.annotations.Component; +import org.w3c.dom.Document; +import org.w3c.dom.Element; + +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +import java.io.IOException; +import java.net.URI; +import java.net.URL; + +import static org.ehcache.core.util.ClassLoading.delegationChain; +import static org.ehcache.transactions.xa.internal.TypeUtil.uncheckedCast; + +/** + * @author Ludovic Orban + */ +@Component +public class TxCacheManagerServiceConfigurationParser extends BaseConfigParser implements CacheManagerServiceConfigurationParser { + private static final URI NAMESPACE = URI.create("http://www.ehcache.org/v3/tx"); + private static final URL XML_SCHEMA = TxCacheManagerServiceConfigurationParser.class.getResource("/ehcache-tx-ext.xsd"); + public static final String TRANSACTION_NAMESPACE_PREFIX = "tx:"; + private static final String TRANSACTION_ELEMENT_NAME = "jta-tm"; + private static final String TRANSACTION_LOOKUP_CLASS = "transaction-manager-lookup-class"; + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public URI getNamespace() { + return NAMESPACE; + } + + @Override + public ServiceCreationConfiguration parseServiceCreationConfiguration(Element fragment, ClassLoader classLoader) { + String localName = fragment.getLocalName(); + if ("jta-tm".equals(localName)) { + String transactionManagerProviderConfigurationClassName = JaxbParsers.parsePropertyOrString(fragment.getAttribute("transaction-manager-lookup-class")); + try { + Class aClass = Class.forName(transactionManagerProviderConfigurationClassName, true, delegationChain( + () -> Thread.currentThread().getContextClassLoader(), + getClass().getClassLoader(), + classLoader + )); + Class clazz = uncheckedCast(aClass); + return new LookupTransactionManagerProviderConfiguration(clazz); + } catch (Exception e) { + throw new XmlConfigurationException("Error configuring XA transaction manager", e); + } + } else { + throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", + fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); + } + } + + @Override + public Class getServiceType() { + return TransactionManagerProvider.class; + } + + @Override + public Element unparseServiceCreationConfiguration(ServiceCreationConfiguration serviceCreationConfiguration) { + return unparseConfig(serviceCreationConfiguration); + } + + @Override + protected Element createRootElement(Document doc, LookupTransactionManagerProviderConfiguration lookupTransactionManagerProviderConfiguration) { + Element rootElement = doc.createElementNS(NAMESPACE.toString(), TRANSACTION_NAMESPACE_PREFIX + TRANSACTION_ELEMENT_NAME); + rootElement.setAttribute(TRANSACTION_LOOKUP_CLASS, lookupTransactionManagerProviderConfiguration.getTransactionManagerLookup() + .getName()); + return rootElement; + } + +} diff --git a/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParser.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParser.java new file mode 100644 index 0000000000..f5b5993385 --- /dev/null +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParser.java @@ -0,0 +1,88 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.internal.xml; + +import org.ehcache.xml.BaseConfigParser; +import org.ehcache.xml.CacheServiceConfigurationParser; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.transactions.xa.internal.XAStore; +import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; +import org.ehcache.xml.JaxbParsers; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.osgi.service.component.annotations.Component; +import org.w3c.dom.Document; +import org.w3c.dom.Element; + +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; +import java.io.IOException; +import java.net.URI; +import java.net.URL; + +import static org.ehcache.transactions.xa.internal.xml.TxCacheManagerServiceConfigurationParser.TRANSACTION_NAMESPACE_PREFIX; + +/** + * @author Ludovic Orban + */ +@Component +public class TxCacheServiceConfigurationParser extends BaseConfigParser implements CacheServiceConfigurationParser { + + private static final URI NAMESPACE = URI.create("http://www.ehcache.org/v3/tx"); + private static final URL XML_SCHEMA = TxCacheManagerServiceConfigurationParser.class.getResource("/ehcache-tx-ext.xsd"); + private static final String STORE_ELEMENT_NAME = "xa-store"; + private static final String UNIQUE_RESOURCE_NAME = "unique-XAResource-id"; + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public URI getNamespace() { + return NAMESPACE; + } + + @Override + public ServiceConfiguration parseServiceConfiguration(Element fragment, ClassLoader classLoader) { + String localName = fragment.getLocalName(); + if ("xa-store".equals(localName)) { + String uniqueXAResourceId = JaxbParsers.parsePropertyOrString(fragment.getAttribute("unique-XAResource-id")); + return new XAStoreConfiguration(uniqueXAResourceId); + } else { + throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", + fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); + } + } + + @Override + public Class getServiceType() { + return XAStore.Provider.class; + } + + @Override + public Element unparseServiceConfiguration(ServiceConfiguration serviceConfiguration) { + return unparseConfig(serviceConfiguration); + } + + @Override + protected Element createRootElement(Document doc, XAStoreConfiguration storeConfiguration) { + Element rootElement = doc.createElementNS(NAMESPACE.toString(), TRANSACTION_NAMESPACE_PREFIX + STORE_ELEMENT_NAME); + rootElement.setAttribute(UNIQUE_RESOURCE_NAME, storeConfiguration.getUniqueXAResourceId()); + return rootElement; + } + +} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/TransactionManagerWrapper.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/TransactionManagerWrapper.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/TransactionManagerWrapper.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/TransactionManagerWrapper.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/XAResourceRegistry.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/XAResourceRegistry.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/XAResourceRegistry.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/XAResourceRegistry.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixXAResourceRegistry.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixXAResourceRegistry.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixXAResourceRegistry.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixXAResourceRegistry.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceHolder.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceHolder.java similarity index 92% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceHolder.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceHolder.java index 27dbadca65..d439ae6762 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceHolder.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceHolder.java @@ -59,14 +59,14 @@ public ResourceBean getResourceBean() { /** * {@inheritDoc} */ - public void close() throws Exception { + public void close() { throw new UnsupportedOperationException("Ehcache3XAResourceHolder cannot be used with an XAPool"); } /** * {@inheritDoc} */ - public Object getConnectionHandle() throws Exception { + public Object getConnectionHandle() { throw new UnsupportedOperationException("Ehcache3XAResourceHolder cannot be used with an XAPool"); } @@ -81,7 +81,7 @@ public Date getLastReleaseDate() { * {@inheritDoc} */ public List getXAResourceHolders() { - return Collections.singletonList((XAResourceHolder) this); + return Collections.singletonList(this); } } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceProducer.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceProducer.java similarity index 96% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceProducer.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceProducer.java index 1160abbed1..84fa57bec9 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceProducer.java +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/Ehcache3XAResourceProducer.java @@ -78,7 +78,7 @@ boolean isEmpty() { return xaResourceHolders.isEmpty(); } - public void endRecovery() throws RecoveryException { + public void endRecovery() { recoveryXAResourceHolder = null; } @@ -109,12 +109,12 @@ public void close() { } @Override - public XAStatefulHolder createPooledConnection(Object xaFactory, ResourceBean bean) throws Exception { + public XAStatefulHolder createPooledConnection(Object xaFactory, ResourceBean bean) { throw new UnsupportedOperationException("Ehcache is not connection-oriented"); } @Override - public Reference getReference() throws NamingException { + public Reference getReference() { return new Reference(Ehcache3XAResourceProducer.class.getName(), new StringRefAddr("uniqueName", getUniqueName()), ResourceObjectFactory.class.getName(), null); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java diff --git a/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java new file mode 100644 index 0000000000..3a6935512e --- /dev/null +++ b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.txmgr.provider; + +import org.ehcache.spi.service.ServiceCreationConfiguration; + +import static org.ehcache.transactions.xa.internal.TypeUtil.uncheckedCast; + +/** + * Specialized {@link ServiceCreationConfiguration} for the {@link LookupTransactionManagerProvider}. + */ +public class LookupTransactionManagerProviderConfiguration implements ServiceCreationConfiguration> { + + private final Class lookupClass; + + public LookupTransactionManagerProviderConfiguration(String className) throws ClassNotFoundException { + this.lookupClass = uncheckedCast(Class.forName(className)); + } + + public LookupTransactionManagerProviderConfiguration(Class clazz) { + this.lookupClass = clazz; + } + + /** + * Returns the class to be used for transaction manager lookup. + * + * @return the transaction manager lookup class + */ + public Class getTransactionManagerLookup() { + return lookupClass; + } + + @Override + public Class getServiceType() { + return TransactionManagerProvider.class; + } + + @Override + public Class derive() { + return getTransactionManagerLookup(); + } + + @Override + public LookupTransactionManagerProviderConfiguration build(Class clazz) { + return new LookupTransactionManagerProviderConfiguration(clazz); + } +} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/TransactionManagerLookup.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/TransactionManagerLookup.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/TransactionManagerLookup.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/TransactionManagerLookup.java diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/TransactionManagerProvider.java b/ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/TransactionManagerProvider.java similarity index 100% rename from transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/TransactionManagerProvider.java rename to ehcache-transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/TransactionManagerProvider.java diff --git a/transactions/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/ehcache-transactions/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory similarity index 100% rename from transactions/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory rename to ehcache-transactions/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory diff --git a/transactions/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser b/ehcache-transactions/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser similarity index 100% rename from transactions/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser rename to ehcache-transactions/src/main/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser diff --git a/transactions/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser b/ehcache-transactions/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser similarity index 100% rename from transactions/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser rename to ehcache-transactions/src/main/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser diff --git a/transactions/src/main/resources/ehcache-tx-ext.xsd b/ehcache-transactions/src/main/resources/ehcache-tx-ext.xsd similarity index 91% rename from transactions/src/main/resources/ehcache-tx-ext.xsd rename to ehcache-transactions/src/main/resources/ehcache-tx-ext.xsd index 23f5bd6ab6..266c992aa5 100644 --- a/transactions/src/main/resources/ehcache-tx-ext.xsd +++ b/ehcache-transactions/src/main/resources/ehcache-tx-ext.xsd @@ -28,11 +28,11 @@ - + - + - \ No newline at end of file + diff --git a/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java b/ehcache-transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java similarity index 87% rename from transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java rename to ehcache-transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java index 6293b0a4b6..5caf90a9d7 100644 --- a/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java +++ b/ehcache-transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java @@ -20,18 +20,16 @@ import org.ehcache.Cache; import org.ehcache.CacheManager; import org.ehcache.PersistentCacheManager; -import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.Configuration; +import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup; import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; import org.ehcache.xml.XmlConfiguration; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.transactions.xa.XACacheException; import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; @@ -45,15 +43,14 @@ import java.io.File; import java.io.IOException; -import java.net.URISyntaxException; import java.net.URL; import java.util.HashMap; import java.util.Map; import java.util.concurrent.locks.ReentrantReadWriteLock; import static java.util.Collections.singletonMap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** @@ -86,12 +83,12 @@ public void testSimpleXACache() throws Exception { .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) // <2> .withCache("xaCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <3> ResourcePoolsBuilder.heap(10)) // <4> - .add(new XAStoreConfiguration("xaCache")) // <5> + .withService(new XAStoreConfiguration("xaCache")) // <5> .build() ) .build(true); - final Cache xaCache = cacheManager.getCache("xaCache", Long.class, String.class); + Cache xaCache = cacheManager.getCache("xaCache", Long.class, String.class); transactionManager.begin(); // <6> { @@ -114,12 +111,12 @@ public void testNonTransactionalAccess() throws Exception { .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) // <2> .withCache("xaCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <3> ResourcePoolsBuilder.heap(10)) // <4> - .add(new XAStoreConfiguration("xaCache")) // <5> + .withService(new XAStoreConfiguration("xaCache")) // <5> .build() ) .build(true); - final Cache xaCache = cacheManager.getCache("xaCache", Long.class, String.class); + Cache xaCache = cacheManager.getCache("xaCache", Long.class, String.class); try { xaCache.get(1L); // <6> @@ -146,13 +143,13 @@ public void testXACacheWithWriteThrough() throws Exception { .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) // <2> .withCache("xaCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <3> ResourcePoolsBuilder.heap(10)) // <4> - .add(new XAStoreConfiguration("xaCache")) // <5> - .add(new DefaultCacheLoaderWriterConfiguration(klazz, singletonMap(1L, "eins"))) // <6> + .withService(new XAStoreConfiguration("xaCache")) // <5> + .withService(new DefaultCacheLoaderWriterConfiguration(klazz, singletonMap(1L, "eins"))) // <6> .build() ) .build(true); - final Cache xaCache = cacheManager.getCache("xaCache", Long.class, String.class); + Cache xaCache = cacheManager.getCache("xaCache", Long.class, String.class); transactionManager.begin(); // <7> { @@ -181,12 +178,12 @@ public void testXACacheWithThreeTiers() throws Exception { .offheap(10, MemoryUnit.MB) .disk(20, MemoryUnit.MB, true) ) - .add(new XAStoreConfiguration("xaCache")) // <6> + .withService(new XAStoreConfiguration("xaCache")) // <6> .build() ) .build(true); - final Cache xaCache = persistentCacheManager.getCache("xaCache", Long.class, String.class); + Cache xaCache = persistentCacheManager.getCache("xaCache", Long.class, String.class); transactionManager.begin(); // <7> { @@ -231,7 +228,7 @@ public SampleLoaderWriter(Map initialData) { } @Override - public V load(K key) throws Exception { + public V load(K key) { lock.readLock().lock(); try { V v = data.get(key); @@ -243,12 +240,12 @@ public V load(K key) throws Exception { } @Override - public Map loadAll(Iterable keys) throws Exception { + public Map loadAll(Iterable keys) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void write(K key, V value) throws Exception { + public void write(K key, V value) { lock.writeLock().lock(); try { data.put(key, value); @@ -259,7 +256,7 @@ public void write(K key, V value) throws Exception { } @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { + public void writeAll(Iterable> entries) { lock.writeLock().lock(); try { for (Map.Entry entry : entries) { @@ -272,7 +269,7 @@ public void writeAll(Iterable> ent } @Override - public void delete(K key) throws Exception { + public void delete(K key) { lock.writeLock().lock(); try { data.remove(key); @@ -283,7 +280,7 @@ public void delete(K key) throws Exception { } @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { + public void deleteAll(Iterable keys) { lock.writeLock().lock(); try { for (K key : keys) { diff --git a/ehcache-transactions/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java b/ehcache-transactions/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java new file mode 100644 index 0000000000..7668106c27 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.store.offheap; + +/* + * This is an import of a shaded class because we depend on the shaded distribution jar. + * This means we have to use the shaded StatisticsManager when digging in to the internals like this. + */ +import org.ehcache.shadow.org.terracotta.statistics.StatisticsManager; + +/** + * @author Ludovic Orban + */ +public class OffHeapStoreLifecycleHelper { + + private OffHeapStoreLifecycleHelper() { + } + + public static void init(OffHeapStore offHeapStore) { + OffHeapStore.Provider.init(offHeapStore); + } + + public static void close(OffHeapStore offHeapStore) { + OffHeapStore.Provider.close(offHeapStore); + StatisticsManager.nodeFor(offHeapStore).clean(); + } + +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/NonXACacheTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/NonXACacheTest.java new file mode 100644 index 0000000000..81bcae99fb --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/NonXACacheTest.java @@ -0,0 +1,70 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions; + +import org.ehcache.CacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.core.util.ClassLoading; +import org.ehcache.transactions.xa.internal.XAStore; +import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProvider; +import org.junit.Test; + +import static java.util.Spliterators.spliterator; +import static java.util.stream.Collectors.toList; +import static java.util.stream.StreamSupport.stream; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsCollectionContaining.hasItems; + +/** + * Ensures that a non-XA {@code CacheManager} can be created when XA classes are + * available in classpath. + */ +public class NonXACacheTest { + + @Test + public void testNonXA() throws Exception { + + /* + * Ensure the XA provider classes are loadable through the ServiceLoader mechanism. + */ + assertThat(stream(spliterator(ClassLoading.servicesOfType(ServiceFactory.class).iterator(), Long.MAX_VALUE, 0), false).map(s -> s.getServiceType()).collect(toList()), + hasItems(XAStore.Provider.class, LookupTransactionManagerProvider.class)); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder( + String.class, + String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .build()) + .build(); + + + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true); + + cacheManager.createCache("cache-1", cacheConfiguration); + cacheManager.createCache("cache-2", cacheConfiguration); + + cacheManager.close(); + } +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/TransactionalCacheParserIT.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/TransactionalCacheParserIT.java new file mode 100644 index 0000000000..9a1d723fc8 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/TransactionalCacheParserIT.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.transactions; + +import org.ehcache.config.Configuration; +import org.ehcache.xml.XmlConfiguration; +import org.junit.Test; + +import java.net.URL; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * TransactionalCacheParserIT + */ +public class TransactionalCacheParserIT { + + @Test + public void testTransactionalCacheXmlTranslationToString() { + URL resource = TransactionalCacheParserIT.class.getResource("/configs/transactional-cache.xml"); + Configuration config = new XmlConfiguration(resource); + XmlConfiguration xmlConfig = new XmlConfiguration(config); + assertThat(xmlConfig.toString(), isSameConfigurationAs(resource)); + } +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/XmlConfigTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/XmlConfigTest.java new file mode 100644 index 0000000000..2334523844 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/XmlConfigTest.java @@ -0,0 +1,63 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.transactions; + +import bitronix.tm.BitronixTransactionManager; +import bitronix.tm.TransactionManagerServices; +import org.ehcache.CacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.core.spi.service.ServiceUtils; +import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.junit.Test; + +import java.net.URL; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +/** + * @author Ludovic Orban + */ +public class XmlConfigTest { + + @Test + public void testSimpleConfig() throws Exception { + TransactionManagerServices.getConfiguration().setJournal("null").setServerId("XmlConfigTest"); + BitronixTransactionManager transactionManager = TransactionManagerServices.getTransactionManager(); + + final URL myUrl = this.getClass().getResource("/configs/simple-xa.xml"); + Configuration xmlConfig = new XmlConfiguration(myUrl); + CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(xmlConfig); + myCacheManager.init(); + + myCacheManager.close(); + transactionManager.shutdown(); + } + + @Test + public void testTemplateConfigOverride() throws Exception { + final URL myUrl = this.getClass().getResource("/configs/template-xa.xml"); + Configuration xmlConfig = new XmlConfiguration(myUrl); + CacheConfiguration cacheConfiguration = xmlConfig.getCacheConfigurations().get("xaCache1"); + XAStoreConfiguration xaStoreConfiguration = ServiceUtils.findSingletonAmongst(XAStoreConfiguration.class, cacheConfiguration + .getServiceConfigurations()); + + assertThat(xaStoreConfiguration.getUniqueXAResourceId(), is("xaCache1")); + } +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/configuration/XAStoreConfigurationTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/configuration/XAStoreConfigurationTest.java new file mode 100644 index 0000000000..6d05489192 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/configuration/XAStoreConfigurationTest.java @@ -0,0 +1,36 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.configuration; + +import org.hamcrest.core.IsNot; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsSame.sameInstance; + +public class XAStoreConfigurationTest { + + @Test + public void testDeriveDetachesProperly() { + XAStoreConfiguration configuration = new XAStoreConfiguration("foobar"); + XAStoreConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(IsNot.not(sameInstance(configuration)))); + assertThat(derived.getUniqueXAResourceId(), is(configuration.getUniqueXAResourceId())); + } +} diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/integration/StatefulSerializerTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/integration/StatefulSerializerTest.java similarity index 93% rename from transactions/src/test/java/org/ehcache/transactions/xa/integration/StatefulSerializerTest.java rename to ehcache-transactions/src/test/java/org/ehcache/transactions/xa/integration/StatefulSerializerTest.java index 8338b374a1..c88b2d279c 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/integration/StatefulSerializerTest.java +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/integration/StatefulSerializerTest.java @@ -23,8 +23,8 @@ import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.expiry.Expirations; import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; import org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup; import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; @@ -63,7 +63,7 @@ public void testXAWithStatefulSerializer() throws Exception { CacheConfigurationBuilder .newCacheConfigurationBuilder(Long.class, Person.class, ResourcePoolsBuilder.heap(5)) - .withExpiry(Expirations.noExpiration()).add(new XAStoreConfiguration("xaCache")) + .withExpiry(ExpiryPolicyBuilder.noExpiration()).withService(new XAStoreConfiguration("xaCache")) .build()) .build(true)) { @@ -81,6 +81,9 @@ public void testXAWithStatefulSerializer() throws Exception { } public static class Person implements Serializable { + + private static final long serialVersionUID = 1L; + public final String name; public final int age; diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java similarity index 99% rename from transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java rename to ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java index 4129ce1c1b..34a17f97bb 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java @@ -32,8 +32,8 @@ import java.util.Collection; import java.util.Collections; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyInt; diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java similarity index 84% rename from transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java rename to ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java index e2d7ab530a..e87ece536d 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java @@ -16,13 +16,14 @@ package org.ehcache.transactions.xa.internal; +import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Test; -import java.util.HashSet; +import java.util.Collections; import java.util.Set; import static org.junit.Assert.fail; @@ -41,14 +42,13 @@ public void testUnSupportedResourceType() { Store.Configuration configuration = mock(Store.Configuration.class); ResourcePools resourcePools = mock(ResourcePools.class); - Set> resourceTypes = new HashSet<>(); - resourceTypes.add(new TestResourceType()); + Set> resourceTypes = Collections.singleton(new TestResourceType()); when(configuration.getResourcePools()).thenReturn(resourcePools); when(resourcePools.getResourceTypeSet()).thenReturn(resourceTypes); try { - provider.createStore(configuration, (ServiceConfiguration) null); + provider.createStore(configuration, (ServiceConfiguration) null); fail("IllegalStateException expected"); } catch (IllegalStateException e) { @@ -57,11 +57,11 @@ public void testUnSupportedResourceType() { } - private static class TestResourceType implements ResourceType { + private static class TestResourceType implements ResourceType { @Override - public Class getResourcePoolClass() { - return TestResourceType.class; + public Class getResourcePoolClass() { + return ResourcePool.class; } @Override diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreProviderTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreProviderTest.java similarity index 83% rename from transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreProviderTest.java rename to ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreProviderTest.java index fc768b7fdd..e6546f7f83 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreProviderTest.java +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreProviderTest.java @@ -18,12 +18,15 @@ import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; import org.ehcache.impl.internal.DefaultTimeSourceService; import org.ehcache.impl.internal.store.offheap.OffHeapStore; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.StatefulSerializer; +import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; import org.ehcache.transactions.xa.internal.journal.Journal; @@ -55,16 +58,17 @@ public void testXAStoreProviderStatefulSerializer() { TransactionManagerProvider transactionManagerProvider = mock(TransactionManagerProvider.class); when(transactionManagerProvider.getTransactionManagerWrapper()).thenReturn(mock(TransactionManagerWrapper.class)); - ServiceProvider serviceProvider = mock(ServiceProvider.class); + ServiceProvider serviceProvider = mock(ServiceProvider.class); when(serviceProvider.getService(JournalProvider.class)).thenReturn(journalProvider); when(serviceProvider.getService(TimeSourceService.class)).thenReturn(new DefaultTimeSourceService(null)); when(serviceProvider.getService(TransactionManagerProvider.class)).thenReturn(transactionManagerProvider); + when(serviceProvider.getService(StatisticsService.class)).thenReturn(new DefaultStatisticsService()); when(serviceProvider.getServicesOfType(Store.Provider.class)).thenReturn(Collections.singleton(underlyingStoreProvider)); - Store.Configuration configuration = mock(Store.Configuration.class); + Store.Configuration configuration = mock(Store.Configuration.class); when(configuration.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(1, MemoryUnit.MB).build()); when(configuration.getDispatcherConcurrency()).thenReturn(1); - StatefulSerializer valueSerializer = mock(StatefulSerializer.class); + StatefulSerializer valueSerializer = mock(StatefulSerializer.class); when(configuration.getValueSerializer()).thenReturn(valueSerializer); underlyingStoreProvider.start(serviceProvider); @@ -72,7 +76,7 @@ public void testXAStoreProviderStatefulSerializer() { XAStore.Provider provider = new XAStore.Provider(); provider.start(serviceProvider); - Store store = provider.createStore(configuration, mock(XAStoreConfiguration.class)); + Store store = provider.createStore(configuration, mock(XAStoreConfiguration.class)); provider.initStore(store); verify(valueSerializer).init(any(StateRepository.class)); diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java new file mode 100644 index 0000000000..dc2c0c2e8d --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java @@ -0,0 +1,1710 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.internal; + +import org.ehcache.Cache; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.events.StoreEventDispatcher; +import org.ehcache.core.spi.ServiceLocator; +import org.ehcache.core.spi.service.CacheManagerProviderService; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.store.StoreConfigurationImpl; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; +import org.ehcache.core.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.impl.internal.spi.copy.DefaultCopyProvider; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.offheap.MemorySizeParser; +import org.ehcache.impl.internal.store.offheap.OffHeapStore; +import org.ehcache.impl.internal.store.offheap.OffHeapStoreLifecycleHelper; +import org.ehcache.impl.internal.store.tiering.TieredStore; +import org.ehcache.internal.TestTimeSource; +import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.copy.CopyProvider; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.transactions.xa.XACacheException; +import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; +import org.ehcache.transactions.xa.internal.journal.Journal; +import org.ehcache.transactions.xa.internal.journal.TransientJournal; +import org.ehcache.transactions.xa.internal.txmgr.NullXAResourceRegistry; +import org.ehcache.transactions.xa.txmgr.TransactionManagerWrapper; +import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider; +import org.ehcache.transactions.xa.utils.JavaSerializer; +import org.ehcache.transactions.xa.utils.TestXid; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; +import org.mockito.Answers; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; + +import javax.transaction.HeuristicMixedException; +import javax.transaction.HeuristicRollbackException; +import javax.transaction.RollbackException; +import javax.transaction.Status; +import javax.transaction.Synchronization; +import javax.transaction.SystemException; +import javax.transaction.Transaction; +import javax.transaction.TransactionManager; +import javax.transaction.xa.XAException; +import javax.transaction.xa.XAResource; + +import static java.time.Duration.ofSeconds; +import static java.util.Collections.emptySet; +import static org.ehcache.config.builders.ExpiryPolicyBuilder.timeToLiveExpiration; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; + +/** + * Tests for {@link XAStore} and {@link org.ehcache.transactions.xa.internal.XAStore.Provider}. + */ +public class XAStoreTest { + + private static final Supplier SUPPLY_TRUE = () -> true; + private static final Supplier SUPPLY_FALSE = () -> false; + + @Rule + public TestName testName = new TestName(); + + @SuppressWarnings("unchecked") + private final Class> valueClass = (Class) SoftLock.class; + private final TestTransactionManager testTransactionManager = new TestTransactionManager(); + private TransactionManagerWrapper transactionManagerWrapper; + private OnHeapStore> onHeapStore; + private Journal journal; + private TestTimeSource testTimeSource; + private ClassLoader classLoader; + private Serializer keySerializer; + private Serializer> valueSerializer; + private StoreEventDispatcher> eventDispatcher; + private final ExpiryPolicy expiry = timeToLiveExpiration(ofSeconds(1)); + private Copier keyCopier; + private Copier> valueCopier; + + @Before + public void setUp() { + transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); + classLoader = ClassLoader.getSystemClassLoader(); + keySerializer = new JavaSerializer<>(classLoader); + valueSerializer = new JavaSerializer<>(classLoader); + CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); + keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); + valueCopier = copyProvider.createValueCopier(valueClass, valueSerializer); + Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, + null, classLoader, ExpiryPolicyBuilder.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .build(), + 0, keySerializer, valueSerializer); + testTimeSource = new TestTimeSource(); + eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); + onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + journal = new TransientJournal<>(); + } + + @Test + public void testXAStoreProviderFailsToRankWhenNoTMProviderConfigured() throws Exception { + XAStore.Provider provider = new XAStore.Provider(); + provider.start(new ServiceProvider() { + @Override + public U getService(Class serviceType) { + return null; + } + + @Override + public Collection getServicesOfType(Class serviceType) { + return emptySet(); + } + }); + try { + provider.wrapperStoreRank(Collections.singleton(mock(XAStoreConfiguration.class))); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString("TransactionManagerProvider")); + } + } + + @Test + public void testSimpleGetPutRemove() throws Exception { + XAStore xaStore = getXAStore(onHeapStore); + + testTransactionManager.begin(); + { + assertThat(xaStore.remove(1L), equalTo(false)); + assertThat(xaStore.get(1L), is(nullValue())); + assertThat(xaStore.put(1L, "1"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.get(1L).get(), equalTo("one")); + } + testTransactionManager.rollback(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + assertThat(xaStore.get(1L), is(nullValue())); + assertThat(xaStore.put(1L, "1"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.get(1L).get(), equalTo("one")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "one"); + + testTransactionManager.begin(); + { + assertThat(xaStore.remove(1L), equalTo(true)); + assertThat(xaStore.remove(1L), equalTo(false)); + assertThat(xaStore.get(1L), is(nullValue())); + assertThat(xaStore.put(1L, "1"), equalTo(Store.PutStatus.PUT)); + } + testTransactionManager.rollback(); + + assertMapping(xaStore, 1L, "one"); + + testTransactionManager.begin(); + { + assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.remove(1L), equalTo(true)); + assertThat(xaStore.remove(1L), equalTo(false)); + assertThat(xaStore.get(1L), is(nullValue())); + assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.get(1L).get(), equalTo("un")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "un"); + } + + @Test + public void testConflictingGetPutRemove() throws Exception { + final XAStore xaStore = getXAStore(onHeapStore); + final AtomicReference exception = new AtomicReference<>(); + + testTransactionManager.begin(); + { + xaStore.put(1L, "one"); + } + testTransactionManager.commit(); + + testTransactionManager.begin(); + { + assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); + + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + assertThat(xaStore.put(1L, "uno"), equalTo(Store.PutStatus.NOOP)); + testTransactionManager.commit(); + return null; + }); + + assertThat(xaStore.put(1L, "eins"), equalTo(Store.PutStatus.PUT)); + } + testTransactionManager.commit(); + assertThat(exception.get(), is(nullValue())); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + xaStore.put(1L, "one"); + } + testTransactionManager.commit(); + + testTransactionManager.begin(); + { + assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); + + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.remove(1L), is(false)); + + testTransactionManager.commit(); + return null; + }); + + assertThat(xaStore.put(1L, "een"), equalTo(Store.PutStatus.PUT)); + } + testTransactionManager.commit(); + + assertThat(exception.get(), is(nullValue())); + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + xaStore.put(1L, "one"); + } + testTransactionManager.commit(); + + testTransactionManager.begin(); + { + assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); + + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.get(1L), is(nullValue())); + + testTransactionManager.commit(); + return null; + }); + + assertThat(xaStore.put(1L, "yksi"), equalTo(Store.PutStatus.PUT)); + } + testTransactionManager.commit(); + + assertThat(exception.get(), is(nullValue())); + assertMapping(xaStore, 1L, null); + } + + private void executeWhileIn2PC(AtomicReference exception, Callable callable) { + testTransactionManager.getCurrentTransaction().registerTwoPcListener(() -> { + try { + Thread t = new Thread(() -> { + try { + // this runs while the committing TX is in-doubt + callable.call(); + } catch (Throwable t1) { + exception.set(t1); + } + }); + t.start(); + t.join(); + } catch (Throwable e) { + exception.set(e); + } + }); + } + + @Test + public void testIterate() throws Exception { + XAStore xaStore = getXAStore(onHeapStore); + + testTransactionManager.begin(); + { + xaStore.put(1L, "one"); + xaStore.put(2L, "two"); + xaStore.put(3L, "three"); + } + testTransactionManager.commit(); + + testTransactionManager.begin(); + { + xaStore.put(0L, "zero"); + xaStore.put(1L, "un"); + xaStore.put(2L, "two"); + xaStore.remove(3L); + + Map iterated = new HashMap<>(); + Store.Iterator>> iterator = xaStore.iterator(); + while (iterator.hasNext()) { + Cache.Entry> next = iterator.next(); + iterated.put(next.getKey(), next.getValue().get()); + } + assertThat(iterated.size(), is(3)); + assertThat(iterated.get(0L), equalTo("zero")); + assertThat(iterated.get(1L), equalTo("un")); + assertThat(iterated.get(2L), equalTo("two")); + } + testTransactionManager.commit(); + + testTransactionManager.begin(); + { + Map iterated = new HashMap<>(); + Store.Iterator>> iterator = xaStore.iterator(); + while (iterator.hasNext()) { + Cache.Entry> next = iterator.next(); + iterated.put(next.getKey(), next.getValue().get()); + } + assertThat(iterated.size(), is(3)); + assertThat(iterated.get(0L), equalTo("zero")); + assertThat(iterated.get(1L), equalTo("un")); + assertThat(iterated.get(2L), equalTo("two")); + } + testTransactionManager.commit(); + + Store.Iterator>> iterator; + testTransactionManager.begin(); + { + iterator = xaStore.iterator(); + iterator.next(); + } + testTransactionManager.commit(); + + // cannot use iterator outside of tx context + try { + iterator.hasNext(); + fail(); + } catch (XACacheException e) { + // expected + } + try { + iterator.next(); + fail(); + } catch (XACacheException e) { + // expected + } + + // cannot use iterator outside of original tx context + testTransactionManager.begin(); + { + try { + iterator.hasNext(); + fail(); + } catch (IllegalStateException e) { + // expected + } + try { + iterator.next(); + fail(); + } catch (IllegalStateException e) { + // expected + } + } + testTransactionManager.commit(); + } + + @Test + public void testPutIfAbsent() throws Exception { + final XAStore xaStore = getXAStore(onHeapStore); + final AtomicReference exception = new AtomicReference<>(); + + testTransactionManager.begin(); + { + assertThat(xaStore.putIfAbsent(1L, "one", b -> {}), is(nullValue())); + assertThat(xaStore.get(1L).get(), equalTo("one")); + assertThat(xaStore.putIfAbsent(1L, "un", b -> {}).get(), equalTo("one")); + assertThat(xaStore.get(1L).get(), equalTo("one")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "one"); + + testTransactionManager.begin(); + { + assertThat(xaStore.putIfAbsent(1L, "un", b -> {}).get(), equalTo("one")); + assertThat(xaStore.get(1L).get(), equalTo("one")); + assertThat(xaStore.remove(1L), equalTo(true)); + assertThat(xaStore.putIfAbsent(1L, "uno", b -> {}), is(nullValue())); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "uno"); + + testTransactionManager.begin(); + { + xaStore.put(1L, "eins"); + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.putIfAbsent(1L, "un", b -> {}), is(nullValue())); + + testTransactionManager.commit(); + return null; + }); + } + testTransactionManager.commit(); + assertThat(exception.get(), is(nullValue())); + + assertMapping(xaStore, 1L, null); + } + + @Test + public void testRemove2Args() throws Exception { + final XAStore xaStore = getXAStore(onHeapStore); + final AtomicReference exception = new AtomicReference<>(); + + testTransactionManager.begin(); + { + assertThat(xaStore.remove(1L, "one"), equalTo(Store.RemoveStatus.KEY_MISSING)); + assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.remove(1L, "un"), equalTo(Store.RemoveStatus.KEY_PRESENT)); + assertThat(xaStore.remove(1L, "one"), equalTo(Store.RemoveStatus.REMOVED)); + assertThat(xaStore.remove(1L, "eins"), equalTo(Store.RemoveStatus.KEY_MISSING)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "one"); + + testTransactionManager.begin(); + { + assertThat(xaStore.remove(1L, "een"), equalTo(Store.RemoveStatus.KEY_PRESENT)); + assertThat(xaStore.remove(1L, "one"), equalTo(Store.RemoveStatus.REMOVED)); + assertThat(xaStore.remove(1L, "eins"), equalTo(Store.RemoveStatus.KEY_MISSING)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + xaStore.put(1L, "eins"); + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.remove(1L, "un"), equalTo(Store.RemoveStatus.KEY_MISSING)); + + testTransactionManager.commit(); + return null; + }); + } + testTransactionManager.commit(); + assertThat(exception.get(), is(nullValue())); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "one"); + + testTransactionManager.begin(); + { + xaStore.put(1L, "eins"); + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.remove(1L, "un"), equalTo(Store.RemoveStatus.KEY_MISSING)); + + testTransactionManager.commit(); + return null; + }); + } + testTransactionManager.commit(); + assertThat(exception.get(), is(nullValue())); + + assertMapping(xaStore, 1L, null); + } + + @Test + public void testReplace2Args() throws Exception { + final XAStore xaStore = getXAStore(onHeapStore); + final AtomicReference exception = new AtomicReference<>(); + + testTransactionManager.begin(); + { + assertThat(xaStore.replace(1L, "one"), is(nullValue())); + assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.replace(1L, "un").get(), equalTo("one")); + assertThat(xaStore.replace(1L, "uno").get(), equalTo("un")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "uno"); + + testTransactionManager.begin(); + { + assertThat(xaStore.replace(1L, "een").get(), equalTo("uno")); + assertThat(xaStore.replace(1L, "eins").get(), equalTo("een")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "eins"); + + testTransactionManager.begin(); + { + assertThat(xaStore.remove(1L), is(true)); + assertThat(xaStore.replace(1L, "yksi"), is(nullValue())); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + xaStore.put(1L, "eins"); + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.replace(1L, "un"), is(nullValue())); + + testTransactionManager.commit(); + return null; + }); + } + testTransactionManager.commit(); + assertThat(exception.get(), is(nullValue())); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + assertThat(xaStore.put(1L, "one"), is(Store.PutStatus.PUT)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "one"); + + testTransactionManager.begin(); + { + xaStore.put(1L, "eins"); + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.replace(1L, "un"), is(nullValue())); + + testTransactionManager.commit(); + return null; + }); + } + testTransactionManager.commit(); + assertThat(exception.get(), is(nullValue())); + + assertMapping(xaStore, 1L, null); + } + + @Test + public void testReplace3Args() throws Exception { + final XAStore xaStore = getXAStore(onHeapStore); + final AtomicReference exception = new AtomicReference<>(); + + testTransactionManager.begin(); + { + assertThat(xaStore.replace(1L, "one", "un"), equalTo(Store.ReplaceStatus.MISS_NOT_PRESENT)); + assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); + assertThat(xaStore.replace(1L, "eins", "un"), equalTo(Store.ReplaceStatus.MISS_PRESENT)); + assertThat(xaStore.replace(1L, "one", "un"), equalTo(Store.ReplaceStatus.HIT)); + assertThat(xaStore.get(1L).get(), equalTo("un")); + assertThat(xaStore.replace(1L, "eins", "een"), equalTo(Store.ReplaceStatus.MISS_PRESENT)); + assertThat(xaStore.replace(1L, "un", "uno"), equalTo(Store.ReplaceStatus.HIT)); + assertThat(xaStore.get(1L).get(), equalTo("uno")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "uno"); + + testTransactionManager.begin(); + { + assertThat(xaStore.replace(1L, "one", "uno"), equalTo(Store.ReplaceStatus.MISS_PRESENT)); + assertThat(xaStore.replace(1L, "uno", "un"), equalTo(Store.ReplaceStatus.HIT)); + assertThat(xaStore.get(1L).get(), equalTo("un")); + assertThat(xaStore.remove(1L), equalTo(true)); + assertThat(xaStore.replace(1L, "un", "eins"), equalTo(Store.ReplaceStatus.MISS_NOT_PRESENT)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + xaStore.put(1L, "eins"); + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.replace(1L, "eins", "one"), is(Store.ReplaceStatus.MISS_NOT_PRESENT)); + + testTransactionManager.commit(); + return null; + }); + } + testTransactionManager.commit(); + assertThat(exception.get(), is(nullValue())); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + assertThat(xaStore.put(1L, "one"), is(Store.PutStatus.PUT)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "one"); + + testTransactionManager.begin(); + { + xaStore.put(1L, "eins"); + executeWhileIn2PC(exception, () -> { + testTransactionManager.begin(); + + assertThat(xaStore.replace(1L, "one", "un"), is(Store.ReplaceStatus.MISS_NOT_PRESENT)); + + testTransactionManager.commit(); + return null; + }); + } + testTransactionManager.commit(); + assertThat(exception.get(), is(nullValue())); + + assertMapping(xaStore, 1L, null); + } + + @Test + public void testGetAndCompute() throws Exception { + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, + null, classLoader, ExpiryPolicyBuilder.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() + .offheap(10, MemoryUnit.MB) + .build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "one"; + }); + assertThat(computed1, is(nullValue())); + assertThat(xaStore.get(1L).get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("one")); + return "un"; + }); + assertThat(computed2.get(), equalTo("one")); + assertThat(xaStore.get(1L).get(), equalTo("un")); + Store.ValueHolder computed3 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("un")); + return null; + }); + assertThat(computed3.get(), equalTo("un")); + assertThat(xaStore.get(1L), is(nullValue())); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "one"; + }); + assertThat(computed1, is(nullValue())); + assertThat(xaStore.get(1L).get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("one")); + return null; + }); + assertThat(computed2.get(), equalTo("one")); + assertThat(xaStore.get(1L), is(nullValue())); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "one"; + }); + assertThat(computed1, is(nullValue())); + assertThat(xaStore.get(1L).get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("one")); + return null; + }); + assertThat(computed2.get(), equalTo("one")); + assertThat(xaStore.get(1L), is(nullValue())); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "one"; + }); + assertThat(computed1, is(nullValue())); + assertThat(xaStore.get(1L).get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("one")); + return "un"; + }); + assertThat(computed2.get(), equalTo("one")); + assertThat(xaStore.get(1L).get(), equalTo("un")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "un"); + + testTransactionManager.begin(); + { + Store.ValueHolder computed = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("un")); + return "eins"; + }); + assertThat(computed.get(), equalTo("un")); + assertThat(xaStore.get(1L).get(), equalTo("eins")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "eins"); + + testTransactionManager.begin(); + { + Store.ValueHolder computed = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("eins")); + return null; + }); + assertThat(computed.get(), equalTo("eins")); + assertThat(xaStore.get(1L), is(nullValue())); + } + testTransactionManager.rollback(); + + assertMapping(xaStore, 1L, "eins"); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("eins")); + return null; + }); + assertThat(computed1.get(), equalTo("eins")); + assertThat(xaStore.get(1L), is(nullValue())); + Store.ValueHolder computed2 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return null; + }); + assertThat(computed2, is(nullValue())); + assertThat(xaStore.get(1L), is(nullValue())); + Store.ValueHolder computed3 = xaStore.getAndCompute(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "uno"; + }); + assertThat(computed3, is(nullValue())); + assertThat(xaStore.get(1L).get(), equalTo("uno")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "uno"); + + testTransactionManager.begin(); + { + xaStore.remove(1L); + } + testTransactionManager.commit(); + + testTransactionManager.begin(); + { + assertThat(xaStore.containsKey(1L), is(false)); + xaStore.put(1L, "uno"); + assertThat(xaStore.containsKey(1L), is(true)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "uno"); + + testTransactionManager.begin(); + { + assertThat(xaStore.containsKey(1L), is(true)); + xaStore.remove(1L); + assertThat(xaStore.containsKey(1L), is(false)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + OffHeapStoreLifecycleHelper.close(offHeapStore); + } + + @Test + public void testCompute() throws Exception { + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, + null, classLoader, ExpiryPolicyBuilder.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() + .offheap(10, MemoryUnit.MB) + .build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "one"; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed1.get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("one")); + return "un"; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed2.get(), equalTo("un")); + Store.ValueHolder computed3 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("un")); + return null; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed3, is(nullValue())); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "one"; + }, SUPPLY_FALSE, SUPPLY_FALSE); + assertThat(computed1.get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("one")); + return null; + }, SUPPLY_FALSE, SUPPLY_FALSE); + assertThat(computed2, is(nullValue())); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "one"; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed1.get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("one")); + return null; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed2, is(nullValue())); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "one"; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed1.get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("one")); + return "un"; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed2.get(), equalTo("un")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "un"); + + testTransactionManager.begin(); + { + Store.ValueHolder computed = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("un")); + return "eins"; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed.get(), equalTo("eins")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "eins"); + + testTransactionManager.begin(); + { + Store.ValueHolder computed = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("eins")); + return null; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed, is(nullValue())); + } + testTransactionManager.rollback(); + + assertMapping(xaStore, 1L, "eins"); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, equalTo("eins")); + return null; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed1, is(nullValue())); + Store.ValueHolder computed2 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return null; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed2, is(nullValue())); + Store.ValueHolder computed3 = xaStore.computeAndGet(1L, (aLong, s) -> { + assertThat(aLong, is(1L)); + assertThat(s, is(nullValue())); + return "uno"; + }, SUPPLY_TRUE, SUPPLY_FALSE); + assertThat(computed3.get(), equalTo("uno")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "uno"); + + testTransactionManager.begin(); + { + xaStore.remove(1L); + } + testTransactionManager.commit(); + + testTransactionManager.begin(); + { + assertThat(xaStore.containsKey(1L), is(false)); + xaStore.put(1L, "uno"); + assertThat(xaStore.containsKey(1L), is(true)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "uno"); + + testTransactionManager.begin(); + { + assertThat(xaStore.containsKey(1L), is(true)); + xaStore.remove(1L); + assertThat(xaStore.containsKey(1L), is(false)); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, null); + + OffHeapStoreLifecycleHelper.close(offHeapStore); + } + + @Test + public void testComputeIfAbsent() throws Exception { + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, ExpiryPolicyBuilder.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() + .offheap(10, MemoryUnit.MB) + .build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.computeIfAbsent(1L, aLong -> { + assertThat(aLong, is(1L)); + return "one"; + }); + assertThat(computed1.get(), equalTo("one")); + Store.ValueHolder computed2 = xaStore.computeIfAbsent(1L, aLong -> { + fail("should not be absent"); + throw new AssertionError(); + }); + assertThat(computed2.get(), equalTo("one")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "one"); + + testTransactionManager.begin(); + { + Store.ValueHolder computed1 = xaStore.computeIfAbsent(1L, aLong -> { + fail("should not be absent"); + throw new AssertionError(); + }); + assertThat(computed1.get(), equalTo("one")); + + xaStore.remove(1L); + + Store.ValueHolder computed2 = xaStore.computeIfAbsent(1L, aLong -> { + assertThat(aLong, is(1L)); + return "un"; + }); + assertThat(computed2.get(), equalTo("un")); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "un"); + + OffHeapStoreLifecycleHelper.close(offHeapStore); + } + + @Test + public void testExpiry() throws Exception { + Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, + null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + 0, keySerializer, valueSerializer); + onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + { + xaStore.put(1L, "one"); + } + testTransactionManager.commit(); + + assertMapping(xaStore, 1L, "one"); + + testTimeSource.advanceTime(2000); + + assertMapping(xaStore, 1L, null); + + OffHeapStoreLifecycleHelper.close(offHeapStore); + } + + @Test + public void testExpiryCreateException() throws Exception { + ExpiryPolicy expiry = new ExpiryPolicy() { + + @Override + public Duration getExpiryForCreation(Object key, Object value) { + throw new RuntimeException(); + } + + @Override + public Duration getExpiryForAccess(Object key, Supplier value) { + throw new AssertionError(); + } + + @Override + public Duration getExpiryForUpdate(Object key, Supplier oldValue, Object newValue) { + throw new AssertionError(); + } + }; + Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + 0, keySerializer, valueSerializer); + OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + xaStore.put(1L, "one"); + testTransactionManager.commit(); + assertMapping(xaStore, 1L, null); + } + + @Test + public void testExpiryAccessException() throws Exception { + String uniqueXAResourceId = "testExpiryAccessException"; + ExpiryPolicy expiry = new ExpiryPolicy() { + + @Override + public Duration getExpiryForCreation(Object key, Object value) { + return ExpiryPolicy.INFINITE; + } + + @Override + public Duration getExpiryForAccess(Object key, Supplier value) { + if (testTimeSource.getTimeMillis() > 0) { + throw new RuntimeException(); + } + return ExpiryPolicy.INFINITE; + } + + @Override + public Duration getExpiryForUpdate(Object key, Supplier oldValue, Object newValue) { + return ExpiryPolicy.INFINITE; + } + }; + Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + 0, keySerializer, valueSerializer); + OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + xaStore.put(1L, "one"); + testTransactionManager.commit(); + + testTimeSource.advanceTime(1000); + testTransactionManager.begin(); + assertThat(xaStore.get(1L).get(), is("one")); + testTransactionManager.commit(); + + testTransactionManager.begin(); + assertThat(xaStore.get(1L), nullValue()); + testTransactionManager.commit(); + } + + @Test + public void testExpiryUpdateException() throws Exception{ + ExpiryPolicy expiry = new ExpiryPolicy() { + + @Override + public Duration getExpiryForCreation(Object key, Object value) { + return ExpiryPolicy.INFINITE; + } + + @Override + public Duration getExpiryForAccess(Object key, Supplier value) { + return ExpiryPolicy.INFINITE; + } + + @Override + public Duration getExpiryForUpdate(Object key, Supplier oldValue, Object newValue) { + if (testTimeSource.getTimeMillis() > 0) { + throw new RuntimeException(); + } + return ExpiryPolicy.INFINITE; + } + }; + Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + 0, keySerializer, valueSerializer); + OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + xaStore.put(1L, "one"); + xaStore.get(1L); + testTransactionManager.commit(); + + testTimeSource.advanceTime(1000); + testTransactionManager.begin(); + xaStore.put(1L, "two"); + testTransactionManager.commit(); + assertMapping(xaStore, 1L, null); + } + + @Test + public void testBulkCompute() throws Exception { + ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1)); + Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + 0, keySerializer, valueSerializer); + OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + { + Map> computedMap = xaStore.bulkCompute(asSet(1L, 2L, 3L), entries -> { + Map result = new HashMap<>(); + for (Map.Entry entry : entries) { + Long key = entry.getKey(); + String value = entry.getValue(); + assertThat(value, is(nullValue())); + result.put(key, "stuff#" + key); + } + return result.entrySet(); + }); + + assertThat(computedMap.size(), is(3)); + assertThat(computedMap.get(1L).get(), equalTo("stuff#1")); + assertThat(computedMap.get(2L).get(), equalTo("stuff#2")); + assertThat(computedMap.get(3L).get(), equalTo("stuff#3")); + + computedMap = xaStore.bulkCompute(asSet(0L, 1L, 3L), entries -> { + Map result = new HashMap<>(); + for (Map.Entry entry : entries) { + Long key = entry.getKey(); + String value = entry.getValue(); + + switch (key.intValue()) { + case 0: + assertThat(value, is(nullValue())); + break; + case 1: + case 3: + assertThat(value, equalTo("stuff#" + key)); + break; + } + + if (key != 3L) { + result.put(key, "otherStuff#" + key); + } else { + result.put(key, null); + } + } + return result.entrySet(); + }); + + assertThat(computedMap.size(), is(3)); + assertThat(computedMap.get(0L).get(), equalTo("otherStuff#0")); + assertThat(computedMap.get(1L).get(), equalTo("otherStuff#1")); + assertThat(computedMap.get(3L), is(nullValue())); + } + testTransactionManager.commit(); + + assertSize(xaStore, 3); + assertMapping(xaStore, 0L, "otherStuff#0"); + assertMapping(xaStore, 1L, "otherStuff#1"); + assertMapping(xaStore, 2L, "stuff#2"); + + OffHeapStoreLifecycleHelper.close(offHeapStore); + } + + @Test + public void testBulkComputeIfAbsent() throws Exception { + ExpiryPolicy expiry = ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1)); + Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + 0, keySerializer, valueSerializer); + OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, + classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), + 0, keySerializer, valueSerializer); + OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser + .parse("10M"), new DefaultStatisticsService()); + OffHeapStoreLifecycleHelper.init(offHeapStore); + TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); + + XAStore xaStore = getXAStore(tieredStore); + + testTransactionManager.begin(); + { + Map> computedMap = xaStore.bulkComputeIfAbsent(asSet(1L, 2L, 3L), keys -> { + Map result = new HashMap<>(); + for (Long key : keys) { + result.put(key, "stuff#" + key); + } + return result.entrySet(); + }); + + assertThat(computedMap.size(), is(3)); + assertThat(computedMap.get(1L).get(), equalTo("stuff#1")); + assertThat(computedMap.get(2L).get(), equalTo("stuff#2")); + assertThat(computedMap.get(3L).get(), equalTo("stuff#3")); + + computedMap = xaStore.bulkComputeIfAbsent(asSet(0L, 1L, 3L), keys -> { + Map result = new HashMap<>(); + for (Long key : keys) { + switch (key.intValue()) { + case 0: + result.put(key, "otherStuff#" + key); + break; + case 1: + case 3: + fail("key " + key + " should not be absent"); + break; + } + } + return result.entrySet(); + }); + + assertThat(computedMap.size(), is(3)); + assertThat(computedMap.get(0L).get(), equalTo("otherStuff#0")); + assertThat(computedMap.get(1L).get(), equalTo("stuff#1")); + assertThat(computedMap.get(3L).get(), equalTo("stuff#3")); + } + testTransactionManager.commit(); + + assertSize(xaStore, 4); + assertMapping(xaStore, 0L, "otherStuff#0"); + assertMapping(xaStore, 1L, "stuff#1"); + assertMapping(xaStore, 2L, "stuff#2"); + assertMapping(xaStore, 3L, "stuff#3"); + + OffHeapStoreLifecycleHelper.close(offHeapStore); + } + + @Test + public void testCustomEvictionAdvisor() throws Exception { + final AtomicBoolean invoked = new AtomicBoolean(); + + EvictionAdvisor> evictionAdvisor = (key, value) -> { + invoked.set(true); + return false; + }; + Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, + evictionAdvisor, classLoader, ExpiryPolicyBuilder.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .build(), + 0, keySerializer, valueSerializer); + OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher, new DefaultStatisticsService()); + + final XAStore xaStore = getXAStore(onHeapStore); + + testTransactionManager.begin(); + { + xaStore.put(1L, "1"); + } + testTransactionManager.rollback(); + assertThat(invoked.get(), is(false)); + + testTransactionManager.begin(); + { + xaStore.put(1L, "1"); + } + testTransactionManager.commit(); + assertThat(invoked.get(), is(true)); + } + + @Test + public void testRank() throws Exception { + XAStore.Provider provider = new XAStore.Provider(); + XAStoreConfiguration configuration = new XAStoreConfiguration("testXAResourceId"); + ServiceLocator serviceLocator = dependencySet() + .with(provider) + .with(Store.Provider.class) + .with(mock(DiskResourceService.class)) + .with(mock(CacheManagerProviderService.class, Answers.RETURNS_DEEP_STUBS)) + .with(mock(TransactionManagerProvider.class)).build(); + + serviceLocator.startAllServices(); + + Set> xaStoreConfigs = Collections.singleton(configuration); + assertThat(provider.wrapperStoreRank(xaStoreConfigs), is(1)); + + Set> emptyConfigs = emptySet(); + assertThat(provider.wrapperStoreRank(emptyConfigs), is(0)); + + } + + private Set asSet(Long... longs) { + return new HashSet<>(Arrays.asList(longs)); + } + + private void assertMapping(XAStore xaStore, long key, String value) throws Exception { + testTransactionManager.begin(); + + Store.ValueHolder valueHolder = xaStore.get(key); + if (value != null) { + assertThat(valueHolder.get(), equalTo(value)); + } else { + assertThat(valueHolder, is(nullValue())); + } + + testTransactionManager.commit(); + } + + private void assertSize(XAStore xaStore, int expectedSize) throws Exception { + testTransactionManager.begin(); + + int counter = 0; + Store.Iterator>> iterator = xaStore.iterator(); + while (iterator.hasNext()) { + iterator.next(); + counter++; + } + assertThat(counter, is(expectedSize)); + + testTransactionManager.commit(); + } + + private XAStore getXAStore(Store> store) { + return new XAStore<>(Long.class, String.class, store, transactionManagerWrapper, testTimeSource, journal, testName.getMethodName(), new DefaultStatisticsService()); + } + + static class TestTransactionManager implements TransactionManager { + + volatile TestTransaction currentTransaction; + final AtomicLong gtridGenerator = new AtomicLong(); + + public TestTransaction getCurrentTransaction() { + return currentTransaction; + } + + @Override + public void begin() { + currentTransaction = new TestTransaction(gtridGenerator.incrementAndGet()); + } + + @Override + public void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException, SecurityException, IllegalStateException, SystemException { + currentTransaction.commit(); + currentTransaction = null; + } + + @Override + public int getStatus() { + return 0; + } + + @Override + public Transaction getTransaction() { + return currentTransaction; + } + + @Override + public void resume(Transaction tobj) { + + } + + @Override + public void rollback() throws IllegalStateException, SecurityException, SystemException { + currentTransaction.rollback(); + currentTransaction = null; + } + + @Override + public void setRollbackOnly() { + + } + + @Override + public void setTransactionTimeout(int seconds) { + + } + + @Override + public Transaction suspend() { + return null; + } + } + + static class TestTransaction implements Transaction { + + final long gtrid; + final Map xids = new IdentityHashMap<>(); + final AtomicLong bqualGenerator = new AtomicLong(); + final List synchronizations = new CopyOnWriteArrayList<>(); + final List twoPcListeners = new CopyOnWriteArrayList<>(); + + public TestTransaction(long gtrid) { + this.gtrid = gtrid; + } + + @Override + public void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException, SecurityException, IllegalStateException, SystemException { + try { + Set> entries = xids.entrySet(); + + // delist + for (Map.Entry entry : entries) { + try { + entry.getKey().end(entry.getValue(), XAResource.TMSUCCESS); + } catch (XAException e) { + throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); + } + } + + fireBeforeCompletion(); + + Set preparedResources = new HashSet<>(); + + // prepare + for (Map.Entry entry : entries) { + try { + int prepareStatus = entry.getKey().prepare(entry.getValue()); + if (prepareStatus != XAResource.XA_RDONLY) { + preparedResources.add(entry.getKey()); + } + } catch (XAException e) { + throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); + } + } + + fireInMiddleOf2PC(); + + // commit + for (Map.Entry entry : entries) { + try { + if (preparedResources.contains(entry.getKey())) { + entry.getKey().commit(entry.getValue(), false); + } + } catch (XAException e) { + throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); + } + } + } finally { + fireAfterCompletion(Status.STATUS_COMMITTED); + } + } + + @Override + public boolean delistResource(XAResource xaRes, int flag) { + return true; + } + + @Override + public boolean enlistResource(XAResource xaRes) throws IllegalStateException, SystemException { + TestXid testXid = xids.get(xaRes); + if (testXid == null) { + testXid = new TestXid(gtrid, bqualGenerator.incrementAndGet()); + xids.put(xaRes, testXid); + } + + try { + xaRes.start(testXid, XAResource.TMNOFLAGS); + } catch (XAException e) { + throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); + } + return true; + } + + @Override + public int getStatus() { + return 0; + } + + public void registerTwoPcListener(TwoPcListener listener) { + twoPcListeners.add(listener); + } + + @Override + public void registerSynchronization(Synchronization sync) { + synchronizations.add(sync); + } + + @Override + public void rollback() throws IllegalStateException, SystemException { + try { + Set> entries = xids.entrySet(); + + // delist + for (Map.Entry entry : entries) { + try { + entry.getKey().end(entry.getValue(), XAResource.TMSUCCESS); + } catch (XAException e) { + throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); + } + } + + // rollback + for (Map.Entry entry : entries) { + try { + entry.getKey().rollback(entry.getValue()); + } catch (XAException e) { + throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); + } + } + } finally { + fireAfterCompletion(Status.STATUS_ROLLEDBACK); + } + } + + private void fireBeforeCompletion() { + for (Synchronization synchronization : synchronizations) { + synchronization.beforeCompletion(); + } + } + + private void fireAfterCompletion(int status) { + for (Synchronization synchronization : synchronizations) { + synchronization.afterCompletion(status); + } + } + + private void fireInMiddleOf2PC() { + for (TwoPcListener twoPcListener : twoPcListeners) { + twoPcListener.inMiddleOf2PC(); + } + } + + @Override + public void setRollbackOnly() { + + } + } + + interface TwoPcListener { + void inMiddleOf2PC(); + } + +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java new file mode 100644 index 0000000000..a1989cf3a9 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java @@ -0,0 +1,557 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.internal; + +import org.ehcache.internal.TestTimeSource; +import org.ehcache.core.spi.store.AbstractValueHolder; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.Store.RemoveStatus; +import org.ehcache.transactions.xa.internal.commands.StoreEvictCommand; +import org.ehcache.transactions.xa.internal.commands.StorePutCommand; +import org.ehcache.transactions.xa.internal.commands.StoreRemoveCommand; +import org.ehcache.transactions.xa.internal.journal.Journal; +import org.ehcache.core.spi.store.Store.ReplaceStatus; +import org.ehcache.transactions.xa.utils.TestXid; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isA; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +/** + * @author Ludovic Orban + */ +public class XATransactionContextTest { + + @Rule + public MockitoRule rule = MockitoJUnit.rule(); + + @Mock + private Store> underlyingStore; + @Mock + private Journal journal; + + private final TestTimeSource timeSource = new TestTimeSource(); + + private XATransactionContext getXaTransactionContext() { + return new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, + timeSource.getTimeMillis() + 30000); + } + + @Test + public void testSimpleCommands() { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + assertThat(xaTransactionContext.touched(1L), is(false)); + assertThat(xaTransactionContext.removed(1L), is(false)); + assertThat(xaTransactionContext.updated(1L), is(false)); + assertThat(xaTransactionContext.evicted(1L), is(false)); + assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); + assertThat(xaTransactionContext.oldValueOf(1L), is(nullValue())); + assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>("old", new XAValueHolder<>("new", timeSource.getTimeMillis()))); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(false)); + assertThat(xaTransactionContext.updated(1L), is(true)); + assertThat(xaTransactionContext.evicted(1L), is(false)); + assertThat(xaTransactionContext.newValueHolderOf(1L).get(), equalTo("new")); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); + assertThat(xaTransactionContext.newValueOf(1L), equalTo("new")); + + xaTransactionContext.addCommand(1L, new StoreRemoveCommand<>("old")); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(true)); + assertThat(xaTransactionContext.updated(1L), is(false)); + assertThat(xaTransactionContext.evicted(1L), is(false)); + assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); + assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); + + xaTransactionContext.addCommand(1L, new StoreEvictCommand<>("old")); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(false)); + assertThat(xaTransactionContext.updated(1L), is(false)); + assertThat(xaTransactionContext.evicted(1L), is(true)); + assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); + assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); + } + + @Test + public void testCommandsOverrideEachOther() { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + xaTransactionContext.addCommand(1L, new StorePutCommand<>("old", new XAValueHolder<>("new", timeSource.getTimeMillis()))); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(false)); + assertThat(xaTransactionContext.updated(1L), is(true)); + assertThat(xaTransactionContext.evicted(1L), is(false)); + assertThat(xaTransactionContext.newValueHolderOf(1L).get(), equalTo("new")); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); + assertThat(xaTransactionContext.newValueOf(1L), equalTo("new")); + + xaTransactionContext.addCommand(1L, new StoreRemoveCommand<>("old")); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(true)); + assertThat(xaTransactionContext.updated(1L), is(false)); + assertThat(xaTransactionContext.evicted(1L), is(false)); + assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); + assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); + + xaTransactionContext.addCommand(1L, new StoreRemoveCommand<>("old2")); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(true)); + assertThat(xaTransactionContext.updated(1L), is(false)); + assertThat(xaTransactionContext.evicted(1L), is(false)); + assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old2")); + assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>("old2", new XAValueHolder<>("new2", timeSource.getTimeMillis()))); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(false)); + assertThat(xaTransactionContext.updated(1L), is(true)); + assertThat(xaTransactionContext.evicted(1L), is(false)); + assertThat(xaTransactionContext.newValueHolderOf(1L).get(), equalTo("new2")); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old2")); + assertThat(xaTransactionContext.newValueOf(1L), equalTo("new2")); + } + + @Test + public void testEvictCommandCannotBeOverridden() { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>("old", new XAValueHolder<>("new", timeSource.getTimeMillis()))); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(false)); + assertThat(xaTransactionContext.updated(1L), is(true)); + assertThat(xaTransactionContext.evicted(1L), is(false)); + assertThat(xaTransactionContext.newValueHolderOf(1L).get(), equalTo("new")); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); + assertThat(xaTransactionContext.newValueOf(1L), equalTo("new")); + + xaTransactionContext.addCommand(1L, new StoreEvictCommand<>("old")); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(false)); + assertThat(xaTransactionContext.updated(1L), is(false)); + assertThat(xaTransactionContext.evicted(1L), is(true)); + assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); + assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>("old2", new XAValueHolder<>("new2", timeSource.getTimeMillis()))); + assertThat(xaTransactionContext.touched(1L), is(true)); + assertThat(xaTransactionContext.removed(1L), is(false)); + assertThat(xaTransactionContext.updated(1L), is(false)); + assertThat(xaTransactionContext.evicted(1L), is(true)); + assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); + assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); + assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); + } + + @Test + public void testHasTimedOut() { + XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), null, null, timeSource, timeSource + .getTimeMillis() + 30000); + assertThat(xaTransactionContext.hasTimedOut(), is(false)); + timeSource.advanceTime(30000); + assertThat(xaTransactionContext.hasTimedOut(), is(true)); + } + + @Test + public void testPrepareReadOnly() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + assertThat(xaTransactionContext.prepare(), is(0)); + + verify(journal, times(1)).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), eq(Collections.emptySet())); + verify(journal, times(0)).saveCommitted(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); + verify(journal, times(1)).saveRolledBack(eq(new TransactionId(new TestXid(0, 0))), eq(false)); + } + + @Test + @SuppressWarnings("unchecked") + public void testPrepare() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>(null, new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); + xaTransactionContext.addCommand(3L, new StoreEvictCommand<>("three")); + + Store.ValueHolder> mockValueHolder = mock(Store.ValueHolder.class); + when(mockValueHolder.get()).thenReturn(new SoftLock<>(null, "two", null)); + when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder); + when(underlyingStore.replace(eq(2L), eq(new SoftLock<>(null, "two", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null)))).thenReturn(ReplaceStatus.HIT); + + AtomicReference> savedInDoubt = new AtomicReference<>(); + // doAnswer is required to make a copy of the keys collection because xaTransactionContext.prepare() clears it before the verify(journal, times(1)).saveInDoubt(...) assertion can be made. + // See: http://stackoverflow.com/questions/17027368/mockito-what-if-argument-passed-to-mock-is-modified + doAnswer(invocation -> { + Collection o = (Collection) invocation.getArguments()[1]; + savedInDoubt.set(new HashSet<>(o)); + return null; + }).when(journal).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); + + assertThat(xaTransactionContext.prepare(), is(3)); + + assertThat(savedInDoubt.get(), containsInAnyOrder(1L, 2L, 3L)); + + verify(journal, times(1)).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); + verify(journal, times(0)).saveCommitted(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); + verify(journal, times(0)).saveRolledBack(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); + + verify(underlyingStore, times(0)).get(1L); + verify(underlyingStore, times(1)).putIfAbsent(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), null, new XAValueHolder<>("un", timeSource + .getTimeMillis()))), any(Consumer.class)); + verify(underlyingStore, times(0)).get(2L); + verify(underlyingStore, times(1)).replace(eq(2L), eq(new SoftLock<>(null, "two", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); + verify(underlyingStore, times(0)).get(3L); + verify(underlyingStore, times(1)).remove(eq(3L)); + } + + @Test + public void testCommitNotPreparedInFlightThrows() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StorePutCommand<>("two", new XAValueHolder<>("deux", timeSource.getTimeMillis()))); + + @SuppressWarnings("unchecked") + Store.ValueHolder> mockValueHolder = mock(Store.ValueHolder.class); + when(mockValueHolder.get()).thenReturn(new SoftLock<>(null, "two", null)); + when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder); + + try { + xaTransactionContext.commit(false); + fail("expected IllegalArgumentException"); + } catch (IllegalArgumentException ise) { + // expected + } + } + + @Test + @SuppressWarnings("unchecked") + public void testCommit() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); + xaTransactionContext.addCommand(3L, new StoreEvictCommand<>("three")); + + Store.ValueHolder> mockValueHolder1 = mock(Store.ValueHolder.class); + when(mockValueHolder1.get()).thenReturn(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource + .getTimeMillis()))); + when(underlyingStore.get(eq(1L))).thenReturn(mockValueHolder1); + Store.ValueHolder> mockValueHolder2 = mock(Store.ValueHolder.class); + when(mockValueHolder2.get()).thenReturn(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null)); + when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder2); + Store.ValueHolder> mockValueHolder3 = mock(Store.ValueHolder.class); + when(mockValueHolder3.get()).thenReturn(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "three", null)); + when(underlyingStore.get(eq(3L))).thenReturn(mockValueHolder3); + + when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); + when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(Arrays.asList(1L, 2L, 3L)); + + when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.MISS_NOT_PRESENT); + when(underlyingStore.remove(any(Long.class), any(SoftLock.class))).thenReturn(RemoveStatus.KEY_MISSING); + + xaTransactionContext.commit(false); + verify(journal, times(1)).saveCommitted(eq(new TransactionId(new TestXid(0, 0))), eq(false)); + verify(journal, times(0)).saveRolledBack(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); + verify(journal, times(0)).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); + + verify(underlyingStore, times(1)).get(1L); + verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource + .getTimeMillis()))), eq(new SoftLock<>(null, "un", null))); + verify(underlyingStore, times(1)).get(2L); + verify(underlyingStore, times(1)).remove(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); + verify(underlyingStore, times(1)).get(3L); + verify(underlyingStore, times(1)).remove(eq(3L)); + } + + @Test + public void testCommitInOnePhasePreparedThrows() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); + + try { + xaTransactionContext.commitInOnePhase(); + fail("expected IllegalStateException"); + } catch (IllegalStateException ise) { + // expected + } + } + + @Test + @SuppressWarnings("unchecked") + public void testCommitInOnePhase() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>(null, new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); + xaTransactionContext.addCommand(3L, new StoreEvictCommand<>("three")); + + Store.ValueHolder> mockValueHolder = mock(Store.ValueHolder.class); + when(mockValueHolder.get()).thenReturn(new SoftLock<>(null, "two", null)); + when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder); + + AtomicReference> savedInDoubtCollectionRef = new AtomicReference<>(); + doAnswer(invocation -> { + savedInDoubtCollectionRef.set(new HashSet<>((Collection) invocation.getArguments()[1])); + return null; + }).when(journal).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); + when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).then(invocation -> savedInDoubtCollectionRef.get() != null); + when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).then(invocation -> savedInDoubtCollectionRef.get()); + AtomicReference> softLock1Ref = new AtomicReference<>(); + when(underlyingStore.get(eq(1L))).then(invocation -> softLock1Ref.get() == null ? null : new AbstractValueHolder(-1, -1) { + @Override + public Object get() { + return softLock1Ref.get(); + } + }); + when(underlyingStore.putIfAbsent(eq(1L), isA(SoftLock.class), any(Consumer.class))).then(invocation -> { + softLock1Ref.set((SoftLock) invocation.getArguments()[1]); + return null; + }); + when(underlyingStore.replace(eq(1L), isA(SoftLock.class), isA(SoftLock.class))).then(invocation -> { + if (softLock1Ref.get() != null) { + return ReplaceStatus.HIT; + } + return ReplaceStatus.MISS_PRESENT; + }); + AtomicReference> softLock2Ref = new AtomicReference<>(new SoftLock<>(null, "two", null)); + when(underlyingStore.get(eq(2L))).then(invocation -> softLock2Ref.get() == null ? null : new AbstractValueHolder(-1, -1) { + @Override + public Object get() { + return softLock2Ref.get(); + } + }); + when(underlyingStore.replace(eq(2L), isA(SoftLock.class), isA(SoftLock.class))).then(invocation -> { + softLock2Ref.set((SoftLock) invocation.getArguments()[2]); + return ReplaceStatus.HIT; + }); + + when(underlyingStore.remove(any(Long.class), any(SoftLock.class))).thenReturn(RemoveStatus.REMOVED); + + xaTransactionContext.commitInOnePhase(); + + assertThat(savedInDoubtCollectionRef.get(), containsInAnyOrder(1L, 2L, 3L)); + + verify(journal, times(1)).saveCommitted(eq(new TransactionId(new TestXid(0, 0))), eq(false)); + verify(journal, times(0)).saveRolledBack(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); + verify(journal, times(1)).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); + + verify(underlyingStore, times(1)).putIfAbsent(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), null, new XAValueHolder<>("un", timeSource + .getTimeMillis()))), any(Consumer.class)); + verify(underlyingStore, times(1)).replace(eq(2L), eq(new SoftLock<>(null, "two", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); + verify(underlyingStore, times(1)).remove(eq(3L)); + + verify(underlyingStore, times(1)).get(1L); + verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), null, new XAValueHolder<>("un", timeSource + .getTimeMillis()))), eq(new SoftLock<>(null, "un", null))); + verify(underlyingStore, times(1)).get(2L); + verify(underlyingStore, times(1)).remove(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); + verify(underlyingStore, times(1)).get(3L); + verify(underlyingStore, times(1)).remove(eq(3L)); + } + + @Test + public void testRollbackPhase1() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); + + xaTransactionContext.rollback(false); + + verifyNoMoreInteractions(underlyingStore); + } + + @Test + @SuppressWarnings("unchecked") + public void testRollbackPhase2() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + + xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); + + when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); + when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(Arrays.asList(1L, 2L)); + + when(underlyingStore.get(1L)).thenReturn(new AbstractValueHolder>(-1, -1) { + @Override + public SoftLock get() { + return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource.getTimeMillis())); + } + }); + when(underlyingStore.get(2L)).thenReturn(new AbstractValueHolder>(-1, -1) { + @Override + public SoftLock get() { + return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null); + } + }); + + when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.HIT); + xaTransactionContext.rollback(false); + + verify(underlyingStore, times(1)).get(1L); + verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource + .getTimeMillis()))), eq(new SoftLock<>(null, "one", null))); + verify(underlyingStore, times(1)).get(2L); + verify(underlyingStore, times(1)).replace(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null)), eq(new SoftLock<>(null, "two", null))); + } + + @Test + public void testCommitInOnePhaseTimeout() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); + + timeSource.advanceTime(30000); + + try { + xaTransactionContext.commitInOnePhase(); + fail("expected TransactionTimeoutException"); + } catch (XATransactionContext.TransactionTimeoutException tte) { + // expected + } + } + + @Test + public void testPrepareTimeout() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); + + timeSource.advanceTime(30000); + + try { + xaTransactionContext.prepare(); + fail("expected TransactionTimeoutException"); + } catch (XATransactionContext.TransactionTimeoutException tte) { + // expected + } + } + + @Test + @SuppressWarnings("unchecked") + public void testCommitConflictsEvicts() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); + when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(Arrays.asList(1L, 2L)); + when(underlyingStore.get(eq(1L))).thenReturn(new AbstractValueHolder>(-1, -1) { + @Override + public SoftLock get() { + return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old1", new XAValueHolder<>("new1", timeSource + .getTimeMillis())); + } + }); + when(underlyingStore.get(eq(2L))).thenReturn(new AbstractValueHolder>(-1, -1) { + @Override + public SoftLock get() { + return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old2", null); + } + }); + + when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.MISS_NOT_PRESENT); + when(underlyingStore.remove(any(Long.class), any(SoftLock.class))).thenReturn(RemoveStatus.KEY_MISSING); + + xaTransactionContext.commit(false); + + verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old1", new XAValueHolder<>("new1", timeSource + .getTimeMillis()))), eq(new SoftLock<>(null, "new1", null))); + verify(underlyingStore, times(1)).remove(eq(1L)); + verify(underlyingStore, times(1)).remove(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old2", null))); + verify(underlyingStore, times(1)).remove(eq(2L)); + } + + @Test + @SuppressWarnings("unchecked") + public void testPrepareConflictsEvicts() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); + xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); + + when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.MISS_NOT_PRESENT); + + xaTransactionContext.prepare(); + + verify(underlyingStore).replace(eq(1L), eq(new SoftLock<>(null, "one", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource + .getTimeMillis())))); + verify(underlyingStore).remove(1L); + verify(underlyingStore).replace(eq(2L), eq(new SoftLock<>(null, "two", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); + verify(underlyingStore).remove(2L); + } + + @Test + @SuppressWarnings("unchecked") + public void testRollbackConflictsEvicts() throws Exception { + XATransactionContext xaTransactionContext = getXaTransactionContext(); + when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); + when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(Arrays.asList(1L, 2L)); + when(underlyingStore.get(eq(1L))).thenReturn(new AbstractValueHolder>(-1, -1) { + @Override + public SoftLock get() { + return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old1", new XAValueHolder<>("new1", timeSource + .getTimeMillis())); + } + }); + when(underlyingStore.get(eq(2L))).thenReturn(new AbstractValueHolder>(-1, -1) { + @Override + public SoftLock get() { + return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old2", null); + } + }); + + when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.MISS_NOT_PRESENT); + when(underlyingStore.remove(any(Long.class), any(SoftLock.class))).thenReturn(RemoveStatus.KEY_MISSING); + + xaTransactionContext.rollback(false); + + verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old1", new XAValueHolder<>("new1", timeSource + .getTimeMillis()))), eq(new SoftLock<>(null, "old1", null))); + verify(underlyingStore, times(1)).remove(eq(1L)); + verify(underlyingStore, times(1)).replace(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old2", null)), eq(new SoftLock<>(null, "old2", null))); + verify(underlyingStore, times(1)).remove(eq(2L)); + } +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java new file mode 100644 index 0000000000..7419cf27a0 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java @@ -0,0 +1,57 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.internal; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.time.Duration; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +/** + * XAValueHolderTest + */ +public class XAValueHolderTest { + + @Test + public void testSerialization() throws Exception { + + long now = System.currentTimeMillis(); + XAValueHolder valueHolder = new XAValueHolder<>("value", now - 1000); + valueHolder.accessed(now, Duration.ofSeconds(100)); + + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + ObjectOutputStream outputStream = new ObjectOutputStream(baos); + outputStream.writeObject(valueHolder); + outputStream.close(); + + @SuppressWarnings("unchecked") + XAValueHolder result = (XAValueHolder) new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray())).readObject(); + + assertThat(result.getId(), is(valueHolder.getId())); + assertThat(result.creationTime(), is(valueHolder.creationTime())); + assertThat(result.lastAccessTime(), is(valueHolder.lastAccessTime())); + assertThat(result.expirationTime(), is(valueHolder.expirationTime())); + assertThat(result.get(), is(valueHolder.get())); + } +} diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/AbstractJournalTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/AbstractJournalTest.java similarity index 99% rename from transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/AbstractJournalTest.java rename to ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/AbstractJournalTest.java index 23db80924b..754f756936 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/AbstractJournalTest.java +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/AbstractJournalTest.java @@ -27,10 +27,10 @@ import java.util.Collection; import java.util.Map; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/PersistentJournalTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/PersistentJournalTest.java similarity index 97% rename from transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/PersistentJournalTest.java rename to ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/PersistentJournalTest.java index acbb274054..0a8685d48f 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/PersistentJournalTest.java +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/PersistentJournalTest.java @@ -25,9 +25,9 @@ import java.util.Arrays; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; /** * @author Ludovic Orban diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/TransientJournalTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/TransientJournalTest.java similarity index 100% rename from transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/TransientJournalTest.java rename to ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/journal/TransientJournalTest.java diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParserTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParserTest.java new file mode 100644 index 0000000000..33bcbc3360 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParserTest.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.transactions.xa.internal.xml; + +import org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup; +import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; +import org.junit.Test; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.xml.sax.InputSource; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.StringReader; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.sameInstance; + +/** + * TxCacheManagerServiceConfigurationParserTest + */ +public class TxCacheManagerServiceConfigurationParserTest { + + @Test + public void testParseLookupInsideProperty() throws ParserConfigurationException, IOException, SAXException { + String property = TxCacheManagerServiceConfigurationParserTest.class.getName() + ":lookup"; + String inputString = ""; + + TxCacheManagerServiceConfigurationParser configParser = new TxCacheManagerServiceConfigurationParser(); + + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setNamespaceAware(true); + Element node = documentBuilderFactory.newDocumentBuilder() + .parse(new InputSource(new StringReader(inputString))).getDocumentElement(); + + System.setProperty(property, BitronixTransactionManagerLookup.class.getName()); + try { + LookupTransactionManagerProviderConfiguration configuration = + (LookupTransactionManagerProviderConfiguration) configParser.parseServiceCreationConfiguration(node, null); + + assertThat(configuration.getTransactionManagerLookup(), sameInstance(BitronixTransactionManagerLookup.class)); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testTranslateServiceCreationConfiguration() { + TxCacheManagerServiceConfigurationParser configTranslator = new TxCacheManagerServiceConfigurationParser(); + LookupTransactionManagerProviderConfiguration lookupTransactionManagerProviderConfiguration = + new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class); + + Node retElement = configTranslator.unparseServiceCreationConfiguration(lookupTransactionManagerProviderConfiguration); + String inputString = ""; + assertThat(retElement, isSameConfigurationAs(inputString)); + } + +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParserTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParserTest.java new file mode 100644 index 0000000000..1e9b61c865 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParserTest.java @@ -0,0 +1,72 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.transactions.xa.internal.xml; + +import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; +import org.junit.Test; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.xml.sax.InputSource; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.StringReader; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +/** + * TxCacheServiceConfigurationParserTest + */ +public class TxCacheServiceConfigurationParserTest { + + @Test + public void testParseXaResourceIdInsideProperty() throws ParserConfigurationException, IOException, SAXException { + String property = TxCacheManagerServiceConfigurationParserTest.class.getName() + ":xaResourceId"; + String inputString = ""; + + TxCacheServiceConfigurationParser configParser = new TxCacheServiceConfigurationParser(); + + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setNamespaceAware(true); + Element node = documentBuilderFactory.newDocumentBuilder() + .parse(new InputSource(new StringReader(inputString))).getDocumentElement(); + + System.setProperty(property, "Brian"); + try { + XAStoreConfiguration configuration = (XAStoreConfiguration) configParser.parseServiceConfiguration(node, null); + + assertThat(configuration.getUniqueXAResourceId(), is("Brian")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testTranslateServiceConfiguration() { + TxCacheServiceConfigurationParser configTranslator = new TxCacheServiceConfigurationParser(); + XAStoreConfiguration storeConfiguration = new XAStoreConfiguration("my-unique-resource"); + + Node retElement = configTranslator.unparseServiceConfiguration(storeConfiguration); + String inputString = ""; + assertThat(retElement, isSameConfigurationAs(inputString)); + } + +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfigurationTest.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfigurationTest.java new file mode 100644 index 0000000000..8211c1456c --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfigurationTest.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.transactions.xa.txmgr.provider; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.mockito.Mockito.mock; + +public class LookupTransactionManagerProviderConfigurationTest { + + @Test + public void testDeriveDetachesCorrectly() { + LookupTransactionManagerProviderConfiguration configuration = new LookupTransactionManagerProviderConfiguration(mock(TransactionManagerLookup.class).getClass()); + LookupTransactionManagerProviderConfiguration derived = configuration.build(configuration.derive()); + + assertThat(derived, is(not(sameInstance(configuration)))); + assertThat(derived.getTransactionManagerLookup(), sameInstance(configuration.getTransactionManagerLookup())); + } +} diff --git a/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/utils/JavaSerializer.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/utils/JavaSerializer.java new file mode 100644 index 0000000000..a2f80d75f3 --- /dev/null +++ b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/utils/JavaSerializer.java @@ -0,0 +1,133 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.transactions.xa.utils; + +import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.core.util.ByteBufferInputStream; +import org.ehcache.spi.serialization.Serializer; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.ObjectStreamClass; +import java.lang.reflect.Proxy; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +/** + * + * @author cdennis + */ +public class JavaSerializer implements Serializer { + + private final ClassLoader classLoader; + + public JavaSerializer(ClassLoader classLoader) { + this.classLoader = classLoader; + } + + @Override + public ByteBuffer serialize(T object) { + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + try { + ObjectOutputStream oout = new ObjectOutputStream(bout); + oout.writeObject(object); + } catch (IOException e) { + throw new SerializerException(e); + } finally { + try { + bout.close(); + } catch (IOException e) { + throw new AssertionError(e); + } + } + return ByteBuffer.wrap(bout.toByteArray()); + } + + @SuppressWarnings("unchecked") + @Override + public T read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { + ByteBufferInputStream bin = new ByteBufferInputStream(entry); + try { + try (OIS ois = new OIS(bin, classLoader)) { + return (T) ois.readObject(); + } + } catch (IOException e) { + throw new SerializerException(e); + } finally { + try { + bin.close(); + } catch (IOException e) { + throw new AssertionError(e); + } + } + } + + @Override + public boolean equals(T object, ByteBuffer binary) throws SerializerException, ClassNotFoundException { + return object.equals(read(binary)); + } + + private static class OIS extends ObjectInputStream { + + private final ClassLoader classLoader; + + public OIS(InputStream in, ClassLoader classLoader) throws IOException { + super(in); + this.classLoader = classLoader; + } + + @Override + protected Class resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException { + try { + return Class.forName(desc.getName(), false, classLoader); + } catch (ClassNotFoundException cnfe) { + Class primitive = primitiveClasses.get(desc.getName()); + if (primitive != null) { + return primitive; + } + throw cnfe; + } + } + + @Override + protected Class resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { + Class[] interfaceClasses = new Class[interfaces.length]; + for (int i = 0; i < interfaces.length; i++) { + interfaceClasses[i] = Class.forName(interfaces[i], false, classLoader); + } + + return Proxy.getProxyClass(classLoader, interfaceClasses); + } + + private static final Map> primitiveClasses = new HashMap<>(); + static { + primitiveClasses.put("boolean", boolean.class); + primitiveClasses.put("byte", byte.class); + primitiveClasses.put("char", char.class); + primitiveClasses.put("double", double.class); + primitiveClasses.put("float", float.class); + primitiveClasses.put("int", int.class); + primitiveClasses.put("long", long.class); + primitiveClasses.put("short", short.class); + primitiveClasses.put("void", void.class); + } + } + +} diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/utils/TestXid.java b/ehcache-transactions/src/test/java/org/ehcache/transactions/xa/utils/TestXid.java similarity index 100% rename from transactions/src/test/java/org/ehcache/transactions/xa/utils/TestXid.java rename to ehcache-transactions/src/test/java/org/ehcache/transactions/xa/utils/TestXid.java diff --git a/transactions/src/test/resources/configs/simple-xa.xml b/ehcache-transactions/src/test/resources/configs/simple-xa.xml similarity index 77% rename from transactions/src/test/resources/configs/simple-xa.xml rename to ehcache-transactions/src/test/resources/configs/simple-xa.xml index 772cdd5475..fcb384f582 100644 --- a/transactions/src/test/resources/configs/simple-xa.xml +++ b/ehcache-transactions/src/test/resources/configs/simple-xa.xml @@ -14,11 +14,8 @@ ~ limitations under the License. --> + xmlns:tx='http://www.ehcache.org/v3/tx'> diff --git a/transactions/src/test/resources/configs/template-xa.xml b/ehcache-transactions/src/test/resources/configs/template-xa.xml similarity index 79% rename from transactions/src/test/resources/configs/template-xa.xml rename to ehcache-transactions/src/test/resources/configs/template-xa.xml index 98dd5912f7..9958139655 100644 --- a/transactions/src/test/resources/configs/template-xa.xml +++ b/ehcache-transactions/src/test/resources/configs/template-xa.xml @@ -15,11 +15,8 @@ --> + xmlns:tx='http://www.ehcache.org/v3/tx'> diff --git a/ehcache-transactions/src/test/resources/configs/transactional-cache.xml b/ehcache-transactions/src/test/resources/configs/transactional-cache.xml new file mode 100644 index 0000000000..65fef5a049 --- /dev/null +++ b/ehcache-transactions/src/test/resources/configs/transactional-cache.xml @@ -0,0 +1,38 @@ + + + + + + + + + + java.lang.String + java.lang.String + + + + + 20 + + + + + diff --git a/transactions/src/test/resources/docs/configs/xa-getting-started.xml b/ehcache-transactions/src/test/resources/docs/configs/xa-getting-started.xml similarity index 78% rename from transactions/src/test/resources/docs/configs/xa-getting-started.xml rename to ehcache-transactions/src/test/resources/docs/configs/xa-getting-started.xml index fe3b0b3353..78face8ee8 100644 --- a/transactions/src/test/resources/docs/configs/xa-getting-started.xml +++ b/ehcache-transactions/src/test/resources/docs/configs/xa-getting-started.xml @@ -14,11 +14,8 @@ ~ limitations under the License. --> + xmlns:tx='http://www.ehcache.org/v3/tx'> diff --git a/xml/README.adoc b/ehcache-xml/README.adoc similarity index 100% rename from xml/README.adoc rename to ehcache-xml/README.adoc diff --git a/ehcache-xml/build.gradle b/ehcache-xml/build.gradle new file mode 100644 index 0000000000..f60cf4abbc --- /dev/null +++ b/ehcache-xml/build.gradle @@ -0,0 +1,97 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'org.ehcache.build.internal-module' + id 'org.unbroken-dome.xjc' + id 'java-test-fixtures' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache 3 XML Parsing module' + description = 'The module containing all XML parsing logic Ehcache 3' + } +} + +components.java { + withVariantsFromConfiguration(configurations.testFixturesApiElements) { skip() } + withVariantsFromConfiguration(configurations.testFixturesRuntimeElements) { skip() } +} + +sourceSets { + main { + resources.source(xjcSchema) + } +} +tasks.named('sourcesJar') { + filesMatching('*.xsd') { + duplicatesStrategy = DuplicatesStrategy.EXCLUDE + } +} + +configurations { + lowerBoundTestRuntimeClasspath { + extendsFrom testRuntimeClasspath + resolutionStrategy.dependencySubstitution { + substitute module('org.glassfish.jaxb:jaxb-runtime') using module('com.sun.xml.bind:jaxb-impl:2.2.8-b01') + } + } +} + +dependencies { + api project(':ehcache-api') + implementation project(':ehcache-core') + implementation project(':ehcache-impl') + + api 'javax.xml.bind:jaxb-api:[2.2,3)' + runtimeOnly 'org.glassfish.jaxb:jaxb-runtime:[2.2,3)' + + testFixturesApi 'org.xmlunit:xmlunit-core:2.6.0', 'org.xmlunit:xmlunit-matchers:2.6.0' + + xjcClasspath 'org.jvnet.jaxb2_commons:jaxb2-fluent-api:3.0' + xjcClasspath 'org.jvnet.jaxb2_commons:jaxb2-basics-annotate:1.1.0' + + lowerBoundTestRuntimeClasspath 'com.sun.activation:javax.activation:1.2.0' +} + +jar { + bnd ( + 'Export-Package': 'org.ehcache.xml, org.ehcache.xml.exceptions, org.ehcache.xml.model', + 'Import-Package': "javax.xml.bind*;version=\"[2.2,3)\", *" + ) +} + +xjc { + extraArgs.add '-Xfluent-api' + extraArgs.add '-Xannotate' + + // ehcache-multi.xsd references ehcache-core.xsd but we cannot control the order they get presented to XJC in. + // Turning off strict checks prevents failing on when seeing the resultant schema parsing issues. + strictCheck = false +} + +tasks.register('lowerBoundTest', Test) { + group = JavaBasePlugin.VERIFICATION_GROUP + //remove the original runtime classpath + classpath -= configurations.testRuntimeClasspath + //add the classpath we want + classpath += configurations.lowerBoundTestRuntimeClasspath +} + +tasks.named('check') { + dependsOn tasks.lowerBoundTest +} diff --git a/ehcache-xml/config/checkstyle-suppressions.xml b/ehcache-xml/config/checkstyle-suppressions.xml new file mode 100644 index 0000000000..c385350074 --- /dev/null +++ b/ehcache-xml/config/checkstyle-suppressions.xml @@ -0,0 +1,10 @@ + + + + + + + + diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/BaseConfigParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/BaseConfigParser.java new file mode 100644 index 0000000000..bbb1ffa53f --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/BaseConfigParser.java @@ -0,0 +1,73 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.xml; + +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.xml.sax.SAXException; + +import java.io.IOException; +import java.lang.reflect.ParameterizedType; +import java.util.Objects; + +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.Source; + +/** + * BaseConfigParser - Base class providing functionality for translating service configurations to corresponding xml + * document. + */ +public abstract class BaseConfigParser { + private final Class typeParameterClass; + + @SuppressWarnings("unchecked") + public BaseConfigParser() { + typeParameterClass = (Class) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0]; + } + + public BaseConfigParser(Class type) { + this.typeParameterClass = type; + } + + private T validateConfig(Object config) { + Objects.requireNonNull(config, "Configuration must not be null."); + try { + return typeParameterClass.cast(config); + } catch (ClassCastException e) { + throw new IllegalArgumentException("Invalid configuration parameter passed.", e); + } + } + + private Document createDocument() { + try { + return DomUtil.createDocumentRoot(getXmlSchema()); + } catch (SAXException | ParserConfigurationException | IOException e) { + throw new XmlConfigurationException(e); + } + } + + protected Element unparseConfig(Object config) { + T mainConfig = validateConfig(config); + Document doc = createDocument(); + Element rootElement = createRootElement(doc, mainConfig); + return rootElement; + } + + protected abstract Element createRootElement(Document doc, T config); + + protected abstract Source getXmlSchema() throws IOException; +} diff --git a/xml/src/main/java/org/ehcache/xml/CacheManagerServiceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/CacheManagerServiceConfigurationParser.java similarity index 79% rename from xml/src/main/java/org/ehcache/xml/CacheManagerServiceConfigurationParser.java rename to ehcache-xml/src/main/java/org/ehcache/xml/CacheManagerServiceConfigurationParser.java index 0ede895c94..d62e79f5fe 100644 --- a/xml/src/main/java/org/ehcache/xml/CacheManagerServiceConfigurationParser.java +++ b/ehcache-xml/src/main/java/org/ehcache/xml/CacheManagerServiceConfigurationParser.java @@ -33,5 +33,9 @@ public interface CacheManagerServiceConfigurationParser { URI getNamespace(); - ServiceCreationConfiguration parseServiceCreationConfiguration(Element fragment); + ServiceCreationConfiguration parseServiceCreationConfiguration(Element fragment, ClassLoader classLoader); + + Class getServiceType(); + + Element unparseServiceCreationConfiguration(ServiceCreationConfiguration serviceCreationConfiguration); } diff --git a/xml/src/main/java/org/ehcache/xml/CacheResourceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/CacheResourceConfigurationParser.java similarity index 85% rename from xml/src/main/java/org/ehcache/xml/CacheResourceConfigurationParser.java rename to ehcache-xml/src/main/java/org/ehcache/xml/CacheResourceConfigurationParser.java index bb85aa4c36..8780a89813 100644 --- a/xml/src/main/java/org/ehcache/xml/CacheResourceConfigurationParser.java +++ b/ehcache-xml/src/main/java/org/ehcache/xml/CacheResourceConfigurationParser.java @@ -17,10 +17,12 @@ package org.ehcache.xml; import org.ehcache.config.ResourcePool; +import org.ehcache.config.ResourceType; import org.w3c.dom.Element; import java.io.IOException; import java.net.URI; +import java.util.Set; import javax.xml.transform.Source; @@ -36,4 +38,8 @@ public interface CacheResourceConfigurationParser { URI getNamespace(); ResourcePool parseResourceConfiguration(Element fragment); + + Element unparseResourcePool(ResourcePool resourcePool); + + Set> getResourceTypes(); } diff --git a/xml/src/main/java/org/ehcache/xml/CacheServiceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/CacheServiceConfigurationParser.java similarity index 82% rename from xml/src/main/java/org/ehcache/xml/CacheServiceConfigurationParser.java rename to ehcache-xml/src/main/java/org/ehcache/xml/CacheServiceConfigurationParser.java index c40e76349c..52d72aaf3a 100644 --- a/xml/src/main/java/org/ehcache/xml/CacheServiceConfigurationParser.java +++ b/ehcache-xml/src/main/java/org/ehcache/xml/CacheServiceConfigurationParser.java @@ -34,5 +34,9 @@ public interface CacheServiceConfigurationParser { URI getNamespace(); - ServiceConfiguration parseServiceConfiguration(Element fragment); + ServiceConfiguration parseServiceConfiguration(Element fragment, ClassLoader classLoader); + + Class getServiceType(); + + Element unparseServiceConfiguration(ServiceConfiguration serviceConfiguration); } diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/ConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/ConfigurationParser.java new file mode 100644 index 0000000000..d0cd669aa1 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/ConfigurationParser.java @@ -0,0 +1,439 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.core.util.ClassLoading; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.BaseCacheType; +import org.ehcache.xml.model.CacheDefinition; +import org.ehcache.xml.model.CacheEntryType; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheTemplateType; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.ObjectFactory; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.xml.sax.ErrorHandler; +import org.xml.sax.SAXException; +import org.xml.sax.SAXNotRecognizedException; +import org.xml.sax.SAXParseException; + +import javax.xml.XMLConstants; +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBElement; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; +import javax.xml.bind.Unmarshaller; +import javax.xml.bind.helpers.DefaultValidationEventHandler; +import javax.xml.namespace.QName; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.OutputKeys; +import javax.xml.transform.Source; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerConfigurationException; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import javax.xml.transform.stream.StreamSource; +import javax.xml.validation.Schema; +import javax.xml.validation.SchemaFactory; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.StringWriter; +import java.net.URI; +import java.net.URL; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import static java.lang.String.format; +import static java.util.Arrays.asList; +import static java.util.Spliterators.spliterator; +import static java.util.function.Function.identity; +import static java.util.regex.Pattern.quote; +import static java.util.stream.Collectors.collectingAndThen; +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toMap; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ConfigurationBuilder.newConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.util.ClassLoading.servicesOfType; +import static org.ehcache.xml.XmlConfiguration.CORE_SCHEMA_URL; +import static org.ehcache.xml.XmlConfiguration.getClassForName; + +/** + * Provides support for parsing a cache configuration expressed in XML. + */ +public class ConfigurationParser { + + public static Schema newSchema(Source... schemas) throws SAXException { + SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); + try { + /* + * Our schema is accidentally not XSD 1.1 compatible. Since Saxon incorrectly (imho) defaults to XSD 1.1 for + * `XMLConstants.W3C_XML_SCHEMA_NS_URI` we force it back to 1.0. + */ + schemaFactory.setProperty("http://saxon.sf.net/feature/xsd-version", "1.0"); + } catch (SAXNotRecognizedException e) { + //not saxon + } + schemaFactory.setErrorHandler(new FatalErrorHandler()); + return schemaFactory.newSchema(schemas); + } + private static final TransformerFactory TRANSFORMER_FACTORY = TransformerFactory.newInstance(); + + private static final QName CORE_SCHEMA_ROOT_NAME; + static { + ObjectFactory objectFactory = new ObjectFactory(); + CORE_SCHEMA_ROOT_NAME = objectFactory.createConfig(objectFactory.createConfigType()).getName(); + } + + static final CoreCacheConfigurationParser CORE_CACHE_CONFIGURATION_PARSER = new CoreCacheConfigurationParser(); + + private final Schema schema; + private final JAXBContext jaxbContext = JAXBContext.newInstance(ConfigType.class); + private final DocumentBuilder documentBuilder; + + private final ServiceCreationConfigurationParser serviceCreationConfigurationParser; + private final ServiceConfigurationParser serviceConfigurationParser; + private final ResourceConfigurationParser resourceConfigurationParser; + + @SuppressWarnings("unchecked") + private static Stream stream(Iterable iterable) { + return StreamSupport.stream(spliterator((Iterator) iterable.iterator(), Long.MAX_VALUE, 0), false); + } + + ConfigurationParser() throws IOException, SAXException, JAXBException, ParserConfigurationException { + serviceCreationConfigurationParser = ConfigurationParser.>stream( + servicesOfType(CacheManagerServiceConfigurationParser.class)) + .collect(collectingAndThen(toMap(CacheManagerServiceConfigurationParser::getServiceType, identity(), + (a, b) -> a.getClass().isInstance(b) ? b : a), ServiceCreationConfigurationParser::new)); + + serviceConfigurationParser = ConfigurationParser.>stream( + servicesOfType(CacheServiceConfigurationParser.class)) + .collect(collectingAndThen(toMap(CacheServiceConfigurationParser::getServiceType, identity(), + (a, b) -> a.getClass().isInstance(b) ? b : a), ServiceConfigurationParser::new)); + + resourceConfigurationParser = stream(servicesOfType(CacheResourceConfigurationParser.class)) + .flatMap(p -> p.getResourceTypes().stream().map(t -> new AbstractMap.SimpleImmutableEntry<>(t, p))) + .collect(collectingAndThen(toMap(Map.Entry::getKey, Map.Entry::getValue, (a, b) -> a.getClass().isInstance(b) ? b : a), + m -> new ResourceConfigurationParser(new HashSet<>(m.values())))); + + schema = discoverSchema(new StreamSource(CORE_SCHEMA_URL.openStream())); + documentBuilder = documentBuilder(schema); + } + + CacheConfigurationBuilder parseServiceConfigurations(CacheConfigurationBuilder cacheBuilder, + ClassLoader cacheClassLoader, CacheTemplate cacheDefinition) + throws ClassNotFoundException, IllegalAccessException, InstantiationException { + cacheBuilder = CORE_CACHE_CONFIGURATION_PARSER.parseConfiguration(cacheDefinition, cacheClassLoader, cacheBuilder); + return serviceConfigurationParser.parseConfiguration(cacheDefinition, cacheClassLoader, cacheBuilder); + } + + private static Iterable getCacheElements(ConfigType configType) { + List cacheCfgs = new ArrayList<>(); + final List cacheOrCacheTemplate = configType.getCacheOrCacheTemplate(); + for (BaseCacheType baseCacheType : cacheOrCacheTemplate) { + if(baseCacheType instanceof CacheType) { + final CacheType cacheType = (CacheType)baseCacheType; + + final BaseCacheType[] sources; + if(cacheType.getUsesTemplate() != null) { + sources = new BaseCacheType[2]; + sources[0] = cacheType; + sources[1] = (BaseCacheType) cacheType.getUsesTemplate(); + } else { + sources = new BaseCacheType[1]; + sources[0] = cacheType; + } + + cacheCfgs.add(new CacheDefinition(cacheType.getAlias(), sources)); + } + } + + return Collections.unmodifiableList(cacheCfgs); + } + + private Map getTemplates(ConfigType configType) { + final Map templates = new HashMap<>(); + final List cacheOrCacheTemplate = configType.getCacheOrCacheTemplate(); + for (BaseCacheType baseCacheType : cacheOrCacheTemplate) { + if (baseCacheType instanceof CacheTemplateType) { + final CacheTemplate cacheTemplate = new CacheTemplate.Impl(((CacheTemplateType) baseCacheType)); + templates.put(cacheTemplate.id(), parseTemplate(cacheTemplate)); + } + } + return Collections.unmodifiableMap(templates); + } + + private XmlConfiguration.Template parseTemplate(CacheTemplate template) { + return new XmlConfiguration.Template() { + @Override + public CacheConfigurationBuilder builderFor(ClassLoader classLoader, Class keyType, Class valueType, ResourcePools resources) throws ClassNotFoundException, InstantiationException, IllegalAccessException { + checkTemplateTypeConsistency("key", classLoader, keyType, template); + checkTemplateTypeConsistency("value", classLoader, valueType, template); + + if ((resources == null || resources.getResourceTypeSet().isEmpty()) && template.getHeap() == null && template.getResources().isEmpty()) { + throw new IllegalStateException("Template defines no resources, and none were provided"); + } + + if (resources == null) { + resources = resourceConfigurationParser.parseResourceConfiguration(template, newResourcePoolsBuilder()); + } + + return parseServiceConfigurations(newCacheConfigurationBuilder(keyType, valueType, resources), classLoader, template); + } + }; + } + + private static void checkTemplateTypeConsistency(String type, ClassLoader classLoader, Class providedType, CacheTemplate template) throws ClassNotFoundException { + Class templateType; + if (type.equals("key")) { + templateType = getClassForName(template.keyType(), classLoader); + } else { + templateType = getClassForName(template.valueType(), classLoader); + } + + if(providedType == null || !templateType.isAssignableFrom(providedType)) { + throw new IllegalArgumentException("CacheTemplate '" + template.id() + "' declares " + type + " type of " + templateType.getName() + ". Provided: " + providedType); + } + } + + public Document uriToDocument(URI uri) throws IOException, SAXException { + return documentBuilder.parse(uri.toString()); + } + + public XmlConfigurationWrapper documentToConfig(Document document, ClassLoader classLoader, Map cacheClassLoaders) throws JAXBException, ClassNotFoundException, InstantiationException, IllegalAccessException { + Element root = document.getDocumentElement(); + + QName rootName = new QName(root.getNamespaceURI(), root.getLocalName()); + if (!CORE_SCHEMA_ROOT_NAME.equals(rootName)) { + throw new XmlConfigurationException("Expecting " + CORE_SCHEMA_ROOT_NAME + " element; found " + rootName); + } + + Class configTypeClass = ConfigType.class; + Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); + unmarshaller.setEventHandler(new DefaultValidationEventHandler()); + ConfigType jaxbModel = unmarshaller.unmarshal(document, configTypeClass).getValue(); + + FluentConfigurationBuilder managerBuilder = newConfigurationBuilder().withClassLoader(classLoader); + managerBuilder = serviceCreationConfigurationParser.parseServiceCreationConfiguration(jaxbModel, classLoader, managerBuilder); + + for (CacheDefinition cacheDefinition : getCacheElements(jaxbModel)) { + String alias = cacheDefinition.id(); + if(managerBuilder.getCache(alias) != null) { + throw new XmlConfigurationException("Two caches defined with the same alias: " + alias); + } + + ClassLoader cacheClassLoader = cacheClassLoaders.get(alias); + boolean classLoaderConfigured = cacheClassLoader != null; + + if (cacheClassLoader == null) { + if (classLoader != null) { + cacheClassLoader = classLoader; + } else { + cacheClassLoader = ClassLoading.getDefaultClassLoader(); + } + } + + Class keyType = getClassForName(cacheDefinition.keyType(), cacheClassLoader); + Class valueType = getClassForName(cacheDefinition.valueType(), cacheClassLoader); + + ResourcePools resourcePools = resourceConfigurationParser.parseResourceConfiguration(cacheDefinition, newResourcePoolsBuilder()); + + CacheConfigurationBuilder cacheBuilder = newCacheConfigurationBuilder(keyType, valueType, resourcePools); + if (classLoaderConfigured) { + cacheBuilder = cacheBuilder.withClassLoader(cacheClassLoader); + } + + cacheBuilder = parseServiceConfigurations(cacheBuilder, cacheClassLoader, cacheDefinition); + managerBuilder = managerBuilder.withCache(alias, cacheBuilder.build()); + } + + Map templates = getTemplates(jaxbModel); + + return new XmlConfigurationWrapper(managerBuilder.build(), templates); + } + + public Document configToDocument(Configuration configuration) throws JAXBException { + ConfigType configType = new ConfigType(); + + serviceCreationConfigurationParser.unparseServiceCreationConfiguration(configuration, configType); + + for (Map.Entry> cacheConfigurationEntry : configuration.getCacheConfigurations().entrySet()) { + CacheConfiguration cacheConfiguration = cacheConfigurationEntry.getValue(); + + CacheType cacheType = new CacheType().withAlias(cacheConfigurationEntry.getKey()) + .withKeyType(new CacheEntryType().withValue(cacheConfiguration.getKeyType().getName())) + .withValueType(new CacheEntryType().withValue(cacheConfiguration.getValueType().getName())); + + resourceConfigurationParser.unparseResourceConfiguration(cacheConfiguration.getResourcePools(), cacheType); + + CORE_CACHE_CONFIGURATION_PARSER.unparseConfiguration(cacheConfiguration, cacheType); + serviceConfigurationParser.unparseServiceConfiguration(cacheConfiguration, cacheType); + configType.withCacheOrCacheTemplate(cacheType); + } + + JAXBElement root = new ObjectFactory().createConfig(configType); + + Marshaller marshaller = jaxbContext.createMarshaller(); + marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true); + marshaller.setSchema(schema); + + Document document = documentBuilder.newDocument(); + marshaller.marshal(root, document); + return document; + } + + public static class FatalErrorHandler implements ErrorHandler { + + private static final Collection ABSTRACT_TYPE_FAILURES; + static { + ObjectFactory objectFactory = new ObjectFactory(); + List abstractTypes = asList( + objectFactory.createServiceCreationConfiguration(null).getName(), + objectFactory.createServiceConfiguration(null).getName(), + objectFactory.createResource(null).getName()); + + ABSTRACT_TYPE_FAILURES = asList( + //Xerces + abstractTypes.stream().map(element -> quote(format("\"%s\":%s", element.getNamespaceURI(), element.getLocalPart()))) + .collect(collectingAndThen(joining("|", "^\\Qcvc-complex-type.2.4.a\\E.*'\\{.*(?:", ").*\\}'.*$"), Pattern::compile)), + //Saxon + abstractTypes.stream().map(element -> quote(element.getLocalPart())) + .collect(collectingAndThen(joining("|", "^.*\\QThe content model does not allow element\\E.*(?:", ").*"), Pattern::compile))); + } + + @Override + public void warning(SAXParseException exception) throws SAXException { + fatalError(exception); + } + + @Override + public void error(SAXParseException exception) throws SAXException { + fatalError(exception); + } + + @Override + public void fatalError(SAXParseException exception) throws SAXException { + if (ABSTRACT_TYPE_FAILURES.stream().anyMatch(pattern -> pattern.matcher(exception.getMessage()).matches())) { + throw new XmlConfigurationException( + "Cannot confirm XML sub-type correctness. You might be missing client side libraries.", exception); + } else { + throw exception; + } + } + } + + public static class XmlConfigurationWrapper { + private final Configuration configuration; + private final Map templates; + + public XmlConfigurationWrapper(Configuration configuration, Map templates) { + this.configuration = configuration; + this.templates = templates; + } + + public Configuration getConfiguration() { + return configuration; + } + + public Map getTemplates() { + return templates; + } + } + + public static String documentToText(Document xml) throws IOException, TransformerException { + try (StringWriter writer = new StringWriter()) { + transformer().transform(new DOMSource(xml), new StreamResult(writer)); + return writer.toString(); + } + } + + private static Transformer transformer() throws TransformerConfigurationException { + Transformer transformer = TRANSFORMER_FACTORY.newTransformer(); + transformer.setOutputProperty(OutputKeys.METHOD, "xml"); + transformer.setOutputProperty(OutputKeys.ENCODING, StandardCharsets.UTF_8.name()); + transformer.setOutputProperty(OutputKeys.INDENT, "yes"); + transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2"); + return transformer; + } + + public static String urlToText(URL url, String encoding) throws IOException { + Charset charset = encoding == null ? StandardCharsets.UTF_8 : Charset.forName(encoding); + try (BufferedReader reader = new BufferedReader(new InputStreamReader(url.openStream(), charset))) { + return reader.lines().collect(joining(System.lineSeparator())); + } + } + + public static DocumentBuilder documentBuilder(Schema schema) throws ParserConfigurationException { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + factory.setNamespaceAware(true); + factory.setIgnoringComments(true); + factory.setIgnoringElementContentWhitespace(true); + factory.setSchema(schema); + DocumentBuilder documentBuilder = factory.newDocumentBuilder(); + documentBuilder.setErrorHandler(new FatalErrorHandler()); + return documentBuilder; + } + + public static Schema discoverSchema(Source ... fixedSources) throws SAXException, IOException { + Map pluginSchemas = new HashMap<>(); + for (CacheManagerServiceConfigurationParser p : servicesOfType(CacheManagerServiceConfigurationParser.class)) { + if (!pluginSchemas.containsKey(p.getNamespace())) { + pluginSchemas.put(p.getNamespace(), p.getXmlSchema()); + } + } + for (CacheServiceConfigurationParser p : servicesOfType(CacheServiceConfigurationParser.class)) { + if (!pluginSchemas.containsKey(p.getNamespace())) { + pluginSchemas.put(p.getNamespace(), p.getXmlSchema()); + } + } + for (CacheResourceConfigurationParser p : servicesOfType(CacheResourceConfigurationParser.class)) { + if (!pluginSchemas.containsKey(p.getNamespace())) { + pluginSchemas.put(p.getNamespace(), p.getXmlSchema()); + } + } + + List schemaSources = new ArrayList<>(asList(fixedSources)); + schemaSources.addAll(pluginSchemas.values()); + + return newSchema(schemaSources.toArray(new Source[0])); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/CoreCacheConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/CoreCacheConfigurationParser.java new file mode 100644 index 0000000000..89681e3999 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/CoreCacheConfigurationParser.java @@ -0,0 +1,120 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.config.ExpiryUtils; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.Expiry; +import org.ehcache.xml.model.ExpiryType; +import org.ehcache.xml.model.ObjectFactory; +import org.ehcache.xml.model.TimeTypeWithPropSubst; + +import java.math.BigInteger; +import java.time.Duration; +import java.util.stream.Stream; + +import static java.util.Comparator.comparing; +import static java.util.concurrent.TimeUnit.NANOSECONDS; +import static org.ehcache.core.config.ExpiryUtils.jucTimeUnitToTemporalUnit; +import static org.ehcache.xml.XmlConfiguration.getClassForName; +import static org.ehcache.xml.XmlModel.convertToXmlTimeUnit; + +public class CoreCacheConfigurationParser { + + public CacheConfigurationBuilder parseConfiguration(CacheTemplate cacheDefinition, ClassLoader cacheClassLoader, + CacheConfigurationBuilder cacheBuilder) throws ClassNotFoundException, IllegalAccessException, InstantiationException { + final Expiry parsedExpiry = cacheDefinition.expiry(); + if (parsedExpiry != null) { + cacheBuilder = cacheBuilder.withExpiry(getExpiry(cacheClassLoader, parsedExpiry)); + } + + @SuppressWarnings("unchecked") + EvictionAdvisor evictionAdvisor = getInstanceOfName(cacheDefinition.evictionAdvisor(), cacheClassLoader, EvictionAdvisor.class); + cacheBuilder = cacheBuilder.withEvictionAdvisor(evictionAdvisor); + + return cacheBuilder; + } + + @SuppressWarnings({"unchecked", "deprecation"}) + private static ExpiryPolicy getExpiry(ClassLoader cacheClassLoader, Expiry parsedExpiry) + throws ClassNotFoundException, InstantiationException, IllegalAccessException { + if (parsedExpiry.isUserDef()) { + try { + return getInstanceOfName(parsedExpiry.type(), cacheClassLoader, ExpiryPolicy.class); + } catch (ClassCastException e) { + return ExpiryUtils.convertToExpiryPolicy(getInstanceOfName(parsedExpiry.type(), cacheClassLoader, org.ehcache.expiry.Expiry.class)); + } + } else if (parsedExpiry.isTTL()) { + return ExpiryPolicyBuilder.timeToLiveExpiration(Duration.of(parsedExpiry.value(), parsedExpiry.unit())); + } else if (parsedExpiry.isTTI()) { + return ExpiryPolicyBuilder.timeToIdleExpiration(Duration.of(parsedExpiry.value(), parsedExpiry.unit())); + } else { + return ExpiryPolicyBuilder.noExpiration(); + } + } + + static T getInstanceOfName(String name, ClassLoader classLoader, Class type) throws ClassNotFoundException, InstantiationException, IllegalAccessException { + if (name == null) { + return null; + } + Class klazz = getClassForName(name, classLoader); + return klazz.asSubclass(type).newInstance(); + } + + public CacheType unparseConfiguration(CacheConfiguration cacheConfiguration, CacheType cacheType) { + ExpiryPolicy expiryPolicy = cacheConfiguration.getExpiryPolicy(); + if (expiryPolicy != null) { + Duration expiry = expiryPolicy.getExpiryForCreation(null, null); + ExpiryType expiryType = new ExpiryType(); + if (expiryPolicy.equals(ExpiryPolicy.NO_EXPIRY)) { + expiryType.withNone(new ExpiryType.None()); + } else if (expiryPolicy.equals(ExpiryPolicyBuilder.timeToLiveExpiration(expiry))) { + expiryType.withTtl(convertToTimeType(expiry)); + } else if (expiryPolicy.equals(ExpiryPolicyBuilder.timeToIdleExpiration(expiry))) { + expiryType.withTti(convertToTimeType(expiry)); + } else { + throw new XmlConfigurationException("XML translation of custom expiry policy is not supported"); + } + cacheType.withExpiry(expiryType); + } + + EvictionAdvisor evictionAdvisor = cacheConfiguration.getEvictionAdvisor(); + if (evictionAdvisor != null) { + throw new XmlConfigurationException("XML translation of eviction advisor is not supported"); + } + + return cacheType; + } + + private static TimeTypeWithPropSubst convertToTimeType(Duration duration) { + return Stream.of(java.util.concurrent.TimeUnit.values()) + .sorted(comparing(unit -> unit.convert(duration.toNanos(), NANOSECONDS))) + .filter(unit -> duration.equals(Duration.of(unit.convert(duration.toNanos(), NANOSECONDS), jucTimeUnitToTemporalUnit(unit)))) + .findFirst() + .map(unit -> new ObjectFactory().createTimeTypeWithPropSubst() + .withValue(BigInteger.valueOf(unit.convert(duration.toNanos(), NANOSECONDS))) + .withUnit(convertToXmlTimeUnit(unit)) + ).orElseThrow(AssertionError::new); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/CoreServiceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/CoreServiceConfigurationParser.java new file mode 100644 index 0000000000..82e5645769 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/CoreServiceConfigurationParser.java @@ -0,0 +1,31 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; +import org.w3c.dom.Document; + +public interface CoreServiceConfigurationParser { + + CacheConfigurationBuilder parseServiceConfiguration(CacheTemplate cacheDefinition, ClassLoader cacheClassLoader, + CacheConfigurationBuilder cacheBuilder) throws ClassNotFoundException; + + CacheType unparseServiceConfiguration(CacheConfiguration cacheConfiguration, CacheType cacheType); +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/CoreServiceCreationConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/CoreServiceCreationConfigurationParser.java new file mode 100644 index 0000000000..a5d951d8b3 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/CoreServiceCreationConfigurationParser.java @@ -0,0 +1,28 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.Configuration; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.xml.model.ConfigType; + +public interface CoreServiceCreationConfigurationParser { + + FluentConfigurationBuilder parseServiceCreationConfiguration(ConfigType root, ClassLoader classLoader, FluentConfigurationBuilder builder) throws ClassNotFoundException; + + ConfigType unparseServiceCreationConfiguration(Configuration configuration, ConfigType root); +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/DomUtil.java b/ehcache-xml/src/main/java/org/ehcache/xml/DomUtil.java new file mode 100644 index 0000000000..e47f8300d2 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/DomUtil.java @@ -0,0 +1,91 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.xml; + +import org.w3c.dom.Document; +import org.xml.sax.ErrorHandler; +import org.xml.sax.SAXException; +import org.xml.sax.SAXParseException; + +import java.io.IOException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +import static org.ehcache.xml.ConfigurationParser.newSchema; + +public class DomUtil { + + private static final URL CORE_SCHEMA_URL = XmlConfiguration.class.getResource("/ehcache-core.xsd"); + + public static DocumentBuilder createAndGetDocumentBuilder(Collection schemaSources) throws SAXException, ParserConfigurationException { + DocumentBuilderFactory factory = createAndGetFactory(schemaSources); + DocumentBuilder documentBuilder = factory.newDocumentBuilder(); + documentBuilder.setErrorHandler(new TransformationErrorHandler()); + return documentBuilder; + } + + public static DocumentBuilder createAndGetDocumentBuilder(Source schemaSource) throws SAXException, ParserConfigurationException, IOException { + List schemaSources = new ArrayList<>(2); + schemaSources.add(new StreamSource(CORE_SCHEMA_URL.openStream())); + schemaSources.add(schemaSource); + return createAndGetDocumentBuilder(schemaSources); + } + + public static DocumentBuilder createAndGetDocumentBuilder() throws SAXException, ParserConfigurationException, IOException { + return createAndGetDocumentBuilder(new StreamSource(CORE_SCHEMA_URL.openStream())); + } + + private static DocumentBuilderFactory createAndGetFactory(Collection schemaSources) throws SAXException { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + factory.setNamespaceAware(true); + factory.setIgnoringComments(true); + factory.setIgnoringElementContentWhitespace(true); + factory.setSchema(newSchema(schemaSources.toArray(new Source[schemaSources.size()]))); + return factory; + } + + public static Document createDocumentRoot(Source schemaSource) throws IOException, SAXException, ParserConfigurationException { + DocumentBuilder domBuilder = createAndGetDocumentBuilder(schemaSource); + Document doc = domBuilder.newDocument(); + return doc; + } + + static class TransformationErrorHandler implements ErrorHandler { + + @Override + public void warning(SAXParseException exception) throws SAXException { + throw exception; + } + + @Override + public void error(SAXParseException exception) throws SAXException { + throw exception; + } + + @Override + public void fatalError(SAXParseException exception) throws SAXException { + throw exception; + } + } +} diff --git a/xml/src/main/java/org/ehcache/xml/JaxbHelper.java b/ehcache-xml/src/main/java/org/ehcache/xml/JaxbHelper.java similarity index 97% rename from xml/src/main/java/org/ehcache/xml/JaxbHelper.java rename to ehcache-xml/src/main/java/org/ehcache/xml/JaxbHelper.java index d0b9f71aca..14a1261cfb 100644 --- a/xml/src/main/java/org/ehcache/xml/JaxbHelper.java +++ b/ehcache-xml/src/main/java/org/ehcache/xml/JaxbHelper.java @@ -22,7 +22,7 @@ /** * @author Ludovic Orban */ -final class JaxbHelper { +public final class JaxbHelper { public static String findDefaultValue(Object jaxbObject, String fieldName) { Field declaredField = null; diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/JaxbParsers.java b/ehcache-xml/src/main/java/org/ehcache/xml/JaxbParsers.java new file mode 100644 index 0000000000..3d3e5acfca --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/JaxbParsers.java @@ -0,0 +1,83 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import java.math.BigInteger; +import java.security.PrivilegedAction; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static java.security.AccessController.doPrivileged; + +public class JaxbParsers { + + private static final Pattern SYSPROP = Pattern.compile("\\$\\{(?[^{}]+)}"); + private static final Pattern PADDED_SYSPROP = Pattern.compile("\\s*" + SYSPROP.pattern() + "\\s*"); + + public static String parsePropertyOrString(String s) { + Matcher matcher = PADDED_SYSPROP.matcher(s); + if (matcher.matches()) { + String property = matcher.group("property"); + String value = doPrivileged((PrivilegedAction) () -> System.getProperty(property)); + if (value == null) { + throw new IllegalStateException(String.format("Replacement for ${%s} not found!", property)); + } else { + return value; + } + } else { + return s; + } + } + + public static BigInteger parsePropertyOrInteger(String s) { + return new BigInteger(parsePropertyOrString(s)); + } + + public static BigInteger parsePropertyOrPositiveInteger(String s) { + BigInteger value = parsePropertyOrInteger(s); + if (value.compareTo(BigInteger.ZERO) > 0) { + return value; + } else { + throw new IllegalArgumentException("Value " + value + " is not a positive integer"); + } + } + + public static BigInteger parsePropertyOrNonNegativeInteger(String s) { + BigInteger value = parsePropertyOrInteger(s); + if (value.compareTo(BigInteger.ZERO) >= 0) { + return value; + } else { + throw new IllegalArgumentException("Value " + value + " is not a non-negative integer"); + } + } + + public static String parseStringWithProperties(String s) { + Matcher matcher = SYSPROP.matcher(s); + StringBuffer sb = new StringBuffer(); + while (matcher.find()) { + final String property = matcher.group("property"); + final String value = doPrivileged((PrivilegedAction) () -> System.getProperty(property)); + if (value == null) { + throw new IllegalStateException(String.format("Replacement for ${%s} not found!", property)); + } + matcher.appendReplacement(sb, Matcher.quoteReplacement(value)); + } + matcher.appendTail(sb); + return sb.toString(); + } + +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/ResourceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/ResourceConfigurationParser.java new file mode 100644 index 0000000000..7fb910948c --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/ResourceConfigurationParser.java @@ -0,0 +1,213 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.ResourcePool; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.ResourceUnit; +import org.ehcache.config.SizedResourcePool; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.config.SizedResourcePoolImpl; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.Disk; +import org.ehcache.xml.model.Heap; +import org.ehcache.xml.model.MemoryTypeWithPropSubst; +import org.ehcache.xml.model.ObjectFactory; +import org.ehcache.xml.model.Offheap; +import org.ehcache.xml.model.PersistableMemoryTypeWithPropSubst; +import org.ehcache.xml.model.ResourceTypeWithPropSubst; +import org.ehcache.xml.model.ResourcesType; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.xml.sax.SAXException; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; +import javax.xml.bind.Unmarshaller; +import javax.xml.bind.helpers.DefaultValidationEventHandler; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.stream.StreamSource; +import javax.xml.validation.Schema; + +import static org.ehcache.xml.ConfigurationParser.newSchema; +import static org.ehcache.xml.XmlConfiguration.CORE_SCHEMA_URL; + +public class ResourceConfigurationParser { + + private static final ObjectFactory OBJECT_FACTORY = new ObjectFactory(); + private static final Schema CORE_SCHEMA; + static { + try { + CORE_SCHEMA = newSchema(new StreamSource(CORE_SCHEMA_URL.toExternalForm())); + } catch (Exception e) { + throw new AssertionError(e); + } + } + private static final String CORE_SCHEMA_NS = OBJECT_FACTORY.createResource(OBJECT_FACTORY.createResourceTypeWithPropSubst()).getName().getNamespaceURI(); + + private final JAXBContext jaxbContext; + private final Set extensionParsers; + + public ResourceConfigurationParser(Set extensionParsers) { + this.extensionParsers = extensionParsers; + try { + this.jaxbContext = JAXBContext.newInstance(ResourcesType.class); + } catch (JAXBException e) { + throw new AssertionError(e); + } + } + + public ResourcePools parseResourceConfiguration(CacheTemplate cacheTemplate, ResourcePoolsBuilder resourcePoolsBuilder) { + + if (cacheTemplate.getHeap() != null) { + resourcePoolsBuilder = resourcePoolsBuilder.with(parseHeapConfiguration(cacheTemplate.getHeap())); + } else if (!cacheTemplate.getResources().isEmpty()) { + for (Element element : cacheTemplate.getResources()) { + ResourcePool resourcePool; + if (!CORE_SCHEMA_NS.equals(element.getNamespaceURI())) { + resourcePool = parseResourceExtension(element); + } else { + try { + Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); + unmarshaller.setEventHandler(new DefaultValidationEventHandler()); + Object resource = unmarshaller.unmarshal(element); + if (resource instanceof Heap) { + resourcePool = parseHeapConfiguration((Heap) resource); + } else if (resource instanceof Offheap) { + MemoryTypeWithPropSubst offheapResource = ((Offheap) resource).getValue(); + resourcePool = new SizedResourcePoolImpl<>(org.ehcache.config.ResourceType.Core.OFFHEAP, + offheapResource.getValue().longValue(), parseMemory(offheapResource), false); + } else if (resource instanceof Disk) { + PersistableMemoryTypeWithPropSubst diskResource = ((Disk) resource).getValue(); + resourcePool = new SizedResourcePoolImpl<>(org.ehcache.config.ResourceType.Core.DISK, + diskResource.getValue().longValue(), parseMemory(diskResource), diskResource.isPersistent()); + } else { + // Someone updated the core resources without updating *this* code ... + throw new AssertionError("Unrecognized resource: " + element + " / " + resource.getClass().getName()); + } + } catch (JAXBException e) { + throw new IllegalArgumentException("Can't find parser for resource: " + element, e); + } + } + + resourcePoolsBuilder = resourcePoolsBuilder.with(resourcePool); + } + } else { + throw new XmlConfigurationException("No resources defined for the cache: " + cacheTemplate.id()); + } + + return resourcePoolsBuilder.build(); + } + + private ResourcePool parseHeapConfiguration(Heap heap) { + ResourceTypeWithPropSubst heapResource = heap.getValue(); + return new SizedResourcePoolImpl<>(org.ehcache.config.ResourceType.Core.HEAP, + heapResource.getValue().longValue(), parseUnit(heapResource), false); + } + + private static ResourceUnit parseUnit(ResourceTypeWithPropSubst resourceType) { + if (resourceType.getUnit().equals(org.ehcache.xml.model.ResourceUnit.ENTRIES)) { + return EntryUnit.ENTRIES; + } else { + return org.ehcache.config.units.MemoryUnit.valueOf(resourceType.getUnit().value().toUpperCase()); + } + } + + private static org.ehcache.config.units.MemoryUnit parseMemory(MemoryTypeWithPropSubst memoryType) { + return MemoryUnit.valueOf(memoryType.getUnit().value().toUpperCase()); + } + + ResourcePool parseResourceExtension(final Element element) { + for (CacheResourceConfigurationParser parser : extensionParsers) { + ResourcePool resourcePool = parser.parseResourceConfiguration(element); + if (resourcePool != null) { + return resourcePool; + } + } + throw new XmlConfigurationException("Can't find parser for element: " + element); + } + + public CacheType unparseResourceConfiguration(ResourcePools resourcePools, CacheType cacheType) { + List resources = new ArrayList<>(); + resourcePools.getResourceTypeSet().forEach(resourceType -> { + Element element; + ResourcePool resourcePool = resourcePools.getPoolForResource(resourceType); + if (resourceType instanceof org.ehcache.config.ResourceType.Core) { + SizedResourcePool pool = (SizedResourcePool) resourcePool; + Object resource; + if (resourceType == org.ehcache.config.ResourceType.Core.HEAP) { + resource = OBJECT_FACTORY.createHeap(OBJECT_FACTORY.createResourceTypeWithPropSubst().withValue(BigInteger.valueOf(pool.getSize())).withUnit(unparseUnit(pool.getUnit()))); + } else if (resourceType == org.ehcache.config.ResourceType.Core.OFFHEAP) { + resource = OBJECT_FACTORY.createOffheap(OBJECT_FACTORY.createMemoryTypeWithPropSubst().withValue(BigInteger.valueOf(pool.getSize())).withUnit(unparseMemory((MemoryUnit) pool.getUnit()))); + } else if (resourceType == org.ehcache.config.ResourceType.Core.DISK) { + resource = OBJECT_FACTORY.createDisk(OBJECT_FACTORY.createPersistableMemoryTypeWithPropSubst().withValue(BigInteger.valueOf(pool.getSize())) + .withUnit(unparseMemory((MemoryUnit) pool.getUnit())).withPersistent(pool.isPersistent())); + } else { + throw new AssertionError("Unrecognized core resource type: " + resourceType); + } + + try { + Document document = DomUtil.createAndGetDocumentBuilder().newDocument(); + Marshaller marshaller = jaxbContext.createMarshaller(); + marshaller.setSchema(CORE_SCHEMA); + marshaller.marshal(resource, document); + element = document.getDocumentElement(); + } catch (SAXException | ParserConfigurationException | IOException | JAXBException e) { + throw new XmlConfigurationException(e); + } + } else { + Map, CacheResourceConfigurationParser> parsers = new HashMap<>(); + extensionParsers.forEach(parser -> parser.getResourceTypes().forEach(rt -> parsers.put(rt, parser))); + CacheResourceConfigurationParser parser = parsers.get(resourcePool.getClass()); + if (parser != null) { + element = parser.unparseResourcePool(resourcePool); + } else { + throw new AssertionError("Parser not found for resource type: " + resourceType); + } + } + + resources.add(element); + }); + return cacheType.withResources(OBJECT_FACTORY.createResourcesType().withResource(resources)); + } + + private static org.ehcache.xml.model.ResourceUnit unparseUnit(ResourceUnit resourceUnit) { + if (resourceUnit instanceof EntryUnit) { + return org.ehcache.xml.model.ResourceUnit.ENTRIES; + } else { + return org.ehcache.xml.model.ResourceUnit.fromValue(resourceUnit.toString()); + } + } + + private static org.ehcache.xml.model.MemoryUnit unparseMemory(MemoryUnit memoryUnit) { + return org.ehcache.xml.model.MemoryUnit.fromValue(memoryUnit.toString()); + } + +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/ServiceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/ServiceConfigurationParser.java new file mode 100644 index 0000000000..5f53219f96 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/ServiceConfigurationParser.java @@ -0,0 +1,101 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.service.DefaultCacheEventDispatcherConfigurationParser; +import org.ehcache.xml.service.DefaultCacheEventListenerConfigurationParser; +import org.ehcache.xml.service.DefaultCacheLoaderWriterConfigurationParser; +import org.ehcache.xml.service.DefaultCopierConfigurationParser; +import org.ehcache.xml.service.DefaultResilienceStrategyConfigurationParser; +import org.ehcache.xml.service.DefaultSerializerConfigurationParser; +import org.ehcache.xml.service.DefaultSizeOfEngineConfigurationParser; +import org.ehcache.xml.service.DefaultWriteBehindConfigurationParser; +import org.ehcache.xml.service.OffHeapDiskStoreConfigurationParser; +import org.w3c.dom.Element; + +import java.net.URI; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static java.util.Arrays.asList; +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; + +public class ServiceConfigurationParser { + + static final Collection CORE_SERVICE_CONFIGURATION_PARSERS = asList( + new DefaultSerializerConfigurationParser(), + new DefaultCopierConfigurationParser(), + new DefaultCacheLoaderWriterConfigurationParser(), + new DefaultResilienceStrategyConfigurationParser(), + new DefaultSizeOfEngineConfigurationParser(), + new DefaultWriteBehindConfigurationParser(), + new OffHeapDiskStoreConfigurationParser(), + new DefaultCacheEventDispatcherConfigurationParser(), + new DefaultCacheEventListenerConfigurationParser() + ); + + private final Map, CacheServiceConfigurationParser> extensionParsers; + + public ServiceConfigurationParser(Map, CacheServiceConfigurationParser> extensionParsers) { + this.extensionParsers = extensionParsers; + } + + public CacheConfigurationBuilder parseConfiguration(CacheTemplate cacheDefinition, ClassLoader cacheClassLoader, + CacheConfigurationBuilder cacheBuilder) throws ClassNotFoundException, IllegalAccessException, InstantiationException { + for (CoreServiceConfigurationParser coreServiceConfigParser : CORE_SERVICE_CONFIGURATION_PARSERS) { + cacheBuilder = coreServiceConfigParser.parseServiceConfiguration(cacheDefinition, cacheClassLoader, cacheBuilder); + } + + Map> parsers = extensionParsers.values().stream(). + collect(toMap(CacheServiceConfigurationParser::getNamespace, identity())); + for (Element element : cacheDefinition.serviceConfigExtensions()) { + URI namespace = URI.create(element.getNamespaceURI()); + final CacheServiceConfigurationParser xmlConfigurationParser = parsers.get(namespace); + if(xmlConfigurationParser == null) { + throw new IllegalArgumentException("Can't find parser for namespace: " + namespace); + } + cacheBuilder = cacheBuilder.withService(xmlConfigurationParser.parseServiceConfiguration(element, cacheClassLoader)); + } + + return cacheBuilder; + } + + CacheType unparseServiceConfiguration(CacheConfiguration cacheConfiguration, CacheType cacheType) { + for (CoreServiceConfigurationParser parser : CORE_SERVICE_CONFIGURATION_PARSERS) { + parser.unparseServiceConfiguration(cacheConfiguration, cacheType); + } + + List serviceConfigs = cacheType.getServiceConfiguration(); + cacheConfiguration.getServiceConfigurations().forEach(config -> { + @SuppressWarnings("rawtypes") + CacheServiceConfigurationParser parser = extensionParsers.get(config.getServiceType()); + if (parser != null) { + @SuppressWarnings("unchecked") + Element element = parser.unparseServiceConfiguration(config); + serviceConfigs.add(element); + } + }); + + return cacheType; + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/ServiceCreationConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/ServiceCreationConfigurationParser.java new file mode 100644 index 0000000000..adc4050533 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/ServiceCreationConfigurationParser.java @@ -0,0 +1,104 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.Configuration; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.ServiceType; +import org.ehcache.xml.provider.CacheEventDispatcherFactoryConfigurationParser; +import org.ehcache.xml.provider.CacheManagerPersistenceConfigurationParser; +import org.ehcache.xml.provider.DefaultCopyProviderConfigurationParser; +import org.ehcache.xml.provider.DefaultSerializationProviderConfigurationParser; +import org.ehcache.xml.provider.DefaultSizeOfEngineProviderConfigurationParser; +import org.ehcache.xml.provider.OffHeapDiskStoreProviderConfigurationParser; +import org.ehcache.xml.provider.PooledExecutionServiceConfigurationParser; +import org.ehcache.xml.provider.WriteBehindProviderConfigurationParser; +import org.w3c.dom.Element; + +import java.net.URI; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static java.util.Arrays.asList; +import static java.util.stream.Collectors.toMap; +import static java.util.function.Function.identity; + +public class ServiceCreationConfigurationParser { + + static final Collection CORE_SERVICE_CREATION_CONFIGURATION_PARSERS = asList( + new DefaultCopyProviderConfigurationParser(), + new DefaultSerializationProviderConfigurationParser(), + new OffHeapDiskStoreProviderConfigurationParser(), + new CacheEventDispatcherFactoryConfigurationParser(), + new DefaultSizeOfEngineProviderConfigurationParser(), + new CacheManagerPersistenceConfigurationParser(), + new PooledExecutionServiceConfigurationParser(), + new WriteBehindProviderConfigurationParser() + ); + + private final Map, CacheManagerServiceConfigurationParser> extensionParsers; + + public ServiceCreationConfigurationParser(Map, CacheManagerServiceConfigurationParser> extensionParsers) { + this.extensionParsers = extensionParsers; + } + + FluentConfigurationBuilder parseServiceCreationConfiguration(ConfigType configRoot, ClassLoader classLoader, FluentConfigurationBuilder managerBuilder) throws ClassNotFoundException { + for (CoreServiceCreationConfigurationParser parser : CORE_SERVICE_CREATION_CONFIGURATION_PARSERS) { + managerBuilder = parser.parseServiceCreationConfiguration(configRoot, classLoader, managerBuilder); + } + + Map> parsers = extensionParsers.values().stream(). + collect(toMap(CacheManagerServiceConfigurationParser::getNamespace, identity())); + for (ServiceType serviceType : configRoot.getService()) { + Element element = serviceType.getServiceCreationConfiguration(); + URI namespace = URI.create(element.getNamespaceURI()); + CacheManagerServiceConfigurationParser cacheManagerServiceConfigurationParser = parsers.get(namespace); + if(cacheManagerServiceConfigurationParser == null) { + throw new IllegalArgumentException("Can't find parser for namespace: " + namespace); + } + ServiceCreationConfiguration serviceConfiguration = cacheManagerServiceConfigurationParser.parseServiceCreationConfiguration(element, classLoader); + managerBuilder = managerBuilder.withService(serviceConfiguration); + } + + return managerBuilder; + } + + + ConfigType unparseServiceCreationConfiguration(Configuration configuration, ConfigType configType) { + for (CoreServiceCreationConfigurationParser parser : CORE_SERVICE_CREATION_CONFIGURATION_PARSERS) { + parser.unparseServiceCreationConfiguration(configuration, configType); + } + + List services = configType.getService(); + configuration.getServiceCreationConfigurations().forEach(config -> { + @SuppressWarnings("rawtypes") + CacheManagerServiceConfigurationParser parser = extensionParsers.get(config.getServiceType()); + if (parser != null) { + ServiceType serviceType = new ServiceType(); + @SuppressWarnings("unchecked") + Element element = parser.unparseServiceCreationConfiguration(config); + serviceType.setServiceCreationConfiguration(element); + services.add(serviceType); + } + }); + + return configType; + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/XmlConfiguration.java b/ehcache-xml/src/main/java/org/ehcache/xml/XmlConfiguration.java new file mode 100644 index 0000000000..f44adb566d --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/XmlConfiguration.java @@ -0,0 +1,441 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.Builder; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.core.util.ClassLoading; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.w3c.dom.Document; + +import java.lang.reflect.Array; +import java.net.URL; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; + +import static java.lang.Class.forName; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static java.util.Objects.requireNonNull; +import static org.ehcache.config.builders.ConfigurationBuilder.newConfigurationBuilder; +import static org.ehcache.xml.ConfigurationParser.documentToText; +import static org.ehcache.xml.XmlConfiguration.PrettyClassFormat.when; + +/** + * Exposes {@link org.ehcache.config.Configuration} and {@link CacheConfigurationBuilder} expressed + * in a XML file that obeys the core Ehcache schema. + *

                                      + * Instances of this class are not thread-safe. + */ +public class XmlConfiguration implements Configuration { + + public static final URL CORE_SCHEMA_URL = XmlConfiguration.class.getResource("/ehcache-core.xsd"); + + private final URL source; + private final Document document; + private final String renderedDocument; + + private final Configuration configuration; + private final Map cacheClassLoaders; + private final Map templates; + + /** + * Constructs an instance of XmlConfiguration mapping to the XML file located at {@code url}. + *

                                      + * The default ClassLoader will first try to use the thread context class loader, followed by the ClassLoader that + * loaded the Ehcache classes. + * + * @param url URL pointing to the XML file's location + * + * @throws XmlConfigurationException if anything went wrong parsing the XML + */ + public XmlConfiguration(URL url) + throws XmlConfigurationException { + this(url, ClassLoading.getDefaultClassLoader()); + } + + /** + * Constructs an instance of XmlConfiguration mapping to the XML file located at {@code url} and using the provided + * {@code classLoader} to load user types (e.g. key and value Class instances). + * + * @param url URL pointing to the XML file's location + * @param classLoader ClassLoader to use to load user types. + * + * @throws XmlConfigurationException if anything went wrong parsing the XML + */ + public XmlConfiguration(URL url, final ClassLoader classLoader) + throws XmlConfigurationException { + this(url, classLoader, Collections.emptyMap()); + } + + /** + * Constructs an instance of XmlConfiguration mapping to the XML file located at {@code url} and using the provided + * {@code classLoader} to load user types (e.g. key and value Class instances). The {@code cacheClassLoaders} will + * let you specify a different {@link java.lang.ClassLoader} to use for each {@link org.ehcache.Cache} managed by + * the {@link org.ehcache.CacheManager} configured using this {@link org.ehcache.xml.XmlConfiguration}. Caches with + * aliases that do not appear in the map will use {@code classLoader} as a default. + * + * @param url URL pointing to the XML file's location + * @param classLoader ClassLoader to use to load user types. + * @param cacheClassLoaders the map with mappings between cache names and the corresponding class loaders + * + * @throws XmlConfigurationException if anything went wrong parsing the XML + */ + public XmlConfiguration(URL url, final ClassLoader classLoader, final Map cacheClassLoaders) + throws XmlConfigurationException { + + this.source = requireNonNull(url, "The url can not be null"); + requireNonNull(classLoader, "The classLoader can not be null"); + this.cacheClassLoaders = requireNonNull(cacheClassLoaders, "The cacheClassLoaders map can not be null"); + + try { + ConfigurationParser parser = new ConfigurationParser(); + this.document = parser.uriToDocument(source.toURI()); + ConfigurationParser.XmlConfigurationWrapper configWrapper = parser.documentToConfig(document, classLoader, cacheClassLoaders); + this.configuration = configWrapper.getConfiguration(); + this.templates = configWrapper.getTemplates(); + + this.renderedDocument = ConfigurationParser.urlToText(url, document.getInputEncoding()); + } catch (XmlConfigurationException e) { + throw e; + } catch (Exception e) { + throw new XmlConfigurationException("Error parsing XML configuration at " + url, e); + } + } + + /** + * Constructs an instance of XmlConfiguration from the given XML DOM. + *

                                      + * The default ClassLoader will first try to use the thread context class loader, followed by the ClassLoader that + * loaded the Ehcache classes. + * + * @param xml XML Document Object Model + * + * @throws XmlConfigurationException if anything went wrong parsing the XML + */ + public XmlConfiguration(Document xml) throws XmlConfigurationException { + this(xml, ClassLoading.getDefaultClassLoader()); + } + + /** + * Constructs an instance of XmlConfiguration from the given XML DOM and using the provided {@code classLoader} to + * load user types (e.g. key and value Class instances). + * + * @param xml XML Document Object Model + * @param classLoader ClassLoader to use to load user types. + * + * @throws XmlConfigurationException if anything went wrong parsing the XML + */ + public XmlConfiguration(Document xml, ClassLoader classLoader) throws XmlConfigurationException { + this(xml, classLoader, emptyMap()); + } + + /** + * Constructs an instance of XmlConfiguration from the given XML DOM and using the provided {@code classLoader} to + * load user types (e.g. key and value Class instances). The {@code cacheClassLoaders} will let you specify a + * different {@link java.lang.ClassLoader} to use for each {@link org.ehcache.Cache} managed by the + * {@link org.ehcache.CacheManager} configured using this {@link org.ehcache.xml.XmlConfiguration}. Caches with + * aliases that do not appear in the map will use {@code classLoader} as a default. + * + * @param xml XML Document Object Model + * @param classLoader ClassLoader to use to load user types. + * @param cacheClassLoaders the map with mappings between cache names and the corresponding class loaders + * + * @throws XmlConfigurationException if anything went wrong parsing the XML + */ + public XmlConfiguration(Document xml, ClassLoader classLoader, Map cacheClassLoaders) throws XmlConfigurationException { + requireNonNull(xml, "The source-element cannot be null"); + requireNonNull(classLoader, "The classLoader can not be null"); + this.cacheClassLoaders = requireNonNull(cacheClassLoaders, "The cacheClassLoaders map can not be null"); + + this.source = null; + try { + ConfigurationParser parser = new ConfigurationParser(); + this.document = xml; + ConfigurationParser.XmlConfigurationWrapper configWrapper = parser.documentToConfig(document, classLoader, cacheClassLoaders); + this.configuration = configWrapper.getConfiguration(); + this.templates = configWrapper.getTemplates(); + + this.renderedDocument = documentToText(xml); + } catch (XmlConfigurationException e) { + throw e; + } catch (Exception e) { + throw new XmlConfigurationException("Error parsing XML configuration", e); + } + } + + /** + * Constructs an instance of XmlConfiguration from an existing configuration object. + * + * @param configuration existing configuration + * + * @throws XmlConfigurationException if anything went wrong converting to XML + */ + public XmlConfiguration(Configuration configuration) throws XmlConfigurationException { + this.source = null; + this.cacheClassLoaders = emptyMap(); + try { + ConfigurationParser parser = new ConfigurationParser(); + this.configuration = configuration; + this.templates = emptyMap(); + + this.document = parser.configToDocument(configuration); + this.renderedDocument = documentToText(document); + } catch (XmlConfigurationException e) { + throw e; + } catch (Exception e) { + throw new XmlConfigurationException("Error unparsing configuration: " + configuration, e); + } + } + + /** + * Return this configuration as an XML {@link org.w3c.dom.Document}. + * + * @return configuration XML DOM. + */ + public Document asDocument() { + return document; + } + + /** + * Return this configuration as a rendered XML string. + * + * @return configuration XML string + */ + public String asRenderedDocument() { + return renderedDocument; + } + + @Override + public String toString() { + return asRenderedDocument(); + } + + /** + * Exposes the URL where the XML file parsed or yet to be parsed was or will be sourced from. + * @return The URL provided at object instantiation + */ + public URL getURL() { + return source; + } + + /** + * Creates a new {@link CacheConfigurationBuilder} seeded with the cache-template configuration + * by the given {@code name} in the parsed XML configuration. + *

                                      + * Note that this version does not specify resources, which are mandatory to create a + * {@link CacheConfigurationBuilder}. So if the template does not define resources, this will throw. + * + * @param name the unique name identifying the cache-template element in the XML + * @param keyType the type of keys for the {@link CacheConfigurationBuilder} to use, must + * match the {@code key-type} declared in the template if declared in XML + * @param valueType the type of values for the {@link CacheConfigurationBuilder} to use, must + * match the {@code value-type} declared in the template if declared in XML + * @param type of keys + * @param type of values + * + * @return the preconfigured {@link CacheConfigurationBuilder} + * or {@code null} if no cache-template for the provided {@code name} + * + * @throws IllegalStateException if the template does not configure resources. + * @throws IllegalArgumentException if {@code keyType} or {@code valueType} don't match the declared type(s) of the template + * @throws ClassNotFoundException if a {@link java.lang.Class} declared in the XML couldn't be found + * @throws InstantiationException if a user provided {@link java.lang.Class} couldn't get instantiated + * @throws IllegalAccessException if a method (including constructor) couldn't be invoked on a user provided type + */ + @SuppressWarnings("unchecked") + public CacheConfigurationBuilder newCacheConfigurationBuilderFromTemplate(final String name, + final Class keyType, + final Class valueType) + throws InstantiationException, IllegalAccessException, ClassNotFoundException { + Template template = templates.get(name); + if (template == null) { + return null; + } else { + return template.builderFor(cacheClassLoaders.getOrDefault(name, getClassLoader()), keyType, valueType, null); + } + } + + /** + * Creates a new {@link CacheConfigurationBuilder} seeded with the cache-template configuration + * by the given {@code name} in the parsed XML configuration. + * + * @param name the unique name identifying the cache-template element in the XML + * @param keyType the type of keys for the {@link CacheConfigurationBuilder} to use, must + * match the {@code key-type} declared in the template if declared in XML + * @param valueType the type of values for the {@link CacheConfigurationBuilder} to use, must + * match the {@code value-type} declared in the template if declared in XML + * @param resourcePools Resources definitions that will be used + * @param type of keys + * @param type of values + * + * @return the preconfigured {@link CacheConfigurationBuilder} + * or {@code null} if no cache-template for the provided {@code name} + * + * @throws IllegalArgumentException if {@code keyType} or {@code valueType} don't match the declared type(s) of the template + * @throws ClassNotFoundException if a {@link java.lang.Class} declared in the XML couldn't be found + * @throws InstantiationException if a user provided {@link java.lang.Class} couldn't get instantiated + * @throws IllegalAccessException if a method (including constructor) couldn't be invoked on a user provided type + */ + @SuppressWarnings("unchecked") + public CacheConfigurationBuilder newCacheConfigurationBuilderFromTemplate(final String name, + final Class keyType, + final Class valueType, + final ResourcePools resourcePools) + throws InstantiationException, IllegalAccessException, ClassNotFoundException { + Template template = templates.get(name); + if (template == null) { + return null; + } else { + return template.builderFor(cacheClassLoaders.getOrDefault(name, getClassLoader()), keyType, valueType, requireNonNull(resourcePools)); + } + } + + /** + * Creates a new {@link CacheConfigurationBuilder} seeded with the cache-template configuration + * by the given {@code name} in the parsed XML configuration. + * + * @param name the unique name identifying the cache-template element in the XML + * @param keyType the type of keys for the {@link CacheConfigurationBuilder} to use, must + * match the {@code key-type} declared in the template if declared in XML + * @param valueType the type of values for the {@link CacheConfigurationBuilder} to use, must + * match the {@code value-type} declared in the template if declared in XML + * @param resourcePoolsBuilder Resources definitions that will be used + * @param type of keys + * @param type of values + * + * @return the preconfigured {@link CacheConfigurationBuilder} + * or {@code null} if no cache-template for the provided {@code name} + * + * @throws IllegalArgumentException if {@code keyType} or {@code valueType} don't match the declared type(s) of the template + * @throws ClassNotFoundException if a {@link java.lang.Class} declared in the XML couldn't be found + * @throws InstantiationException if a user provided {@link java.lang.Class} couldn't get instantiated + * @throws IllegalAccessException if a method (including constructor) couldn't be invoked on a user provided type + */ + @SuppressWarnings("unchecked") + public CacheConfigurationBuilder newCacheConfigurationBuilderFromTemplate(final String name, + final Class keyType, + final Class valueType, + final Builder resourcePoolsBuilder) + throws InstantiationException, IllegalAccessException, ClassNotFoundException { + return newCacheConfigurationBuilderFromTemplate(name, keyType, valueType, resourcePoolsBuilder.build()); + } + + @Override + public Map> getCacheConfigurations() { + return configuration.getCacheConfigurations(); + } + + @Override + public Collection> getServiceCreationConfigurations() { + return configuration.getServiceCreationConfigurations(); + } + + @Override + public ClassLoader getClassLoader() { + return configuration.getClassLoader(); + } + + @Override + public FluentConfigurationBuilder derive() { + return newConfigurationBuilder(this); + } + + public interface Template { + CacheConfigurationBuilder builderFor(ClassLoader classLoader, Class keyType, Class valueType, ResourcePools resourcePools) throws ClassNotFoundException, InstantiationException, IllegalAccessException; + } + + public static Class getClassForName(String name, ClassLoader classLoader) throws ClassNotFoundException { + String klazz = name.trim(); + return PRETTY_FORMATS.stream().filter(p -> p.applies().test(klazz)).findFirst().map(PrettyClassFormat::lookup).orElseThrow(AssertionError::new).lookup(klazz, classLoader); + } + + private static final List PRETTY_FORMATS = asList( + //Primitive Types + when("boolean"::equals).then((n, l) -> Boolean.TYPE), + when("byte"::equals).then((n, l) -> Byte.TYPE), + when("short"::equals).then((n, l) -> Short.TYPE), + when("int"::equals).then((n, l) -> Integer.TYPE), + when("long"::equals).then((n, l) -> Long.TYPE), + when("char"::equals).then((n, l) -> Character.TYPE), + when("float"::equals).then((n, l) -> Float.TYPE), + when("double"::equals).then((n, l) -> Double.TYPE), + + //Java Language Array Syntax + when(n -> n.endsWith("[]")).then((n, l) -> { + String component = n.split("(\\[\\])+$", 2)[0]; + int dimensions = (n.length() - component.length()) >> 1; + return Array.newInstance(getClassForName(component, l), new int[dimensions]).getClass(); + }), + + //Inner Classes + when(n -> n.contains(".")).then((n, l) -> { + try { + return forName(n, false, l); + } catch (ClassNotFoundException e) { + int innerSeperator = n.lastIndexOf("."); + if (innerSeperator == -1) { + throw e; + } else { + return forName(n.substring(0, innerSeperator) + "$" + n.substring(innerSeperator + 1), false, l); + } + } + }), + + //Everything Else + when(n -> true).then((n, l) -> forName(n, false, l)) + ); + + interface PrettyClassFormat { + + static Builder when(Predicate predicate) { + return lookup -> new PrettyClassFormat() { + @Override + public Predicate applies() { + return predicate; + } + + @Override + public Lookup lookup() { + return lookup; + } + }; + } + + Predicate applies(); + + Lookup lookup(); + + interface Builder { + PrettyClassFormat then(Lookup lookup); + } + } + + private interface Lookup { + + Class lookup(String name, ClassLoader loader) throws ClassNotFoundException; + } + +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/XmlModel.java b/ehcache-xml/src/main/java/org/ehcache/xml/XmlModel.java new file mode 100644 index 0000000000..6a549cbfa3 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/XmlModel.java @@ -0,0 +1,107 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.xml; + +import java.time.temporal.ChronoUnit; +import java.time.temporal.TemporalUnit; +import java.util.concurrent.TimeUnit; + +public class XmlModel { + public static TemporalUnit convertToJavaTimeUnit(org.ehcache.xml.model.TimeUnit unit) { + switch (unit) { + case NANOS: + return ChronoUnit.NANOS; + case MICROS: + return ChronoUnit.MICROS; + case MILLIS: + return ChronoUnit.MILLIS; + case SECONDS: + return ChronoUnit.SECONDS; + case MINUTES: + return ChronoUnit.MINUTES; + case HOURS: + return ChronoUnit.HOURS; + case DAYS: + return ChronoUnit.DAYS; + default: + throw new IllegalArgumentException("Unknown time unit: " + unit); + } + } + + public static TimeUnit convertToJUCTimeUnit(org.ehcache.xml.model.TimeUnit unit) { + switch (unit) { + case NANOS: + return TimeUnit.NANOSECONDS; + case MICROS: + return TimeUnit.MICROSECONDS; + case MILLIS: + return TimeUnit.MILLISECONDS; + case SECONDS: + return TimeUnit.SECONDS; + case MINUTES: + return TimeUnit.MINUTES; + case HOURS: + return TimeUnit.HOURS; + case DAYS: + return TimeUnit.DAYS; + default: + throw new IllegalArgumentException("Unknown time unit: " + unit); + } + } + + public static TemporalUnit convertToJavaTemporalUnit(org.ehcache.xml.model.TimeUnit unit) { + switch (unit) { + case NANOS: + return ChronoUnit.NANOS; + case MICROS: + return ChronoUnit.MICROS; + case MILLIS: + return ChronoUnit.MILLIS; + case SECONDS: + return ChronoUnit.SECONDS; + case MINUTES: + return ChronoUnit.MINUTES; + case HOURS: + return ChronoUnit.HOURS; + case DAYS: + return ChronoUnit.DAYS; + default: + throw new IllegalArgumentException("Unknown time unit: " + unit); + } + } + + public static org.ehcache.xml.model.TimeUnit convertToXmlTimeUnit(TimeUnit unit) { + switch (unit) { + case NANOSECONDS: + return org.ehcache.xml.model.TimeUnit.NANOS; + case MICROSECONDS: + return org.ehcache.xml.model.TimeUnit.MICROS; + case MILLISECONDS: + return org.ehcache.xml.model.TimeUnit.MILLIS; + case SECONDS: + return org.ehcache.xml.model.TimeUnit.SECONDS; + case MINUTES: + return org.ehcache.xml.model.TimeUnit.MINUTES; + case HOURS: + return org.ehcache.xml.model.TimeUnit.HOURS; + case DAYS: + return org.ehcache.xml.model.TimeUnit.DAYS; + default: + throw new IllegalArgumentException("Unknown time unit: " + unit); + } + } + +} diff --git a/xml/src/main/java/org/ehcache/xml/exceptions/XmlConfigurationException.java b/ehcache-xml/src/main/java/org/ehcache/xml/exceptions/XmlConfigurationException.java similarity index 100% rename from xml/src/main/java/org/ehcache/xml/exceptions/XmlConfigurationException.java rename to ehcache-xml/src/main/java/org/ehcache/xml/exceptions/XmlConfigurationException.java diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheDefinition.java b/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheDefinition.java new file mode 100644 index 0000000000..b91ca903aa --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheDefinition.java @@ -0,0 +1,24 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.model; + +public class CacheDefinition extends CacheSpec { + + public CacheDefinition(String id, BaseCacheType... sources) { + super(id, sources); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheSpec.java b/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheSpec.java new file mode 100644 index 0000000000..b57d192f1f --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheSpec.java @@ -0,0 +1,156 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.model; + +import org.ehcache.xml.JaxbHelper; +import org.w3c.dom.Element; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static java.util.Arrays.asList; +import static java.util.Collections.emptyList; +import static java.util.Optional.ofNullable; +import static java.util.function.Function.identity; + +public class CacheSpec implements CacheTemplate { + + protected final List sources; + private final String id; + + public CacheSpec(String id, BaseCacheType... sources) { + this.id = id; + this.sources = asList(sources); + } + + public String id() { + return id; + } + + @Override + public String keyType() { + return key().map(CacheEntryType::getValue).orElseGet(() -> extract(source -> JaxbHelper.findDefaultValue(source, "keyType")).orElse(null)); + } + + @Override + public String keySerializer() { + return key().map(CacheEntryType::getSerializer).orElse(null); + } + + @Override + public String keyCopier() { + return key().map(CacheEntryType::getCopier).orElse(null); + } + + private Optional key() { + return extract(BaseCacheType::getKeyType); + } + + @Override + public String valueType() { + return value().map(CacheEntryType::getValue).orElseGet(() -> extract(source -> JaxbHelper.findDefaultValue(source, "keyType")).orElse(null)); + } + + @Override + public String valueSerializer() { + return value().map(CacheEntryType::getSerializer).orElse(null); + } + + @Override + public String valueCopier() { + return value().map(CacheEntryType::getCopier).orElse(null); + } + + private Optional value() { + return extract(BaseCacheType::getValueType); + } + + @Override + public String evictionAdvisor() { + return extract(BaseCacheType::getEvictionAdvisor).orElse(null); + } + + @Override + public Expiry expiry() { + return extract(BaseCacheType::getExpiry).map(Expiry::new).orElse(null); + } + + @Override + public List getResources() { + return extract(BaseCacheType::getResources).map(ResourcesType::getResource).orElse(emptyList()); + } + + @Override + public String loaderWriter() { + return extract(BaseCacheType::getLoaderWriter).map(CacheLoaderWriterType::getClazz).orElse(null); + } + + @Override + public String resilienceStrategy() { + return extract(BaseCacheType::getResilience).orElse(null); + } + + @Override + public ListenersConfig listenersConfig() { + ListenersType base = null; + ArrayList additionals = new ArrayList<>(); + for (BaseCacheType source : sources) { + if (source.getListeners() != null) { + if (base == null) { + base = source.getListeners(); + } else { + additionals.add(source.getListeners()); + } + } + } + return base != null ? new ListenersConfig(base, additionals.toArray(new ListenersType[0])) : null; + } + + @Override + public Collection serviceConfigExtensions() { + return sources.stream().flatMap(s -> s.getServiceConfiguration().stream()) + .collect(Collectors.toMap(Element::getTagName, identity(), (a, b) -> a)).values(); + } + + @Override + public Heap getHeap() { + return extract(BaseCacheType::getHeap).orElse(null); + } + + @Override + public CacheLoaderWriterType.WriteBehind writeBehind() { + return extract(BaseCacheType::getLoaderWriter).map(CacheLoaderWriterType::getWriteBehind).orElse(null); + } + + @Override + public DiskStoreSettingsType diskStoreSettings() { + return extract(BaseCacheType::getDiskStoreSettings).orElse(null); + } + + @Override + public SizeOfEngineLimits heapStoreSettings() { + return extract(BaseCacheType::getHeapStoreSettings).map(SizeOfEngineLimits::new).orElse(null); + } + + private Optional extract(Function extractor) { + return sources.stream().map(s -> ofNullable(extractor.apply(s))).filter(Optional::isPresent).map(Optional::get).findFirst(); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheTemplate.java b/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheTemplate.java new file mode 100644 index 0000000000..23f94027be --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/model/CacheTemplate.java @@ -0,0 +1,69 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.model; + +import org.w3c.dom.Element; + +import java.util.Collection; +import java.util.List; + +public interface CacheTemplate { + + String id(); + + String keyType(); + + String keySerializer(); + + String keyCopier(); + + String valueType(); + + String valueSerializer(); + + String valueCopier(); + + String evictionAdvisor(); + + Expiry expiry(); + + Heap getHeap(); + + List getResources(); + + String loaderWriter(); + + String resilienceStrategy(); + + ListenersConfig listenersConfig(); + + Collection serviceConfigExtensions(); + + CacheLoaderWriterType.WriteBehind writeBehind(); + + DiskStoreSettingsType diskStoreSettings(); + + SizeOfEngineLimits heapStoreSettings(); + + class Impl extends CacheSpec { + + public Impl(CacheTemplateType cacheTemplateType) { + super(cacheTemplateType.getName(), cacheTemplateType); + } + } + +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/model/Expiry.java b/ehcache-xml/src/main/java/org/ehcache/xml/model/Expiry.java new file mode 100644 index 0000000000..2307708328 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/model/Expiry.java @@ -0,0 +1,69 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.model; + +import org.ehcache.xml.XmlModel; + +import java.time.temporal.TemporalUnit; + +public class Expiry { + + private final ExpiryType type; + + public Expiry(final ExpiryType type) { + this.type = type; + } + + public boolean isUserDef() { + return type != null && type.getClazz() != null; + } + + public boolean isTTI() { + return type != null && type.getTti() != null; + } + + public boolean isTTL() { + return type != null && type.getTtl() != null; + } + + public String type() { + return type.getClazz(); + } + + public long value() { + final TimeTypeWithPropSubst time; + if(isTTI()) { + time = type.getTti(); + } else { + time = type.getTtl(); + } + return time == null ? 0L : time.getValue().longValue(); + } + + public TemporalUnit unit() { + final TimeTypeWithPropSubst time; + if(isTTI()) { + time = type.getTti(); + } else { + time = type.getTtl(); + } + if(time != null) { + return XmlModel.convertToJavaTemporalUnit(time.getUnit()); + } + return null; + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/model/ListenersConfig.java b/ehcache-xml/src/main/java/org/ehcache/xml/model/ListenersConfig.java new file mode 100644 index 0000000000..ff8f5dbe6a --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/model/ListenersConfig.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.model; + +import java.util.HashSet; +import java.util.Set; + +public class ListenersConfig { + + private final String threadPool; + private final Iterable listeners; + + public ListenersConfig(final ListenersType type, final ListenersType... others) { + String threadPool = type.getDispatcherThreadPool(); + Set listenerSet = new HashSet<>(); + listenerSet.addAll(type.getListener()); + + for (ListenersType other : others) { + if (threadPool == null && other.getDispatcherThreadPool() != null) { + threadPool = other.getDispatcherThreadPool(); + } + listenerSet.addAll(other.getListener()); + } + + this.threadPool = threadPool; + this.listeners = !listenerSet.isEmpty() ? listenerSet : null; + } + + public String threadPool() { + return threadPool; + } + + public Iterable listeners() { + return listeners; + } + +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/model/SizeOfEngineLimits.java b/ehcache-xml/src/main/java/org/ehcache/xml/model/SizeOfEngineLimits.java new file mode 100644 index 0000000000..795e61506e --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/model/SizeOfEngineLimits.java @@ -0,0 +1,58 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.model; + +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.xml.JaxbHelper; + +import java.math.BigInteger; + +public class SizeOfEngineLimits { + + private final SizeofType sizeoflimits; + + public SizeOfEngineLimits(SizeofType sizeoflimits) { + this.sizeoflimits = sizeoflimits; + } + + public long getMaxObjectGraphSize() { + SizeofType.MaxObjectGraphSize value = sizeoflimits.getMaxObjectGraphSize(); + if (value == null) { + return new BigInteger(JaxbHelper.findDefaultValue(sizeoflimits, "maxObjectGraphSize")).longValue(); + } else { + return value.getValue().longValue(); + } + } + + public long getMaxObjectSize() { + MemoryType value = sizeoflimits.getMaxObjectSize(); + if (value == null) { + return new BigInteger(JaxbHelper.findDefaultValue(sizeoflimits, "maxObjectSize")).longValue(); + } else { + return value.getValue().longValue(); + } + } + + public MemoryUnit getUnit() { + MemoryType value = sizeoflimits.getMaxObjectSize(); + if (value == null) { + return MemoryUnit.valueOf(new ObjectFactory().createMemoryType().getUnit().value().toUpperCase()); + } else { + return MemoryUnit.valueOf(value.getUnit().value().toUpperCase()); + } + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/multi/XmlMultiConfiguration.java b/ehcache-xml/src/main/java/org/ehcache/xml/multi/XmlMultiConfiguration.java new file mode 100644 index 0000000000..c87789b5ce --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/multi/XmlMultiConfiguration.java @@ -0,0 +1,517 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.multi; + +import org.ehcache.config.Configuration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.multi.model.Configurations; +import org.ehcache.xml.multi.model.ObjectFactory; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.xml.sax.SAXException; + +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; +import javax.xml.bind.Unmarshaller; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlSchema; +import javax.xml.bind.helpers.DefaultValidationEventHandler; +import javax.xml.namespace.QName; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.TransformerException; +import javax.xml.transform.stream.StreamSource; +import javax.xml.validation.Schema; +import java.io.IOException; +import java.net.URL; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static java.util.Collections.unmodifiableSet; +import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toMap; +import static org.ehcache.xml.ConfigurationParser.discoverSchema; +import static org.ehcache.xml.ConfigurationParser.documentBuilder; +import static org.ehcache.xml.ConfigurationParser.documentToText; +import static org.ehcache.xml.ConfigurationParser.urlToText; +import static org.ehcache.xml.XmlConfiguration.CORE_SCHEMA_URL; + +/** + * A collection of multiple Ehcache configurations. + */ +public class XmlMultiConfiguration { + + private static final URL MULTI_SCHEMA_URL = XmlMultiConfiguration.class.getResource("/ehcache-multi.xsd"); + private static final QName MULTI_SCHEMA_ROOT_NAME = new QName( + Configurations.class.getPackage().getAnnotation(XmlSchema.class).namespace(), + Configurations.class.getAnnotation(XmlRootElement.class).name()); + + private final Map configurations; + + private final Document document; + private final String renderedDocument; + + @SuppressWarnings("unchecked") + private XmlMultiConfiguration(URL url, BiFunction configParser) throws XmlConfigurationException { + try { + Schema schema = discoverSchema(new StreamSource(CORE_SCHEMA_URL.openStream()), new StreamSource(MULTI_SCHEMA_URL.openStream())); + DocumentBuilder domBuilder = documentBuilder(schema); + this.document = domBuilder.parse(url.toExternalForm()); + this.renderedDocument = urlToText(url, document.getInputEncoding()); + + Element rootElement = document.getDocumentElement(); + + QName rootName = new QName(rootElement.getNamespaceURI(), rootElement.getLocalName()); + if (!MULTI_SCHEMA_ROOT_NAME.equals(rootName)) { + throw new XmlConfigurationException("Expecting " + MULTI_SCHEMA_ROOT_NAME + " element; found " + rootName); + } + + JAXBContext jaxbContext = JAXBContext.newInstance(Configurations.class); + Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); + unmarshaller.setEventHandler(new DefaultValidationEventHandler()); + Configurations value = unmarshaller.unmarshal(rootElement, Configurations.class).getValue(); + + this.configurations = value.getConfiguration().stream().collect(toMap(Configurations.Configuration::getIdentity, c -> { + + Element configuration = c.getConfig(); + if (configuration != null) { + Document configDoc = domBuilder.newDocument(); + configDoc.appendChild(configDoc.importNode(configuration, true)); + return new SingleConfig(configParser.apply(c.getIdentity(), configDoc)); + } else { + return new VariantConfig(c.getVariant().stream() + .collect(toMap(Configurations.Configuration.Variant::getType, v -> { + Document configDoc = domBuilder.newDocument(); + configDoc.appendChild(configDoc.importNode(v.getConfig(), true)); + return configParser.apply(c.getIdentity(), configDoc); + }))); + } + })); + } catch (ParserConfigurationException | SAXException | IOException | JAXBException e) { + throw new XmlConfigurationException(e); + } + } + + private XmlMultiConfiguration(Map configurations) { + try { + Schema schema = discoverSchema(new StreamSource(CORE_SCHEMA_URL.openStream()), new StreamSource(MULTI_SCHEMA_URL.openStream())); + + this.configurations = configurations; + + ObjectFactory objectFactory = new ObjectFactory(); + Configurations jaxb = objectFactory.createConfigurations().withConfiguration(configurations.entrySet().stream().map( + entry -> entry.getValue().unparse(objectFactory, objectFactory.createConfigurationsConfiguration().withIdentity(entry.getKey()))).collect(toList())); + + JAXBContext jaxbContext = JAXBContext.newInstance(Configurations.class); + Marshaller marshaller = jaxbContext.createMarshaller(); + marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true); + marshaller.setSchema(schema); + + this.document = documentBuilder(schema).newDocument(); + marshaller.marshal(jaxb, document); + this.renderedDocument = documentToText(document); + } catch (JAXBException | IOException | TransformerException | ParserConfigurationException | SAXException e) { + throw new XmlConfigurationException(e); + } + } + + /** + * Retrieve the singular configuration for {@code identity}. + *

                                      + * If the given identity is associated with multiple variant configurations then an {@code IllegalStateException} will + * be thrown. In this case the {@link #configuration(String, String)} method must be used to select a specific + * variant. + * + * @param identity identity to retrieve + * @return the configuration for the given identity; {@code null} if the identity is not in this configuration + * @throws IllegalArgumentException if the identity is associated with multiple variant configurations + */ + public Configuration configuration(String identity) throws IllegalArgumentException { + Config variants = configurations.get(identity); + if (variants == null) { + return null; + } else { + return variants.configuration(); + } + } + + /** + * Retrieve the singular configuration for {@code identity} and {@code variant}. + *

                                      + * If the given identity is associated only with a singular configuration then that configuration will be returned for + * all variants. + * + * @param identity identity to retrieve + * @param variant variant to retrieve + * @return the configuration for the given identity; {@code null} if the identity is not in this configuration + * @throws IllegalArgumentException if the given variant does not exist + */ + public Configuration configuration(String identity, String variant) { + Config config = configurations.get(identity); + if (config == null) { + return null; + } else { + return config.configuration(variant); + } + } + + /** + * Return the set of variants defined for the given configuration. + *

                                      + * If the given identity does not exist then an {@code IllegalArgumentException} is thrown. If the given identity is + * not variant-ed then an empty set is returned. + * + * @return the set of variants; possibly empty. + * @throws IllegalArgumentException if the identity does not exist + */ + public Set variants(String identity) throws IllegalArgumentException { + Config config = configurations.get(identity); + if (config == null) { + throw new IllegalArgumentException("Identity " + identity + " does not exist."); + } else { + return config.variants(); + } + } + + /** + * Return the set of identities defined in this multi-configuration. + * + * @return the defined identity set + */ + public Set identities() { + return unmodifiableSet(configurations.keySet()); + } + + /** + * Return this configuration as an XML {@link org.w3c.dom.Document}. + * + * @return configuration XML DOM. + */ + public Document asDocument() { + return document; + } + + /** + * Return this configuration as a rendered XML string. + * + * @return configuration XML string + */ + public String asRenderedDocument() { + return renderedDocument; + } + + @Override + public String toString() { + return asRenderedDocument(); + } + + private static Element unparseEhcacheConfiguration(Configuration config) { + if (config instanceof XmlConfiguration) { + return ((XmlConfiguration) config).asDocument().getDocumentElement(); + } else { + return new XmlConfiguration(config).asDocument().getDocumentElement(); + } + } + + private interface Config { + + Configuration configuration() throws IllegalStateException; + + Configuration configuration(String variant); + + Configurations.Configuration unparse(ObjectFactory factory, Configurations.Configuration container); + + Set variants(); + } + + private static class SingleConfig implements Config { + + private final Configuration config; + + private SingleConfig(Configuration config) { + this.config = config; + } + + @Override + public Configuration configuration() { + return config; + } + + @Override + public Configuration configuration(String variant) { + return configuration(); + } + + @Override + public Configurations.Configuration unparse(ObjectFactory factory, Configurations.Configuration container) { + return container.withConfig(unparseEhcacheConfiguration(config)); + } + + @Override + public Set variants() { + return emptySet(); + } + } + + private static class VariantConfig implements Config { + + private final Map configs; + + private VariantConfig(Map configs) { + this.configs = configs; + } + + @Override + public Configuration configuration() { + switch (configs.size()) { + case 0: + return null; + case 1: + return configs.values().iterator().next(); + default: + throw new IllegalStateException("Please choose a variant: " + configs.keySet()); + } + } + + @Override + public Configuration configuration(String variant) { + Configuration configuration = configs.get(variant); + if (configuration == null) { + throw new IllegalArgumentException("Please choose a valid variant: " + configs.keySet()); + } else { + return configuration; + } + } + + @Override + public Configurations.Configuration unparse(ObjectFactory factory, Configurations.Configuration container) { + return container.withVariant(configs.entrySet().stream() + .map(v -> factory.createConfigurationsConfigurationVariant() + .withType(v.getKey()) + .withConfig(unparseEhcacheConfiguration(v.getValue()))) + .collect(toList())); + } + + @Override + public Set variants() { + return unmodifiableSet(configs.keySet()); + } + } + + /** + * Create a builder seeded from an XML configuration. + *

                                      + * Enclosed configurations will parsed using {@link XmlConfiguration#XmlConfiguration(Document)}. + * + * @param xml xml seed resource + * @return a builder seeded with the xml configuration + * @see XmlConfiguration#XmlConfiguration(Document) + */ + public static Builder from(URL xml) { + return from(new XmlMultiConfiguration(xml, (identity, dom) -> new XmlConfiguration(dom))); + } + + /** + * Create a builder seeded from an XML configuration using the supplier class loader. + *

                                      + * Enclosed configurations will parsed using {@link XmlConfiguration#XmlConfiguration(Document, ClassLoader)}, which + * will be passed the classloader provided to this method. + * + * @param xml xml seed resource + * @param classLoader loader for the cache managers + * @return a builder seeded with the xml configuration + * @see XmlConfiguration#XmlConfiguration(Document, ClassLoader) + */ + public static Builder from(URL xml, ClassLoader classLoader) { + return from(new XmlMultiConfiguration(xml, (identity, dom) -> new XmlConfiguration(dom, classLoader))); + } + + /** + * Create a builder seeded from an existing {@code XmlMultiConfiguration}. + * + * @param config existing configuration seed + * @return a builder seeded with the xml configuration + */ + public static Builder from(XmlMultiConfiguration config) { + return new Builder() { + @Override + public Builder withManager(String identity, Configuration configuration) { + Map configurations = new HashMap<>(config.configurations); + configurations.put(identity, new SingleConfig(configuration)); + return from(new XmlMultiConfiguration(configurations)); + } + + @Override + public Builder withoutManager(String identity) { + Map configurations = config.configurations; + configurations.remove(identity); + return from(new XmlMultiConfiguration(configurations)); + } + + @Override + public Variant withManager(String identity) { + Map variants = new HashMap<>(); + + Config current = config.configurations.get(identity); + if (current instanceof VariantConfig) { + variants.putAll(((VariantConfig) current).configs); + } else if (current != null) { + throw new IllegalStateException("Existing non-variant configuration cannot be replaced - it must be removed first."); + } + + return new Variant() { + @Override + public Variant withoutVariant(String variant) { + variants.remove(variant); + return this; + } + + @Override + public Variant variant(String variant, Configuration configuration) { + variants.put(variant, configuration); + return this; + } + + @Override + public Builder withoutManager(String identity) { + return from(build()).withoutManager(identity); + } + + @Override + public Builder withManager(String identity, Configuration configuration) { + return from(build()).withManager(identity, configuration); + } + + @Override + public Variant withManager(String identity) { + return from(build()).withManager(identity); + } + + @Override + public XmlMultiConfiguration build() { + Map configurations = new HashMap<>(config.configurations); + configurations.put(identity, new VariantConfig(variants)); + return new XmlMultiConfiguration(configurations); + } + }; + } + + @Override + public XmlMultiConfiguration build() { + return config; + } + }; + } + + /** + * Create an initially empty builder. + * + * @return an empty builder + */ + public static Builder fromNothing() { + return from(new XmlMultiConfiguration(emptyMap())); + } + + /** + * An {@code XmlMultiConfiguration} builder. + */ + public interface Builder { + + /** + * Remove the configuration with the given identity + * + * @param identity configuration to remove + * @return a new builder instance + */ + Builder withoutManager(String identity); + + /** + * Add a new configuration with the given identity + * + * @param identity configuration identifier + * @param configuration configuration instance + * @return a new builder instance + */ + Builder withManager(String identity, Configuration configuration); + + /** + * Add a new configuration with the given identity built from the given builder. + * + * @param identity configuration identifier + * @param builder configuration builder + * @return a new builder instance + */ + default Builder withManager(String identity, org.ehcache.config.Builder builder) { + return withManager(identity, builder.build()); + } + + /** + * Add a new manager with variant configurations. + * + * @param identity configuration to add + * @return a new variant configuration builder + */ + Variant withManager(String identity); + + /** + * Build a new {@code XmlMultiConfiguration}. + * + * @return a new {@code XmlMultiConfiguration} + */ + XmlMultiConfiguration build(); + } + + /** + * A variant configuration builder. + */ + public interface Variant extends Builder { + + /** + * Remove the given configuration variant. + * + * @param variant variant to remove + * @return a new builder instance + */ + Variant withoutVariant(String variant); + + /** + * Add a new variant configuration + * + * @param variant configuration variant + * @param configuration configuration instance + * @return a new builder instance + */ + Variant variant(String variant, Configuration configuration); + + /** + * Add a new variant configuration built from the given builder. + * + * @param variant configuration variant + * @param builder configuration builder + * @return a new builder instance + */ + default Variant variant(String variant, org.ehcache.config.Builder builder) { + return variant(variant, builder.build()); + } + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/CacheEventDispatcherFactoryConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/CacheEventDispatcherFactoryConfigurationParser.java new file mode 100644 index 0000000000..706c1f7f2c --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/CacheEventDispatcherFactoryConfigurationParser.java @@ -0,0 +1,28 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration; +import org.ehcache.xml.model.ConfigType; + +public class CacheEventDispatcherFactoryConfigurationParser extends ThreadPoolServiceCreationConfigurationParser { + + public CacheEventDispatcherFactoryConfigurationParser() { + super(CacheEventDispatcherFactoryConfiguration.class, ConfigType::getEventDispatch, ConfigType::setEventDispatch, + CacheEventDispatcherFactoryConfiguration::new, CacheEventDispatcherFactoryConfiguration::getThreadPoolAlias); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/CacheManagerPersistenceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/CacheManagerPersistenceConfigurationParser.java new file mode 100644 index 0000000000..6d9cfef8a3 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/CacheManagerPersistenceConfigurationParser.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.PersistenceType; + +import java.io.File; + +public class CacheManagerPersistenceConfigurationParser + extends SimpleCoreServiceCreationConfigurationParser { + + public CacheManagerPersistenceConfigurationParser() { + super(CacheManagerPersistenceConfiguration.class, + ConfigType::getPersistence, ConfigType::setPersistence, + config -> new CacheManagerPersistenceConfiguration(new File(config.getDirectory())), + config -> new PersistenceType().withDirectory(config.getRootDirectory().toString())); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultCopyProviderConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultCopyProviderConfigurationParser.java new file mode 100644 index 0000000000..a9cd85e7e4 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultCopyProviderConfigurationParser.java @@ -0,0 +1,47 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; +import org.ehcache.spi.copy.Copier; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.CopierType; + +import static java.util.stream.Collectors.toList; +import static org.ehcache.xml.XmlConfiguration.getClassForName; + +public class DefaultCopyProviderConfigurationParser + extends SimpleCoreServiceCreationConfigurationParser { + + @SuppressWarnings("unchecked") + public DefaultCopyProviderConfigurationParser() { + super(DefaultCopyProviderConfiguration.class, + ConfigType::getDefaultCopiers, ConfigType::setDefaultCopiers, + (config, loader) -> { + DefaultCopyProviderConfiguration configuration = new DefaultCopyProviderConfiguration(); + for (CopierType.Copier copier : config.getCopier()) { + configuration.addCopierFor(getClassForName(copier.getType(), loader), (Class) getClassForName(copier.getValue(), loader)); + } + return configuration; + }, + config -> new CopierType() + .withCopier(config.getDefaults().entrySet().stream().map(entry -> new CopierType.Copier() + .withType(entry.getKey().getName()) + .withValue(entry.getValue().getClazz().getName())).collect(toList())) + ); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultSerializationProviderConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultSerializationProviderConfigurationParser.java new file mode 100644 index 0000000000..7838ad99bb --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultSerializationProviderConfigurationParser.java @@ -0,0 +1,49 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.SerializerType; + +import java.util.List; + +import static java.util.stream.Collectors.toList; +import static org.ehcache.xml.XmlConfiguration.getClassForName; + +public class DefaultSerializationProviderConfigurationParser + extends SimpleCoreServiceCreationConfigurationParser { + + @SuppressWarnings("unchecked") + public DefaultSerializationProviderConfigurationParser() { + super(DefaultSerializationProviderConfiguration.class, + ConfigType::getDefaultSerializers, ConfigType::setDefaultSerializers, + (config, loader) -> { + DefaultSerializationProviderConfiguration configuration = new DefaultSerializationProviderConfiguration(); + for (SerializerType.Serializer serializer : config.getSerializer()) { + configuration.addSerializerFor(getClassForName(serializer.getType(), loader), (Class) getClassForName(serializer.getValue(), loader)); + } + return configuration; + }, + config -> new SerializerType() + .withSerializer(config.getDefaultSerializers().entrySet().stream().map(entry -> new SerializerType.Serializer() + .withType(entry.getKey().getName()) + .withValue(entry.getValue().getName())).collect(toList())) + ); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultSizeOfEngineProviderConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultSizeOfEngineProviderConfigurationParser.java new file mode 100644 index 0000000000..ad77533953 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/DefaultSizeOfEngineProviderConfigurationParser.java @@ -0,0 +1,47 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.MemoryType; +import org.ehcache.xml.model.MemoryUnit; +import org.ehcache.xml.model.SizeOfEngineLimits; +import org.ehcache.xml.model.SizeofType; + +import java.math.BigInteger; + +public class DefaultSizeOfEngineProviderConfigurationParser + extends SimpleCoreServiceCreationConfigurationParser { + + public DefaultSizeOfEngineProviderConfigurationParser() { + super(DefaultSizeOfEngineProviderConfiguration.class, + ConfigType::getHeapStore, ConfigType::setHeapStore, + config -> { + SizeOfEngineLimits sizeOfEngineLimits = new SizeOfEngineLimits(config); + return new DefaultSizeOfEngineProviderConfiguration(sizeOfEngineLimits.getMaxObjectSize(), + sizeOfEngineLimits.getUnit(), sizeOfEngineLimits.getMaxObjectGraphSize()); + }, + config -> new SizeofType() + .withMaxObjectGraphSize(new SizeofType.MaxObjectGraphSize().withValue(BigInteger.valueOf(config.getMaxObjectGraphSize()))) + .withMaxObjectSize(new MemoryType() + .withValue(BigInteger.valueOf(config.getMaxObjectSize())) + .withUnit(MemoryUnit.fromValue(config.getUnit().toString())) + ) + ); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/OffHeapDiskStoreProviderConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/OffHeapDiskStoreProviderConfigurationParser.java new file mode 100644 index 0000000000..c4e483b758 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/OffHeapDiskStoreProviderConfigurationParser.java @@ -0,0 +1,31 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.ThreadPoolReferenceType; + +public class OffHeapDiskStoreProviderConfigurationParser extends ThreadPoolServiceCreationConfigurationParser { + + + public OffHeapDiskStoreProviderConfigurationParser() { + super(OffHeapDiskStoreProviderConfiguration.class, ConfigType::getDiskStore, ConfigType::setDiskStore, + OffHeapDiskStoreProviderConfiguration::new, OffHeapDiskStoreProviderConfiguration::getThreadPoolAlias); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/PooledExecutionServiceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/PooledExecutionServiceConfigurationParser.java new file mode 100644 index 0000000000..3fc1a4b179 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/PooledExecutionServiceConfigurationParser.java @@ -0,0 +1,62 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.ThreadPoolsType; + +import java.math.BigInteger; +import java.util.List; + +import static java.util.stream.Collectors.toList; + +public class PooledExecutionServiceConfigurationParser + extends SimpleCoreServiceCreationConfigurationParser { + + public PooledExecutionServiceConfigurationParser() { + super(PooledExecutionServiceConfiguration.class, + ConfigType::getThreadPools, ConfigType::setThreadPools, + config -> { + PooledExecutionServiceConfiguration poolsConfiguration = new PooledExecutionServiceConfiguration(); + for (ThreadPoolsType.ThreadPool pool : config.getThreadPool()) { + if (pool.isDefault()) { + poolsConfiguration.addDefaultPool(pool.getAlias(), pool.getMinSize().intValue(), pool.getMaxSize().intValue()); + } else { + poolsConfiguration.addPool(pool.getAlias(), pool.getMinSize().intValue(), pool.getMaxSize().intValue()); + } + } + return poolsConfiguration; + }, + config -> { + List threadPools = config.getPoolConfigurations().entrySet().stream().map(entry -> { + PooledExecutionServiceConfiguration.PoolConfiguration poolConfig = entry.getValue(); + String alias = entry.getKey(); + ThreadPoolsType.ThreadPool threadPool = new ThreadPoolsType.ThreadPool() + .withAlias(alias) + .withMinSize(BigInteger.valueOf(poolConfig.minSize())) + .withMaxSize(BigInteger.valueOf(poolConfig.maxSize())); + if (alias.equals(config.getDefaultPoolAlias())) { + threadPool.setDefault(true); + } + return threadPool; + }).collect(toList()); + return new ThreadPoolsType().withThreadPool(threadPools); + } + ); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/SimpleCoreServiceCreationConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/SimpleCoreServiceCreationConfigurationParser.java new file mode 100644 index 0000000000..f9e2c01ca7 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/SimpleCoreServiceCreationConfigurationParser.java @@ -0,0 +1,101 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.FluentConfigurationBuilder; +import org.ehcache.core.spi.service.ServiceUtils; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.CoreServiceCreationConfigurationParser; +import org.ehcache.xml.model.ConfigType; + +import java.util.function.BiConsumer; +import java.util.function.BinaryOperator; +import java.util.function.Function; + +class SimpleCoreServiceCreationConfigurationParser> implements CoreServiceCreationConfigurationParser { + + private final Class configType; + + private final Function getter; + private final BiConsumer setter; + + private final Parser parser; + private final Function unparser; + + private final BinaryOperator merger; + + SimpleCoreServiceCreationConfigurationParser(Class configType, + Function getter, BiConsumer setter, + Function parser, Function unparser) { + this(configType, getter, setter, (config, loader) -> parser.apply(config), unparser, (a, b) -> { throw new IllegalStateException(); }); + } + + SimpleCoreServiceCreationConfigurationParser(Class configType, Function getter, BiConsumer setter, + Function parser, Function unparser, BinaryOperator merger) { + this(configType, getter, setter, (config, loader) -> parser.apply(config), unparser, merger); + } + + SimpleCoreServiceCreationConfigurationParser(Class configType, + Function getter, BiConsumer setter, + Parser parser, Function unparser) { + this(configType, getter, setter, parser, unparser, (a, b) -> { throw new IllegalStateException(); }); + } + + SimpleCoreServiceCreationConfigurationParser(Class configType, + Function getter, BiConsumer setter, + Parser parser, Function unparser, BinaryOperator merger) { + this.configType = configType; + this.getter = getter; + this.setter = setter; + this.parser = parser; + this.unparser = unparser; + this.merger = merger; + } + + @Override + public final FluentConfigurationBuilder parseServiceCreationConfiguration(ConfigType root, ClassLoader classLoader, FluentConfigurationBuilder builder) throws ClassNotFoundException { + T config = getter.apply(root); + if (config == null) { + return builder; + } else { + return builder.withService(parser.parse(config, classLoader)); + } + } + + @Override + public ConfigType unparseServiceCreationConfiguration(Configuration configuration, ConfigType configType) { + U config = ServiceUtils.findSingletonAmongst(this.configType, configuration.getServiceCreationConfigurations()); + if (config == null) { + return configType; + } else { + T foo = getter.apply(configType); + if (foo == null) { + setter.accept(configType, unparser.apply(config)); + } else { + setter.accept(configType, merger.apply(foo, unparser.apply(config))); + } + return configType; + } + } + + @FunctionalInterface + interface Parser> { + + U parse(T t, ClassLoader classLoader) throws ClassNotFoundException; + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/ThreadPoolServiceCreationConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/ThreadPoolServiceCreationConfigurationParser.java new file mode 100644 index 0000000000..c039471933 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/ThreadPoolServiceCreationConfigurationParser.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.ThreadPoolReferenceType; + +import java.util.function.BiConsumer; +import java.util.function.Function; + +class ThreadPoolServiceCreationConfigurationParser> extends SimpleCoreServiceCreationConfigurationParser { + + ThreadPoolServiceCreationConfigurationParser(Class configType, + Function getter, BiConsumer setter, + Function parser, Function unparser) { + super(configType, getter, setter, config -> parser.apply(config.getThreadPool()), + config -> new ThreadPoolReferenceType().withThreadPool(unparser.apply(config))); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/provider/WriteBehindProviderConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/provider/WriteBehindProviderConfigurationParser.java new file mode 100644 index 0000000000..f2e0568b26 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/provider/WriteBehindProviderConfigurationParser.java @@ -0,0 +1,29 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.ThreadPoolReferenceType; + +public class WriteBehindProviderConfigurationParser extends ThreadPoolServiceCreationConfigurationParser { + + public WriteBehindProviderConfigurationParser() { + super(WriteBehindProviderConfiguration.class, ConfigType::getWriteBehind, ConfigType::setWriteBehind, + WriteBehindProviderConfiguration::new, WriteBehindProviderConfiguration::getThreadPoolAlias); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheEventDispatcherConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheEventDispatcherConfigurationParser.java new file mode 100644 index 0000000000..7dc1ed7299 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheEventDispatcherConfigurationParser.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.impl.config.event.DefaultCacheEventDispatcherConfiguration; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.ListenersConfig; +import org.ehcache.xml.model.ListenersType; + +import static java.util.Optional.ofNullable; + +public class DefaultCacheEventDispatcherConfigurationParser + extends SimpleCoreServiceConfigurationParser { + + public DefaultCacheEventDispatcherConfigurationParser() { + super(DefaultCacheEventDispatcherConfiguration.class, + CacheTemplate::listenersConfig, + config -> ofNullable(config.threadPool()).map(DefaultCacheEventDispatcherConfiguration::new).orElse(null), + CacheType::getListeners, CacheType::setListeners, + config -> new ListenersType().withDispatcherThreadPool(config.getThreadPoolAlias()), + (initial, additional) -> initial.withDispatcherThreadPool(additional.getDispatcherThreadPool())); + } +} + diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheEventListenerConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheEventListenerConfigurationParser.java new file mode 100644 index 0000000000..4cd2eeae5b --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheEventListenerConfigurationParser.java @@ -0,0 +1,100 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; +import org.ehcache.event.CacheEventListener; +import org.ehcache.event.EventFiring; +import org.ehcache.event.EventOrdering; +import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; +import org.ehcache.xml.CoreServiceConfigurationParser; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.EventFiringType; +import org.ehcache.xml.model.EventOrderingType; +import org.ehcache.xml.model.EventType; +import org.ehcache.xml.model.ListenersConfig; +import org.ehcache.xml.model.ListenersType; + +import java.util.Collection; +import java.util.Set; + +import static java.util.stream.Collectors.toSet; +import static org.ehcache.core.spi.service.ServiceUtils.findAmongst; +import static org.ehcache.xml.XmlConfiguration.getClassForName; + +public class DefaultCacheEventListenerConfigurationParser implements CoreServiceConfigurationParser { + + @Override + public CacheConfigurationBuilder parseServiceConfiguration(CacheTemplate cacheDefinition, ClassLoader cacheClassLoader, + CacheConfigurationBuilder cacheBuilder) throws ClassNotFoundException { + ListenersConfig listenersConfig = cacheDefinition.listenersConfig(); + if(listenersConfig != null && listenersConfig.listeners() != null) { + for (ListenersType.Listener listener : listenersConfig.listeners()) { + Set eventSetToFireOn = listener.getEventsToFireOn().stream() + .map(EventType::value).map(org.ehcache.event.EventType::valueOf).collect(toSet()); + @SuppressWarnings("unchecked") + Class> cacheEventListenerClass = (Class>) getClassForName(listener.getClazz(), cacheClassLoader); + CacheEventListenerConfigurationBuilder listenerBuilder = CacheEventListenerConfigurationBuilder + .newEventListenerConfiguration(cacheEventListenerClass, eventSetToFireOn) + .firingMode(EventFiring.valueOf(listener.getEventFiringMode().value())) + .eventOrdering(EventOrdering.valueOf(listener.getEventOrderingMode().value())); + cacheBuilder = cacheBuilder.withService(listenerBuilder); + } + } + + return cacheBuilder; + } + + @Override + public CacheType unparseServiceConfiguration(CacheConfiguration cacheConfiguration, CacheType cacheType) { + Collection serviceConfigs = + findAmongst(DefaultCacheEventListenerConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + if (!serviceConfigs.isEmpty()) { + ListenersType listenersType = cacheType.getListeners(); + if (listenersType == null) { + listenersType = new ListenersType(); + cacheType.setListeners(listenersType); + } + + Set listeners = serviceConfigs.stream().map(serviceConfig -> { + ListenersType.Listener listener = new ListenersType.Listener(); + if(serviceConfig.getInstance() == null) { + return listener.withClazz(serviceConfig.getClazz().getName()) + .withEventFiringMode(EventFiringType.fromValue(serviceConfig.firingMode().name())) + .withEventOrderingMode(EventOrderingType.fromValue(serviceConfig.orderingMode().name())) + .withEventsToFireOn(serviceConfig.fireOn() + .stream() + .map(eventType -> EventType.fromValue(eventType.name())) + .collect(toSet())); + } else { + throw new XmlConfigurationException("XML translation for instance based initialization for DefaultCacheEventListenerConfiguration is not supported"); + + } + }).collect(toSet()); + + cacheType.withListeners(listenersType.withListener(listeners)); + } + + return cacheType; + } +} + diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheLoaderWriterConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheLoaderWriterConfigurationParser.java new file mode 100644 index 0000000000..7144111c85 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCacheLoaderWriterConfigurationParser.java @@ -0,0 +1,45 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheLoaderWriterType; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; + +import static org.ehcache.xml.XmlConfiguration.getClassForName; + +public class DefaultCacheLoaderWriterConfigurationParser + extends SimpleCoreServiceConfigurationParser { + + @SuppressWarnings("unchecked") + public DefaultCacheLoaderWriterConfigurationParser() { + super(DefaultCacheLoaderWriterConfiguration.class, + CacheTemplate::loaderWriter, + (config, loader) -> new DefaultCacheLoaderWriterConfiguration((Class>) getClassForName(config, loader)), + CacheType::getLoaderWriter, CacheType::setLoaderWriter, + config -> { + if(config.getInstance() == null) { + return new CacheLoaderWriterType().withClazz(config.getClazz().getName()); + } else { + throw new XmlConfigurationException("XML translation for instance based initialization for DefaultCacheLoaderWriterConfiguration is not supported"); + } + }); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCopierConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCopierConfigurationParser.java new file mode 100644 index 0000000000..8bf44511be --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultCopierConfigurationParser.java @@ -0,0 +1,68 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.impl.config.copy.DefaultCopierConfiguration; +import org.ehcache.xml.CoreServiceConfigurationParser; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; + +import java.util.Collection; + +import static org.ehcache.core.spi.service.ServiceUtils.findAmongst; +import static org.ehcache.xml.XmlConfiguration.getClassForName; + +public class DefaultCopierConfigurationParser implements CoreServiceConfigurationParser { + + @Override @SuppressWarnings({"unchecked", "rawtypes"}) + public CacheConfigurationBuilder parseServiceConfiguration(CacheTemplate cacheDefinition, ClassLoader cacheClassLoader, + CacheConfigurationBuilder cacheBuilder) throws ClassNotFoundException { + if (cacheDefinition.keyCopier() != null) { + Class keyCopier = getClassForName(cacheDefinition.keyCopier(), cacheClassLoader); + cacheBuilder = cacheBuilder.withService(new DefaultCopierConfiguration(keyCopier, DefaultCopierConfiguration.Type.KEY)); + } + + if (cacheDefinition.valueCopier() != null) { + Class valueCopier = getClassForName(cacheDefinition.valueCopier(), cacheClassLoader); + cacheBuilder = cacheBuilder.withService(new DefaultCopierConfiguration(valueCopier, DefaultCopierConfiguration.Type.VALUE)); + } + + return cacheBuilder; + } + + @Override + @SuppressWarnings("rawtypes") + public CacheType unparseServiceConfiguration(CacheConfiguration cacheConfiguration, CacheType cacheType) { + Collection copierConfigs = + findAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); + for (DefaultCopierConfiguration copierConfig : copierConfigs) { + if(copierConfig.getInstance() == null) { + if (copierConfig.getType() == DefaultCopierConfiguration.Type.KEY) { + cacheType.getKeyType().setCopier(copierConfig.getClazz().getName()); + } else { + cacheType.getValueType().setCopier(copierConfig.getClazz().getName()); + } + } else { + throw new XmlConfigurationException("XML translation for instance based initialization for DefaultCopierConfiguration is not supported"); + } + } + return cacheType; + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultResilienceStrategyConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultResilienceStrategyConfigurationParser.java new file mode 100644 index 0000000000..6aa97a81fc --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultResilienceStrategyConfigurationParser.java @@ -0,0 +1,43 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; + +import static org.ehcache.xml.XmlConfiguration.getClassForName; + +public class DefaultResilienceStrategyConfigurationParser + extends SimpleCoreServiceConfigurationParser { + + @SuppressWarnings({"unchecked", "rawtypes"}) + public DefaultResilienceStrategyConfigurationParser() { + super(DefaultResilienceStrategyConfiguration.class, + CacheTemplate::resilienceStrategy, + (config, loader) -> new DefaultResilienceStrategyConfiguration((Class) getClassForName(config, loader)), + CacheType::getResilience, CacheType::setResilience, config -> { + if(config.getInstance() == null) { + return config.getClazz().getName(); + } else { + throw new XmlConfigurationException("XML translation for instance based initialization for DefaultResilienceStrategyConfiguration is not supported"); + } + }); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultSerializerConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultSerializerConfigurationParser.java new file mode 100644 index 0000000000..1ca1c1bae8 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultSerializerConfigurationParser.java @@ -0,0 +1,67 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; +import org.ehcache.xml.CoreServiceConfigurationParser; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; + +import java.util.Collection; + +import static org.ehcache.core.spi.service.ServiceUtils.findAmongst; +import static org.ehcache.xml.XmlConfiguration.getClassForName; + +public class DefaultSerializerConfigurationParser implements CoreServiceConfigurationParser { + + @Override @SuppressWarnings({"rawtypes", "unchecked"}) + public CacheConfigurationBuilder parseServiceConfiguration(CacheTemplate cacheDefinition, ClassLoader cacheClassLoader, + CacheConfigurationBuilder cacheBuilder) throws ClassNotFoundException { + if (cacheDefinition.keySerializer() != null) { + Class keySerializer = getClassForName(cacheDefinition.keySerializer(), cacheClassLoader); + cacheBuilder = cacheBuilder.withService(new DefaultSerializerConfiguration(keySerializer, DefaultSerializerConfiguration.Type.KEY)); + } + + if (cacheDefinition.valueSerializer() != null) { + Class valueSerializer = getClassForName(cacheDefinition.valueSerializer(), cacheClassLoader); + cacheBuilder = cacheBuilder.withService(new DefaultSerializerConfiguration(valueSerializer, DefaultSerializerConfiguration.Type.VALUE)); + } + + return cacheBuilder; + } + + @Override @SuppressWarnings({"rawtypes", "unchecked"}) + public CacheType unparseServiceConfiguration(CacheConfiguration cacheConfiguration, CacheType cacheType) { + Collection serializerConfigs = + findAmongst(DefaultSerializerConfiguration.class, cacheConfiguration.getServiceConfigurations()); + for (DefaultSerializerConfiguration serializerConfig : serializerConfigs) { + if(serializerConfig.getInstance() == null) { + if (serializerConfig.getType() == DefaultSerializerConfiguration.Type.KEY) { + cacheType.getKeyType().setSerializer(serializerConfig.getClazz().getName()); + } else { + cacheType.getValueType().setSerializer(serializerConfig.getClazz().getName()); + } + } else { + throw new XmlConfigurationException("XML translation for instance based initialization for DefaultSerializerConfiguration is not supported"); + } + } + return cacheType; + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultSizeOfEngineConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultSizeOfEngineConfigurationParser.java new file mode 100644 index 0000000000..93c795e712 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultSizeOfEngineConfigurationParser.java @@ -0,0 +1,44 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.MemoryType; +import org.ehcache.xml.model.MemoryUnit; +import org.ehcache.xml.model.SizeOfEngineLimits; +import org.ehcache.xml.model.SizeofType; + +import java.math.BigInteger; + +public class DefaultSizeOfEngineConfigurationParser + extends SimpleCoreServiceConfigurationParser { + + public DefaultSizeOfEngineConfigurationParser() { + super(DefaultSizeOfEngineConfiguration.class, + CacheTemplate::heapStoreSettings, + config -> new DefaultSizeOfEngineConfiguration(config.getMaxObjectSize(), config.getUnit(), config.getMaxObjectGraphSize()), + CacheType::getHeapStoreSettings, CacheType::setHeapStoreSettings, + config -> new SizeofType() + .withMaxObjectGraphSize(new SizeofType.MaxObjectGraphSize().withValue(BigInteger.valueOf(config.getMaxObjectGraphSize()))) + .withMaxObjectSize(new MemoryType() + .withValue(BigInteger.valueOf(config.getMaxObjectSize())) + .withUnit(MemoryUnit.fromValue(config.getUnit().toString())) + )); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultWriteBehindConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultWriteBehindConfigurationParser.java new file mode 100644 index 0000000000..a42f0f4da2 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/DefaultWriteBehindConfigurationParser.java @@ -0,0 +1,78 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.builders.WriteBehindConfigurationBuilder; +import org.ehcache.config.builders.WriteBehindConfigurationBuilder.BatchedWriteBehindConfigurationBuilder; +import org.ehcache.config.builders.WriteBehindConfigurationBuilder.UnBatchedWriteBehindConfigurationBuilder; +import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; +import org.ehcache.xml.model.BaseCacheType; +import org.ehcache.xml.model.CacheLoaderWriterType; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.TimeTypeWithPropSubst; + +import java.math.BigInteger; + +import static java.util.Optional.ofNullable; +import static org.ehcache.config.builders.WriteBehindConfigurationBuilder.newBatchedWriteBehindConfiguration; +import static org.ehcache.xml.XmlModel.convertToJUCTimeUnit; +import static org.ehcache.xml.XmlModel.convertToXmlTimeUnit; + +public class DefaultWriteBehindConfigurationParser + extends SimpleCoreServiceConfigurationParser> { + + @SuppressWarnings("unchecked") + public DefaultWriteBehindConfigurationParser() { + super((Class>) (Class) WriteBehindConfiguration.class, + CacheTemplate::writeBehind, + config -> ofNullable(config.getBatching()).map(batching -> { + BatchedWriteBehindConfigurationBuilder batchedBuilder = newBatchedWriteBehindConfiguration(batching.getMaxWriteDelay().getValue().longValue(), convertToJUCTimeUnit(batching.getMaxWriteDelay().getUnit()), batching.getBatchSize().intValue()); + if (batching.isCoalesce()) { + batchedBuilder = batchedBuilder.enableCoalescing(); + } + return batchedBuilder; + }).orElseGet(UnBatchedWriteBehindConfigurationBuilder::newUnBatchedWriteBehindConfiguration).useThreadPool(config.getThreadPool()) + .concurrencyLevel(config.getConcurrency().intValue()) + .queueSize(config.getSize().intValue()).build(), + BaseCacheType::getLoaderWriter, BaseCacheType::setLoaderWriter, + config -> { + CacheLoaderWriterType.WriteBehind writeBehind = new CacheLoaderWriterType.WriteBehind() + .withThreadPool(config.getThreadPoolAlias()) + .withConcurrency(BigInteger.valueOf(config.getConcurrency())) + .withSize(BigInteger.valueOf(config.getMaxQueueSize())); + + WriteBehindConfiguration.BatchingConfiguration batchingConfiguration = config.getBatchingConfiguration(); + if (batchingConfiguration == null) { + writeBehind.setNonBatching(new CacheLoaderWriterType.WriteBehind.NonBatching()); + } else { + writeBehind.withBatching(new CacheLoaderWriterType.WriteBehind.Batching() + .withBatchSize(BigInteger.valueOf(batchingConfiguration.getBatchSize())) + .withCoalesce(batchingConfiguration.isCoalescing()) + .withMaxWriteDelay(new TimeTypeWithPropSubst() + .withValue(BigInteger.valueOf(batchingConfiguration.getMaxDelay())) + .withUnit(convertToXmlTimeUnit(batchingConfiguration.getMaxDelayUnit())) + ) + ); + } + return new CacheLoaderWriterType().withWriteBehind(writeBehind); + }, + (existing, additional) -> { + existing.setWriteBehind(additional.getWriteBehind()); + return existing; + }); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/OffHeapDiskStoreConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/OffHeapDiskStoreConfigurationParser.java new file mode 100644 index 0000000000..597dc4badc --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/OffHeapDiskStoreConfigurationParser.java @@ -0,0 +1,41 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.DiskStoreSettingsType; + +import java.math.BigInteger; + +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class OffHeapDiskStoreConfigurationParser + extends SimpleCoreServiceConfigurationParser { + + public OffHeapDiskStoreConfigurationParser() { + super(OffHeapDiskStoreConfiguration.class, + CacheTemplate::diskStoreSettings, + config -> new OffHeapDiskStoreConfiguration(config.getThreadPool(), config.getWriterConcurrency().intValue(), config.getDiskSegments().intValue()), + CacheType::getDiskStoreSettings, CacheType::setDiskStoreSettings, + config -> new DiskStoreSettingsType() + .withThreadPool(config.getThreadPoolAlias()) + .withDiskSegments(BigInteger.valueOf(config.getDiskSegments())) + .withWriterConcurrency(BigInteger.valueOf(config.getWriterConcurrency()))); + } +} diff --git a/ehcache-xml/src/main/java/org/ehcache/xml/service/SimpleCoreServiceConfigurationParser.java b/ehcache-xml/src/main/java/org/ehcache/xml/service/SimpleCoreServiceConfigurationParser.java new file mode 100644 index 0000000000..32bf3be092 --- /dev/null +++ b/ehcache-xml/src/main/java/org/ehcache/xml/service/SimpleCoreServiceConfigurationParser.java @@ -0,0 +1,108 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.core.spi.service.ServiceUtils; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.xml.CoreServiceConfigurationParser; +import org.ehcache.xml.model.CacheTemplate; +import org.ehcache.xml.model.CacheType; + +import java.util.function.BiConsumer; +import java.util.function.BinaryOperator; +import java.util.function.Function; + +class SimpleCoreServiceConfigurationParser> implements CoreServiceConfigurationParser { + + private final Function extractor; + private final Parser parser; + + private final Class configType; + + private final Function getter; + private final BiConsumer setter; + private final Function unparser; + private final BinaryOperator merger; + + SimpleCoreServiceConfigurationParser(Class configType, + Function extractor, Function parser, + Function getter, BiConsumer setter, Function unparser) { + this(configType, extractor, (config, loader) -> parser.apply(config), getter, setter, unparser, (a, b) -> { throw new IllegalStateException(); }); + } + + SimpleCoreServiceConfigurationParser(Class configType, + Function extractor, Function parser, + Function getter, BiConsumer setter, Function unparser, BinaryOperator merger) { + this(configType, extractor, (config, loader) -> parser.apply(config), getter, setter, unparser, merger); + } + + SimpleCoreServiceConfigurationParser(Class configType, + Function extractor, Parser parser, + Function getter, BiConsumer setter, Function unparser) { + this(configType, extractor, parser, getter, setter, unparser, (a, b) -> { throw new IllegalStateException(); }); + } + + SimpleCoreServiceConfigurationParser(Class configType, + Function extractor, Parser parser, + Function getter, BiConsumer setter, Function unparser, BinaryOperator merger) { + this.configType = configType; + this.extractor = extractor; + this.parser = parser; + + this.getter = getter; + this.setter = setter; + this.unparser = unparser; + this.merger = merger; + } + + @Override + public final CacheConfigurationBuilder parseServiceConfiguration(CacheTemplate cacheDefinition, ClassLoader cacheClassLoader, CacheConfigurationBuilder cacheBuilder) throws ClassNotFoundException { + IN config = extractor.apply(cacheDefinition); + if (config != null) { + U configuration = parser.parse(config, cacheClassLoader); + if (configuration != null) { + return cacheBuilder.withService(configuration); + } + } + return cacheBuilder; + } + + @Override + public CacheType unparseServiceConfiguration(CacheConfiguration cacheConfiguration, CacheType cacheType) { + U serviceConfig = ServiceUtils.findSingletonAmongst(configType, cacheConfiguration.getServiceConfigurations()); + if (serviceConfig == null) { + return cacheType; + } else { + + OUT foo = getter.apply(cacheType); + if (foo == null) { + setter.accept(cacheType, unparser.apply(serviceConfig)); + } else { + setter.accept(cacheType, merger.apply(foo, unparser.apply(serviceConfig))); + } + return cacheType; + } + } + + @FunctionalInterface + interface Parser { + + U parse(T t, ClassLoader classLoader) throws ClassNotFoundException; + } +} diff --git a/xml/src/main/resources/ehcache-core.xsd b/ehcache-xml/src/main/schema/ehcache-core.xsd similarity index 80% rename from xml/src/main/resources/ehcache-core.xsd rename to ehcache-xml/src/main/schema/ehcache-core.xsd index 90d9eba8de..987316b871 100644 --- a/xml/src/main/resources/ehcache-core.xsd +++ b/ehcache-xml/src/main/schema/ehcache-core.xsd @@ -17,7 +17,17 @@ + + + + + + + + + @@ -124,7 +134,7 @@ - + @@ -161,8 +171,8 @@ - - + + @@ -260,6 +270,13 @@ + + + + Configures the resilience strategy used when the cache's underlying storage fails. + + + @@ -348,7 +365,6 @@ - @@ -369,17 +385,19 @@ - + - + - + + + - - + + @@ -413,18 +431,18 @@ A fully qualified class name to a concrete type that implements - org.ehcache.expiry.Expiry and has a public default no argument constructor. + org.ehcache.expiry.ExpiryPolicy and has a public default no argument constructor. - + Entries in the cache should expire if not accessed for the defined time. - + Entries in the cache should expire after the defined time. @@ -442,9 +460,9 @@ - + - + @@ -459,20 +477,32 @@ - - - - The memory unit (see org.ehcache.config.units.MemoryUnit) this value is expressed in. - - - + - + - + + + + + + + + + + + The memory unit (see org.ehcache.config.units.MemoryUnit) this value is expressed in. + + + + + + + + @@ -484,9 +514,9 @@ - + - + @@ -500,25 +530,34 @@ - + + + @java.lang.SuppressWarnings({"unchecked", "serial"}) + - + + + @java.lang.SuppressWarnings({"unchecked", "serial"}) + - + + + @java.lang.SuppressWarnings({"unchecked", "serial"}) + @@ -530,8 +569,8 @@ - - + + @@ -590,7 +629,54 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ehcache-xml/src/main/schema/ehcache-multi.xsd b/ehcache-xml/src/main/schema/ehcache-multi.xsd new file mode 100644 index 0000000000..a0980cf84f --- /dev/null +++ b/ehcache-xml/src/main/schema/ehcache-multi.xsd @@ -0,0 +1,80 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ehcache-xml/src/test/java/com/pany/ehcache/DeprecatedExpiry.java b/ehcache-xml/src/test/java/com/pany/ehcache/DeprecatedExpiry.java new file mode 100644 index 0000000000..4d8a78a5cc --- /dev/null +++ b/ehcache-xml/src/test/java/com/pany/ehcache/DeprecatedExpiry.java @@ -0,0 +1,40 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.pany.ehcache; + +import java.util.concurrent.TimeUnit; + +/** + * @author Alex Snaps + */ +@SuppressWarnings("deprecation") +public class DeprecatedExpiry implements org.ehcache.expiry.Expiry { + @Override + public org.ehcache.expiry.Duration getExpiryForCreation(Object key, Object value) { + return org.ehcache.expiry.Duration.of(42, TimeUnit.SECONDS); + } + + @Override + public org.ehcache.expiry.Duration getExpiryForAccess(Object key, org.ehcache.ValueSupplier value) { + return org.ehcache.expiry.Duration.of(42, TimeUnit.SECONDS); + } + + @Override + public org.ehcache.expiry.Duration getExpiryForUpdate(Object key, org.ehcache.ValueSupplier oldValue, Object newValue) { + return org.ehcache.expiry.Duration.of(42, TimeUnit.SECONDS); + } +} diff --git a/ehcache-xml/src/test/java/com/pany/ehcache/MyExpiry.java b/ehcache-xml/src/test/java/com/pany/ehcache/MyExpiry.java new file mode 100644 index 0000000000..f99e8d2617 --- /dev/null +++ b/ehcache-xml/src/test/java/com/pany/ehcache/MyExpiry.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.pany.ehcache; + +import org.ehcache.expiry.ExpiryPolicy; + +import java.time.Duration; +import java.util.function.Supplier; + +/** + * @author Alex Snaps + */ +public class MyExpiry implements ExpiryPolicy { + @Override + public Duration getExpiryForCreation(Object key, Object value) { + return Duration.ofSeconds(42); + } + + @Override + public Duration getExpiryForAccess(Object key, Supplier value) { + return Duration.ofSeconds(42); + } + + @Override + public Duration getExpiryForUpdate(Object key, Supplier oldValue, Object newValue) { + return Duration.ofSeconds(42); + } +} diff --git a/xml/src/test/java/com/pany/ehcache/copier/AnotherDescriptionCopier.java b/ehcache-xml/src/test/java/com/pany/ehcache/copier/AnotherDescriptionCopier.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/copier/AnotherDescriptionCopier.java rename to ehcache-xml/src/test/java/com/pany/ehcache/copier/AnotherDescriptionCopier.java diff --git a/xml/src/test/java/com/pany/ehcache/copier/AnotherPersonCopier.java b/ehcache-xml/src/test/java/com/pany/ehcache/copier/AnotherPersonCopier.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/copier/AnotherPersonCopier.java rename to ehcache-xml/src/test/java/com/pany/ehcache/copier/AnotherPersonCopier.java diff --git a/xml/src/test/java/com/pany/ehcache/copier/Description.java b/ehcache-xml/src/test/java/com/pany/ehcache/copier/Description.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/copier/Description.java rename to ehcache-xml/src/test/java/com/pany/ehcache/copier/Description.java diff --git a/xml/src/test/java/com/pany/ehcache/copier/DescriptionCopier.java b/ehcache-xml/src/test/java/com/pany/ehcache/copier/DescriptionCopier.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/copier/DescriptionCopier.java rename to ehcache-xml/src/test/java/com/pany/ehcache/copier/DescriptionCopier.java diff --git a/xml/src/test/java/com/pany/ehcache/copier/Employee.java b/ehcache-xml/src/test/java/com/pany/ehcache/copier/Employee.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/copier/Employee.java rename to ehcache-xml/src/test/java/com/pany/ehcache/copier/Employee.java diff --git a/xml/src/test/java/com/pany/ehcache/copier/Person.java b/ehcache-xml/src/test/java/com/pany/ehcache/copier/Person.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/copier/Person.java rename to ehcache-xml/src/test/java/com/pany/ehcache/copier/Person.java diff --git a/xml/src/test/java/com/pany/ehcache/copier/PersonCopier.java b/ehcache-xml/src/test/java/com/pany/ehcache/copier/PersonCopier.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/copier/PersonCopier.java rename to ehcache-xml/src/test/java/com/pany/ehcache/copier/PersonCopier.java diff --git a/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java b/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java rename to ehcache-xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java diff --git a/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestCacheLoaderWriter.java b/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestCacheLoaderWriter.java new file mode 100644 index 0000000000..07a28440eb --- /dev/null +++ b/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestCacheLoaderWriter.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.pany.ehcache.integration; + +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.CountDownLatch; + +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; + +/** + * @author Alex Snaps + */ +public class TestCacheLoaderWriter implements CacheLoaderWriter { + + public static Number lastWrittenKey; + + public static CountDownLatch latch; + + @Override + public String load(final Number key) { + return key.toString(); + } + + @Override + public Map loadAll(final Iterable keys) { + final Map loaded = new HashMap<>(); + for (Number key : keys) { + loaded.put(key, load(key)); + } + return loaded; + } + + @Override + public void write(final Number key, final String value) { + lastWrittenKey = key; + if(latch != null) { + latch.countDown(); + } + } + + @Override + public void writeAll(final Iterable> entries) { + for (Entry entry : entries) { + lastWrittenKey = entry.getKey(); + if(latch != null) { + latch.countDown(); + } + } + } + + @Override + public void delete(final Number key) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public void deleteAll(final Iterable keys) { + throw new UnsupportedOperationException("Implement me!"); + } +} diff --git a/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestEvictionAdvisor.java b/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestEvictionAdvisor.java new file mode 100644 index 0000000000..7454956c90 --- /dev/null +++ b/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestEvictionAdvisor.java @@ -0,0 +1,27 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.pany.ehcache.integration; + +import org.ehcache.config.EvictionAdvisor; + +public class TestEvictionAdvisor implements EvictionAdvisor { + + @Override + public boolean adviseAgainstEviction(K key, V value) { + return false; + } +} diff --git a/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestResilienceStrategy.java b/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestResilienceStrategy.java new file mode 100644 index 0000000000..86a04a483e --- /dev/null +++ b/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestResilienceStrategy.java @@ -0,0 +1,91 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.pany.ehcache.integration; + +import org.ehcache.Cache; +import org.ehcache.spi.resilience.ResilienceStrategy; +import org.ehcache.spi.resilience.StoreAccessException; + +import java.util.Map; + +public class TestResilienceStrategy implements ResilienceStrategy { + + @Override + public V getFailure(K key, StoreAccessException e) { + return null; + } + + @Override + public boolean containsKeyFailure(K key, StoreAccessException e) { + return false; + } + + @Override + public void putFailure(K key, V value, StoreAccessException e) { + + } + + @Override + public void removeFailure(K key, StoreAccessException e) { + + } + + @Override + public void clearFailure(StoreAccessException e) { + + } + + @Override + public Cache.Entry iteratorFailure(StoreAccessException e) { + return null; + } + + @Override + public V putIfAbsentFailure(K key, V value, StoreAccessException e) { + return null; + } + + @Override + public boolean removeFailure(K key, V value, StoreAccessException e) { + return false; + } + + @Override + public V replaceFailure(K key, V value, StoreAccessException e) { + return null; + } + + @Override + public boolean replaceFailure(K key, V value, V newValue, StoreAccessException e) { + return false; + } + + @Override + public Map getAllFailure(Iterable keys, StoreAccessException e) { + return null; + } + + @Override + public void putAllFailure(Map entries, StoreAccessException e) { + + } + + @Override + public void removeAllFailure(Iterable keys, StoreAccessException e) { + + } +} diff --git a/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java b/ehcache-xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java rename to ehcache-xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java diff --git a/xml/src/test/java/com/pany/ehcache/integration/ThreadRememberingLoaderWriter.java b/ehcache-xml/src/test/java/com/pany/ehcache/integration/ThreadRememberingLoaderWriter.java similarity index 77% rename from xml/src/test/java/com/pany/ehcache/integration/ThreadRememberingLoaderWriter.java rename to ehcache-xml/src/test/java/com/pany/ehcache/integration/ThreadRememberingLoaderWriter.java index 727b03fd30..4c5dd124a7 100644 --- a/xml/src/test/java/com/pany/ehcache/integration/ThreadRememberingLoaderWriter.java +++ b/ehcache-xml/src/test/java/com/pany/ehcache/integration/ThreadRememberingLoaderWriter.java @@ -18,7 +18,6 @@ import java.util.Collections; import java.util.Map; import java.util.concurrent.Semaphore; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; public class ThreadRememberingLoaderWriter implements CacheLoaderWriter { @@ -26,35 +25,35 @@ public class ThreadRememberingLoaderWriter implements CacheLoaderWriter loadAll(Iterable keys) throws Exception { + public Map loadAll(Iterable keys) { return Collections.emptyMap(); } @Override - public void write(String key, String value) throws Exception { + public void write(String key, String value) { LAST_SEEN_THREAD = Thread.currentThread(); USED.release(); } @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { + public void writeAll(Iterable> entries) { LAST_SEEN_THREAD = Thread.currentThread(); USED.release(); } @Override - public void delete(String key) throws Exception { + public void delete(String key) { LAST_SEEN_THREAD = Thread.currentThread(); USED.release(); } @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { + public void deleteAll(Iterable keys) { LAST_SEEN_THREAD = Thread.currentThread(); USED.release(); } diff --git a/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java b/ehcache-xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java rename to ehcache-xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java diff --git a/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer2.java b/ehcache-xml/src/test/java/com/pany/ehcache/serializer/TestSerializer2.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/serializer/TestSerializer2.java rename to ehcache-xml/src/test/java/com/pany/ehcache/serializer/TestSerializer2.java diff --git a/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer3.java b/ehcache-xml/src/test/java/com/pany/ehcache/serializer/TestSerializer3.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/serializer/TestSerializer3.java rename to ehcache-xml/src/test/java/com/pany/ehcache/serializer/TestSerializer3.java diff --git a/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer4.java b/ehcache-xml/src/test/java/com/pany/ehcache/serializer/TestSerializer4.java similarity index 100% rename from xml/src/test/java/com/pany/ehcache/serializer/TestSerializer4.java rename to ehcache-xml/src/test/java/com/pany/ehcache/serializer/TestSerializer4.java diff --git a/ehcache-xml/src/test/java/org/ehcache/docs/GettingStarted.java b/ehcache-xml/src/test/java/org/ehcache/docs/GettingStarted.java new file mode 100644 index 0000000000..f4a8f1516f --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/docs/GettingStarted.java @@ -0,0 +1,88 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.docs; + +import org.ehcache.CacheManager; +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.xml.XmlConfiguration; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.IOException; +import java.net.URL; +import java.time.Duration; + +/** + * GettingStarted + */ +public class GettingStarted { + + @Rule + public TemporaryFolder tmpDir = new TemporaryFolder(); + + @Test + public void xmlConfigSample() throws Exception { + // tag::xmlConfig[] + final URL myUrl = getClass().getResource("/configs/docs/getting-started.xml"); // <1> + XmlConfiguration xmlConfig = new XmlConfiguration(myUrl); // <2> + CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(xmlConfig); // <3> + myCacheManager.init(); // <4> + // end::xmlConfig[] + } + + @Test + public void xmlTemplateSample() throws Exception { + // tag::xmlTemplate[] + XmlConfiguration xmlConfiguration = new XmlConfiguration(getClass().getResource("/configs/docs/template-sample.xml")); + CacheConfigurationBuilder configurationBuilder = xmlConfiguration.newCacheConfigurationBuilderFromTemplate("example", Long.class, String.class); // <1> + configurationBuilder = configurationBuilder.withResourcePools(ResourcePoolsBuilder.heap(1000)); // <2> + // end::xmlTemplate[] + } + + @Test + public void xmlExpirySample() throws Exception { + XmlConfiguration xmlConfiguration = new XmlConfiguration(getClass().getResource("/configs/docs/expiry.xml")); + CacheManagerBuilder.newCacheManager(xmlConfiguration).init(); + } + + @Test + public void testXmlToString() throws IOException { + // tag::xmlTranslation[] + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(tmpDir.newFile("myData"))) + .withCache("threeTieredCache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .disk(20, MemoryUnit.MB, true)) + .withExpiry(ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofSeconds(20))) + ).build(false); + + Configuration configuration = cacheManager.getRuntimeConfiguration(); + XmlConfiguration xmlConfiguration = new XmlConfiguration(configuration); // <1> + String xml = xmlConfiguration.toString(); // <2> + // end::xmlTranslation[] + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java b/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java new file mode 100644 index 0000000000..397184bf94 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/docs/MultiGettingStarted.java @@ -0,0 +1,141 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.docs; + +import org.ehcache.config.Configuration; +import org.ehcache.config.ResourceType; +import org.ehcache.xml.multi.XmlMultiConfiguration; +import org.hamcrest.collection.IsIterableContainingInAnyOrder; +import org.hamcrest.collection.IsMapContaining; +import org.hamcrest.core.AllOf; +import org.hamcrest.core.Is; +import org.junit.Test; +import org.w3c.dom.Document; + +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.hamcrest.MatcherAssert.assertThat; + +public class MultiGettingStarted { + + @Test + public void multipleConfigurations() { + //tag::multipleManagers[] + XmlMultiConfiguration multipleConfiguration = XmlMultiConfiguration + .from(getClass().getResource("/configs/docs/multi/multiple-managers.xml")) // <1> + .build(); // <2> + + Configuration fooConfiguration = multipleConfiguration.configuration("foo-manager"); // <3> + //end::multipleManagers[] + + assertThat(resourceMap(multipleConfiguration.identities().stream().collect( + Collectors.toMap(Function.identity(), multipleConfiguration::configuration) + )), AllOf.allOf( + IsMapContaining.hasEntry(Is.is("foo-manager"), IsMapContaining.hasEntry(Is.is("foo"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP))), + IsMapContaining.hasEntry(Is.is("bar-manager"), IsMapContaining.hasEntry(Is.is("bar"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP))) + )); + } + + @Test + public void multipleVariants() { + //tag::multipleVariants[] + XmlMultiConfiguration variantConfiguration = XmlMultiConfiguration + .from(getClass().getResource("/configs/docs/multi/multiple-variants.xml")) + .build(); + + Configuration fooConfiguration = variantConfiguration.configuration("foo-manager", "offheap"); // <1> + //end::multipleVariants[] + + assertThat(resourceMap(variantConfiguration.identities().stream().collect( + Collectors.toMap(Function.identity(), i -> variantConfiguration.configuration(i, "offheap")) + )), AllOf.allOf( + IsMapContaining.hasEntry(Is.is("foo-manager"), IsMapContaining.hasEntry(Is.is("foo"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP))), + IsMapContaining.hasEntry(Is.is("bar-manager"), IsMapContaining.hasEntry(Is.is("bar"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP))) + )); + + assertThat(resourceMap(variantConfiguration.identities().stream().collect( + Collectors.toMap(Function.identity(), i -> variantConfiguration.configuration(i, "heap")) + )), AllOf.allOf( + IsMapContaining.hasEntry(Is.is("foo-manager"), IsMapContaining.hasEntry(Is.is("foo"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP))), + IsMapContaining.hasEntry(Is.is("bar-manager"), IsMapContaining.hasEntry(Is.is("bar"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP))) + )); + } + + @Test + public void multipleRetrieval() { + XmlMultiConfiguration multipleConfiguration = XmlMultiConfiguration + .from(getClass().getResource("/configs/docs/multi/multiple-managers.xml")) + .build(); + XmlMultiConfiguration variantConfiguration = XmlMultiConfiguration + .from(getClass().getResource("/configs/docs/multi/multiple-variants.xml")) + .build(); + + //tag::multipleRetrieval[] + Map allConfigurations = multipleConfiguration.identities().stream() // <1> + .collect(Collectors.toMap(i -> i, i -> multipleConfiguration.configuration(i))); // <2> + Map offheapConfigurations = variantConfiguration.identities().stream() + .collect(Collectors.toMap(i -> i, i -> variantConfiguration.configuration(i, "offheap"))); // <3> + //end::multipleRetrieval[] + + assertThat(resourceMap(allConfigurations), AllOf.allOf( + IsMapContaining.hasEntry(Is.is("foo-manager"), IsMapContaining.hasEntry(Is.is("foo"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP))), + IsMapContaining.hasEntry(Is.is("bar-manager"), IsMapContaining.hasEntry(Is.is("bar"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP))) + )); + + assertThat(resourceMap(offheapConfigurations), AllOf.allOf( + IsMapContaining.hasEntry(Is.is("foo-manager"), IsMapContaining.hasEntry(Is.is("foo"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP, ResourceType.Core.OFFHEAP))), + IsMapContaining.hasEntry(Is.is("bar-manager"), IsMapContaining.hasEntry(Is.is("bar"), IsIterableContainingInAnyOrder.containsInAnyOrder(ResourceType.Core.HEAP))) + )); + } + + @Test + public void building() { + XmlMultiConfiguration sourceConfiguration = XmlMultiConfiguration + .from(getClass().getResource("/configs/docs/multi/multiple-variants.xml")) + .build(); + Configuration barConfiguration = sourceConfiguration.configuration("bar-manager"); + Configuration heapConfiguration = sourceConfiguration.configuration("foo-manager", "heap"); + Configuration offheapConfiguration = sourceConfiguration.configuration("foo-manager", "offheap"); + + //tag::building[] + XmlMultiConfiguration multiConfiguration = XmlMultiConfiguration.fromNothing() // <1> + .withManager("bar", barConfiguration) // <2> + .withManager("foo").variant("heap", heapConfiguration).variant("offheap", offheapConfiguration) // <3> + .build(); // <4> + //end::building[] + + //tag::modifying[] + XmlMultiConfiguration modified = XmlMultiConfiguration.from(multiConfiguration) // <1> + .withManager("foo") // <2> + .build(); + //end::modifying[] + + //tag::rendering[] + String xmlString = multiConfiguration.asRenderedDocument(); // <1> + Document xmlDocument = multiConfiguration.asDocument(); // <2> + //end::rendering[] + } + + private static Map>>> resourceMap(Map configurations) { + return configurations.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + manager -> manager.getValue().getCacheConfigurations().entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + cache -> cache.getValue().getResourcePools().getResourceTypeSet())))); + } +} diff --git a/xml/src/test/java/org/ehcache/xml/BarConfiguration.java b/ehcache-xml/src/test/java/org/ehcache/xml/BarConfiguration.java similarity index 97% rename from xml/src/test/java/org/ehcache/xml/BarConfiguration.java rename to ehcache-xml/src/test/java/org/ehcache/xml/BarConfiguration.java index 43de91b5c0..80fedfa68a 100644 --- a/xml/src/test/java/org/ehcache/xml/BarConfiguration.java +++ b/ehcache-xml/src/test/java/org/ehcache/xml/BarConfiguration.java @@ -22,7 +22,7 @@ /** * BarConfiguration */ -public class BarConfiguration implements ServiceCreationConfiguration { +public class BarConfiguration implements ServiceCreationConfiguration { @Override public Class getServiceType() { return Service.class; diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/BarParser.java b/ehcache-xml/src/test/java/org/ehcache/xml/BarParser.java new file mode 100644 index 0000000000..bd38ee2f65 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/BarParser.java @@ -0,0 +1,71 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.xml.sax.SAXException; + +import java.io.IOException; +import java.net.URI; +import java.net.URL; + +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +/** + * BarParser + */ +public class BarParser implements CacheManagerServiceConfigurationParser { + + private static final URI NAMESPACE = URI.create("http://www.example.com/bar"); + private static final URL XML_SCHEMA = FooParser.class.getResource("/configs/bar.xsd"); + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public URI getNamespace() { + return NAMESPACE; + } + + @Override + public ServiceCreationConfiguration parseServiceCreationConfiguration(Element fragment, ClassLoader classLoader) { + return new BarConfiguration(); + } + + @Override + public Class getServiceType() { + return Service.class; + } + + @Override + public Element unparseServiceCreationConfiguration(ServiceCreationConfiguration serviceCreationConfiguration) { + try { + Document document = DomUtil.createAndGetDocumentBuilder().newDocument(); + return document.createElementNS(NAMESPACE.toString(), "bar:bar"); + } catch (SAXException | ParserConfigurationException | IOException e) { + throw new XmlConfigurationException(e); + } + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/BazParser.java b/ehcache-xml/src/test/java/org/ehcache/xml/BazParser.java new file mode 100644 index 0000000000..566a5bbbd1 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/BazParser.java @@ -0,0 +1,78 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.ResourcePool; +import org.ehcache.config.ResourceType; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.xml.sax.SAXException; + +import java.io.IOException; +import java.net.URI; +import java.net.URL; +import java.util.Collections; +import java.util.Set; + +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +/** + * Parser for the test resource {@code BazResource} + */ +public class BazParser implements CacheResourceConfigurationParser { + + private static final URI NAMESPACE = URI.create("http://www.example.com/baz"); + private static final URL XML_SCHEMA = FooParser.class.getResource("/configs/baz.xsd"); + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public URI getNamespace() { + return NAMESPACE; + } + + + @Override + public ResourcePool parseResourceConfiguration(Element fragment) { + String elementName = fragment.getLocalName(); + if (elementName.equals("baz")) { + return new BazResource(); + } + return null; + } + + @Override + public Element unparseResourcePool(final ResourcePool resourcePool) { + try { + Document document = DomUtil.createAndGetDocumentBuilder().newDocument(); + return document.createElementNS(NAMESPACE.toString(), "baz:baz"); + } catch (SAXException | ParserConfigurationException | IOException e) { + throw new XmlConfigurationException(e); + } + } + + @Override + public Set> getResourceTypes() { + return Collections.singleton(BazResource.class); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/BazResource.java b/ehcache-xml/src/test/java/org/ehcache/xml/BazResource.java new file mode 100644 index 0000000000..d01ce0f89b --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/BazResource.java @@ -0,0 +1,63 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.ResourcePool; +import org.ehcache.config.ResourceType; + +public class BazResource implements ResourcePool { + + @Override + public ResourceType getType() { + return Type.BAZ_TYPE; + } + + @Override + public boolean isPersistent() { + return false; + } + + @Override + public void validateUpdate(ResourcePool newPool) { + + } + + public static class Type implements ResourceType { + + public static final Type BAZ_TYPE = new Type(); + + @Override + public Class getResourcePoolClass() { + return BazResource.class; + } + + @Override + public boolean isPersistable() { + return false; + } + + @Override + public boolean requiresSerialization() { + return false; + } + + @Override + public int getTierHeight() { + return 0; + } + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/CoreCacheConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/CoreCacheConfigurationParserTest.java new file mode 100644 index 0000000000..0befa07036 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/CoreCacheConfigurationParserTest.java @@ -0,0 +1,125 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.TimeTypeWithPropSubst; +import org.ehcache.xml.model.TimeUnit; +import org.hamcrest.CoreMatchers; +import org.junit.Test; + +import com.pany.ehcache.MyExpiry; +import com.pany.ehcache.integration.TestEvictionAdvisor; + +import java.math.BigInteger; +import java.time.Duration; + +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; + +public class CoreCacheConfigurationParserTest { + + CacheConfigurationBuilder cacheConfigurationBuilder = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)); + CoreCacheConfigurationParser parser = new CoreCacheConfigurationParser(); + + @Test + public void parseConfigurationExpiryPolicy() throws Exception { + Configuration configuration = new XmlConfiguration(getClass().getResource("/configs/expiry-caches.xml")); + + ExpiryPolicy expiry = configuration.getCacheConfigurations().get("none").getExpiryPolicy(); + ExpiryPolicy value = ExpiryPolicyBuilder.noExpiration(); + assertThat(expiry, is(value)); + + expiry = configuration.getCacheConfigurations().get("notSet").getExpiryPolicy(); + value = ExpiryPolicyBuilder.noExpiration(); + assertThat(expiry, is(value)); + + expiry = configuration.getCacheConfigurations().get("class").getExpiryPolicy(); + assertThat(expiry, CoreMatchers.instanceOf(com.pany.ehcache.MyExpiry.class)); + + expiry = configuration.getCacheConfigurations().get("deprecatedClass").getExpiryPolicy(); + assertThat(expiry.getExpiryForCreation(null, null), is(Duration.ofSeconds(42))); + assertThat(expiry.getExpiryForAccess(null, () -> null), is(Duration.ofSeconds(42))); + assertThat(expiry.getExpiryForUpdate(null, () -> null, null), is(Duration.ofSeconds(42))); + + expiry = configuration.getCacheConfigurations().get("tti").getExpiryPolicy(); + value = ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(500)); + assertThat(expiry, equalTo(value)); + + expiry = configuration.getCacheConfigurations().get("ttl").getExpiryPolicy(); + value = ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(30)); + assertThat(expiry, equalTo(value)); + } + + @Test + public void unparseConfigurationNoExpiry() { + CacheConfiguration cacheConfiguration = buildCacheConfigWith(ExpiryPolicyBuilder.noExpiration()); + CacheType cacheType = parser.unparseConfiguration(cacheConfiguration, new CacheType()); + assertThat(cacheType.getExpiry().getNone(), notNullValue()); + } + + @Test(expected = XmlConfigurationException.class) + public void unparseConfigurationCustomExpiry() { + CacheConfiguration cacheConfiguration = buildCacheConfigWith(new MyExpiry()); + parser.unparseConfiguration(cacheConfiguration, new CacheType()); + } + + @Test + public void unparseConfigurationTtiExpiry() { + CacheConfiguration cacheConfiguration = buildCacheConfigWith(ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(2500))); + CacheType cacheType = parser.unparseConfiguration(cacheConfiguration, new CacheType()); + TimeTypeWithPropSubst tti = cacheType.getExpiry().getTti(); + assertThat(tti, notNullValue()); + assertThat(tti.getValue(), is(BigInteger.valueOf(2500))); + assertThat(tti.getUnit(), is(TimeUnit.MILLIS)); + } + + @Test + public void unparseConfigurationTtlExpiry() { + CacheConfiguration cacheConfiguration = buildCacheConfigWith(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMinutes(60))); + CacheType cacheType = parser.unparseConfiguration(cacheConfiguration, new CacheType()); + TimeTypeWithPropSubst ttl = cacheType.getExpiry().getTtl(); + assertThat(ttl, notNullValue()); + assertThat(ttl.getValue(), is(BigInteger.valueOf(1))); + assertThat(ttl.getUnit(), is(TimeUnit.HOURS)); + } + + @Test(expected = XmlConfigurationException.class) + public void unparseConfigurationEvictionAdvisor() { + CacheConfiguration cacheConfiguration = buildCacheConfigWith(new TestEvictionAdvisor<>()); + parser.unparseConfiguration(cacheConfiguration, new CacheType()); + } + + private CacheConfiguration buildCacheConfigWith(ExpiryPolicy expiryPolicy) { + return cacheConfigurationBuilder.withExpiry(expiryPolicy).build(); + } + + private CacheConfiguration buildCacheConfigWith(EvictionAdvisor evictionAdvisor) { + return cacheConfigurationBuilder.withEvictionAdvisor(evictionAdvisor).build(); + } +} diff --git a/xml/src/test/java/org/ehcache/xml/FancyParser.java b/ehcache-xml/src/test/java/org/ehcache/xml/FancyParser.java similarity index 81% rename from xml/src/test/java/org/ehcache/xml/FancyParser.java rename to ehcache-xml/src/test/java/org/ehcache/xml/FancyParser.java index 7d5b110490..7f79e3eb55 100644 --- a/xml/src/test/java/org/ehcache/xml/FancyParser.java +++ b/ehcache-xml/src/test/java/org/ehcache/xml/FancyParser.java @@ -42,7 +42,7 @@ public Source getXmlSchema() throws IOException { } @Override - public ServiceConfiguration parseServiceConfiguration(Element fragment) { + public ServiceConfiguration parseServiceConfiguration(Element fragment, ClassLoader classLoader) { return new FooConfiguration(); } @@ -51,4 +51,14 @@ public URI getNamespace() { return NAMESPACE; } + @Override + public Class getServiceType() { + return null; + } + + @Override + public Element unparseServiceConfiguration(ServiceConfiguration serviceConfiguration) { + return null; + } + } diff --git a/xml/src/test/java/org/ehcache/xml/FooConfiguration.java b/ehcache-xml/src/test/java/org/ehcache/xml/FooConfiguration.java similarity index 92% rename from xml/src/test/java/org/ehcache/xml/FooConfiguration.java rename to ehcache-xml/src/test/java/org/ehcache/xml/FooConfiguration.java index 00fdd485ca..22cd8611b5 100644 --- a/xml/src/test/java/org/ehcache/xml/FooConfiguration.java +++ b/ehcache-xml/src/test/java/org/ehcache/xml/FooConfiguration.java @@ -23,7 +23,7 @@ * * @author cdennis */ -class FooConfiguration implements ServiceConfiguration { +class FooConfiguration implements ServiceConfiguration { @Override public Class getServiceType() { diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/FooParser.java b/ehcache-xml/src/test/java/org/ehcache/xml/FooParser.java new file mode 100644 index 0000000000..c193618848 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/FooParser.java @@ -0,0 +1,73 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.xml.sax.SAXException; + +import java.io.IOException; +import java.net.URI; +import java.net.URL; + +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + +/** + * + * @author cdennis + */ +public class FooParser implements CacheServiceConfigurationParser { + + private static final URI NAMESPACE = URI.create("http://www.example.com/foo"); + private static final URL XML_SCHEMA = FooParser.class.getResource("/configs/foo.xsd"); + + @Override + public Source getXmlSchema() throws IOException { + return new StreamSource(XML_SCHEMA.openStream()); + } + + @Override + public ServiceConfiguration parseServiceConfiguration(Element fragment, ClassLoader classLoader) { + return new FooConfiguration(); + } + + @Override + public Class getServiceType() { + return Service.class; + } + + @Override + public Element unparseServiceConfiguration(ServiceConfiguration serviceConfiguration) { + try { + Document document = DomUtil.createAndGetDocumentBuilder().newDocument(); + return document.createElementNS(NAMESPACE.toString(), "foo:foo"); + } catch (SAXException | ParserConfigurationException | IOException e) { + throw new XmlConfigurationException(e); + } + } + + @Override + public URI getNamespace() { + return NAMESPACE; + } + +} diff --git a/xml/src/test/java/org/ehcache/xml/FromTemplateCacheConfigurationBuilderDefaultTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/FromTemplateCacheConfigurationBuilderDefaultTest.java similarity index 96% rename from xml/src/test/java/org/ehcache/xml/FromTemplateCacheConfigurationBuilderDefaultTest.java rename to ehcache-xml/src/test/java/org/ehcache/xml/FromTemplateCacheConfigurationBuilderDefaultTest.java index b4a4b19917..ee4a219dce 100644 --- a/xml/src/test/java/org/ehcache/xml/FromTemplateCacheConfigurationBuilderDefaultTest.java +++ b/ehcache-xml/src/test/java/org/ehcache/xml/FromTemplateCacheConfigurationBuilderDefaultTest.java @@ -21,8 +21,8 @@ import org.junit.Test; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * TemplateDefaultTest diff --git a/xml/src/test/java/org/ehcache/xml/IntegrationConfigurationTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/IntegrationConfigurationTest.java similarity index 96% rename from xml/src/test/java/org/ehcache/xml/IntegrationConfigurationTest.java rename to ehcache-xml/src/test/java/org/ehcache/xml/IntegrationConfigurationTest.java index d945d747d3..e77d27dbd2 100644 --- a/xml/src/test/java/org/ehcache/xml/IntegrationConfigurationTest.java +++ b/ehcache-xml/src/test/java/org/ehcache/xml/IntegrationConfigurationTest.java @@ -51,11 +51,11 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.StringContains.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** @@ -189,7 +189,7 @@ public void testCacheEventListenerThreadPoolName() throws Exception { Configuration configuration = new XmlConfiguration(this.getClass().getResource("/configs/ehcache-cacheEventListener.xml")); CacheConfiguration template1 = configuration.getCacheConfigurations().get("template1"); DefaultCacheEventDispatcherConfiguration eventDispatcherConfig = null; - for (ServiceConfiguration serviceConfiguration : template1.getServiceConfigurations()) { + for (ServiceConfiguration serviceConfiguration : template1.getServiceConfigurations()) { if (serviceConfiguration instanceof DefaultCacheEventDispatcherConfiguration) { eventDispatcherConfig = (DefaultCacheEventDispatcherConfiguration) serviceConfiguration; } @@ -225,8 +225,8 @@ public void testThreadPools() throws Exception { cacheManager.init(); try { Cache cache = cacheManager.createCache("testThreadPools", newCacheConfigurationBuilder(String.class, String.class, heap(10)) - .add(new DefaultCacheLoaderWriterConfiguration(ThreadRememberingLoaderWriter.class)) - .add(newUnBatchedWriteBehindConfiguration().useThreadPool("small")) + .withService(new DefaultCacheLoaderWriterConfiguration(ThreadRememberingLoaderWriter.class)) + .withService(newUnBatchedWriteBehindConfiguration().useThreadPool("small")) .build()); cache.put("foo", "bar"); @@ -246,8 +246,8 @@ public void testThreadPoolsUsingDefaultPool() throws Exception { cacheManager.init(); try { Cache cache = cacheManager.createCache("testThreadPools", newCacheConfigurationBuilder(String.class, String.class, heap(10)) - .add(new DefaultCacheLoaderWriterConfiguration(ThreadRememberingLoaderWriter.class)) - .add(newUnBatchedWriteBehindConfiguration()) + .withService(new DefaultCacheLoaderWriterConfiguration(ThreadRememberingLoaderWriter.class)) + .withService(newUnBatchedWriteBehindConfiguration()) .build()); cache.put("foo", "bar"); diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/JaxbParsersTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/JaxbParsersTest.java new file mode 100644 index 0000000000..a23d1ffe62 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/JaxbParsersTest.java @@ -0,0 +1,281 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; + +import java.math.BigInteger; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThrows; + +public class JaxbParsersTest { + + private static final String PROPERTY_PREFIX = JaxbParsersTest.class.getName() + ":"; + @Rule public final TestName testName = new TestName(); + + @Test + public void testParsePropertyOrStringFromNullString() { + assertThrows(NullPointerException.class, () -> JaxbParsers.parsePropertyOrString(null)); + } + + @Test + public void testParsePropertyOrStringWithoutProperty() { + assertThat(JaxbParsers.parsePropertyOrString("${foobar"), is("${foobar")); + assertThat(JaxbParsers.parsePropertyOrString("foobar"), is("foobar")); + assertThat(JaxbParsers.parsePropertyOrString("foobar}"), is("foobar}")); + assertThat(JaxbParsers.parsePropertyOrString("$foobar"), is("$foobar")); + } + + @Test + public void testParsePropertyOrStringWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "barfoo"); + try { + assertThat(JaxbParsers.parsePropertyOrString("${" + property + "}"), is("barfoo")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrStringWithMissingProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + assertThrows(IllegalStateException.class, () -> JaxbParsers.parsePropertyOrString("${" + property + "}")); + } + + @Test + public void testParsePropertyOrIntegerFromNullString() { + assertThrows(NullPointerException.class, () -> JaxbParsers.parsePropertyOrInteger(null)); + } + + @Test + public void testParsePropertyOrIntegerValidWithoutProperty() { + assertThat(JaxbParsers.parsePropertyOrInteger("123"), is(BigInteger.valueOf(123))); + assertThat(JaxbParsers.parsePropertyOrInteger("-123"), is(BigInteger.valueOf(-123))); + } + + @Test + public void testParsePropertyOrIntegerInvalidWithoutProperty() { + assertThrows(NumberFormatException.class, () -> JaxbParsers.parsePropertyOrInteger("foobar")); + } + + @Test + public void testParsePropertyOrIntegerValidWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "123"); + try { + assertThat(JaxbParsers.parsePropertyOrInteger("${" + property + "}"), is(BigInteger.valueOf(123))); + } finally { + System.clearProperty(property); + } + System.setProperty(property, "-123"); + try { + assertThat(JaxbParsers.parsePropertyOrInteger("${" + property + "}"), is(BigInteger.valueOf(-123))); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrIntegerInvalidWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "barfoo"); + try { + assertThrows(NumberFormatException.class, () -> JaxbParsers.parsePropertyOrInteger("${" + property + "}")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrIntegerWithMissingProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + assertThrows(IllegalStateException.class, () -> JaxbParsers.parsePropertyOrInteger("${" + property + "}")); + } + + + @Test + public void testParsePropertyOrPositiveIntegerFromNullString() { + assertThrows(NullPointerException.class, () -> JaxbParsers.parsePropertyOrPositiveInteger(null)); + } + + @Test + public void testParsePropertyOrPositiveIntegerValidWithoutProperty() { + assertThat(JaxbParsers.parsePropertyOrPositiveInteger("123"), is(BigInteger.valueOf(123))); + } + + @Test + public void testParsePropertyOrPositiveIntegerInvalidWithoutProperty() { + assertThrows(NumberFormatException.class, () -> JaxbParsers.parsePropertyOrPositiveInteger("foobar")); + } + + @Test + public void testParsePropertyOrPositiveIntegerOutOfRangeWithoutProperty() { + assertThrows(IllegalArgumentException.class, () -> JaxbParsers.parsePropertyOrPositiveInteger("0")); + } + + @Test + public void testParsePropertyOrPositiveIntegerValidWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "123"); + try { + assertThat(JaxbParsers.parsePropertyOrPositiveInteger("${" + property + "}"), is(BigInteger.valueOf(123))); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrPositiveIntegerInvalidWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "barfoo"); + try { + assertThrows(NumberFormatException.class, () -> JaxbParsers.parsePropertyOrPositiveInteger("${" + property + "}")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrPositiveIntegerOutOfRangeWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "0"); + try { + assertThrows(IllegalArgumentException.class, () -> JaxbParsers.parsePropertyOrPositiveInteger("${" + property + "}")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrPositiveIntegerWithMissingProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + assertThrows(IllegalStateException.class, () -> JaxbParsers.parsePropertyOrPositiveInteger("${" + property + "}")); + } + + @Test + public void parsePropertyOrNonNegativeInteger() { + } + + @Test + public void testParsePropertyOrNonNegativeIntegerFromNullString() { + assertThrows(NullPointerException.class, () -> JaxbParsers.parsePropertyOrNonNegativeInteger(null)); + } + + @Test + public void testParsePropertyOrNonNegativeIntegerValidWithoutProperty() { + assertThat(JaxbParsers.parsePropertyOrNonNegativeInteger("123"), is(BigInteger.valueOf(123))); + } + + @Test + public void testParsePropertyOrNonNegativeIntegerInvalidWithoutProperty() { + assertThrows(NumberFormatException.class, () -> JaxbParsers.parsePropertyOrNonNegativeInteger("foobar")); + } + + @Test + public void testParsePropertyOrNonNegativeIntegerOutOfRangeWithoutProperty() { + assertThrows(IllegalArgumentException.class, () -> JaxbParsers.parsePropertyOrNonNegativeInteger("-1")); + } + + @Test + public void testParsePropertyOrNonNegativeIntegerValidWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "123"); + try { + assertThat(JaxbParsers.parsePropertyOrNonNegativeInteger("${" + property + "}"), is(BigInteger.valueOf(123))); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrNonNegativeIntegerInvalidWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "barfoo"); + try { + assertThrows(NumberFormatException.class, () -> JaxbParsers.parsePropertyOrNonNegativeInteger("${" + property + "}")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrNonNegativeIntegerOutOfRangeWithProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + System.setProperty(property, "-1"); + try { + assertThrows(IllegalArgumentException.class, () -> JaxbParsers.parsePropertyOrNonNegativeInteger("${" + property + "}")); + } finally { + System.clearProperty(property); + } + } + + @Test + public void testParsePropertyOrNonNegativeIntegerWithMissingProperty() { + String property = PROPERTY_PREFIX + testName.getMethodName(); + assertThrows(IllegalStateException.class, () -> JaxbParsers.parsePropertyOrNonNegativeInteger("${" + property + "}")); + } + + @Test + public void parseStringWithProperties() { + } + + @Test + public void testParseStringWithPropertiesFromNullString() { + assertThrows(NullPointerException.class, () -> JaxbParsers.parseStringWithProperties(null)); + } + + @Test + public void testParseStringWithPropertiesWithoutProperties() { + assertThat(JaxbParsers.parseStringWithProperties("foo${bar"), is("foo${bar")); + assertThat(JaxbParsers.parseStringWithProperties("foobar"), is("foobar")); + assertThat(JaxbParsers.parseStringWithProperties("foo}bar"), is("foo}bar")); + assertThat(JaxbParsers.parseStringWithProperties("foo$bar"), is("foo$bar")); + } + + @Test + public void testParseStringWithPropertiesWithProperties() { + String foo = PROPERTY_PREFIX + testName.getMethodName() + ":foo"; + String bar = PROPERTY_PREFIX + testName.getMethodName() + ":bar"; + System.setProperty(foo, "foo"); + System.setProperty(bar, "bar"); + try { + assertThat(JaxbParsers.parseStringWithProperties("start:${" + foo + "}:middle:${" + bar + "}:end"), is("start:foo:middle:bar:end")); + } finally { + System.clearProperty(foo); + System.clearProperty(bar); + } + } + + @Test + public void testParseStringWithPropertiesWithMissingProperty() { + String foo = PROPERTY_PREFIX + testName.getMethodName() + ":foo"; + String bar = PROPERTY_PREFIX + testName.getMethodName() + ":bar"; + assertThat(System.getProperty(bar), is(nullValue())); + System.setProperty(foo, "foo"); + try { + assertThrows(IllegalStateException.class, () -> JaxbParsers.parseStringWithProperties("start:${" + foo + "}:middle:${" + bar + "}:end")); + } finally { + System.clearProperty(foo); + } + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/NiResilience.java b/ehcache-xml/src/test/java/org/ehcache/xml/NiResilience.java new file mode 100644 index 0000000000..1235dae930 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/NiResilience.java @@ -0,0 +1,19 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.xml; + +public class NiResilience { +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/PropertySubstitutionTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/PropertySubstitutionTest.java new file mode 100644 index 0000000000..6993783696 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/PropertySubstitutionTest.java @@ -0,0 +1,119 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourceType; +import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; +import org.ehcache.impl.config.loaderwriter.writebehind.DefaultWriteBehindConfiguration; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.hamcrest.CustomMatcher; +import org.hamcrest.CustomTypeSafeMatcher; +import org.junit.Test; + +import javax.xml.bind.UnmarshalException; +import java.net.URL; +import java.time.Duration; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThrows; +import static org.terracotta.utilities.test.matchers.Matchers.causedBy; + +public class PropertySubstitutionTest { + + @Test + public void testMissingProperties() { + final URL resource = PropertySubstitutionTest.class.getResource("/configs/ehcache-system-props.xml"); + XmlConfigurationException failure = assertThrows(XmlConfigurationException.class, () -> new XmlConfiguration(resource)); + assertThat(failure, causedBy(instanceOf(UnmarshalException.class))); + } + + @Test + public void testSubstitutions() { + Map neededProperties = new HashMap<>(); + neededProperties.put("ehcache.persistence.directory", "foobar"); + neededProperties.put("ehcache.thread-pools.min-size", "0"); + neededProperties.put("ehcache.thread-pools.max-size", "4"); + neededProperties.put("ehcache.expiry.ttl", "10"); + neededProperties.put("ehcache.expiry.tti", "20"); + neededProperties.put("ehcache.loader-writer.write-behind.size", "1000"); + neededProperties.put("ehcache.loader-writer.write-behind.concurrency", "4"); + neededProperties.put("ehcache.loader-writer.write-behind.batching.batch-size", "100"); + neededProperties.put("ehcache.loader-writer.write-behind.batching.max-write-delay", "10"); + neededProperties.put("ehcache.disk-store-settings.writer-concurrency", "8"); + neededProperties.put("ehcache.disk-store-settings.disk-segments", "16"); + neededProperties.put("ehcache.resources.heap", "1024"); + neededProperties.put("ehcache.resources.offheap", "2048"); + neededProperties.put("ehcache.resources.disk", "4096"); + + System.getProperties().putAll(neededProperties); + try { + final URL resource = PropertySubstitutionTest.class.getResource("/configs/ehcache-system-props.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + + Collection> serviceCreationConfigurations = xmlConfig.getServiceCreationConfigurations(); + + assertThat(findSingletonAmongst(DefaultPersistenceConfiguration.class, serviceCreationConfigurations).getRootDirectory().getAbsolutePath(), either( + is("/dir/path/foobar/tail")).or(new CustomTypeSafeMatcher("matches pattern [A-Z]:\\dir\\path\\foobar\\tail") { + @Override + protected boolean matchesSafely(String item) { + return item.matches("[A-Z]:\\\\dir\\\\path\\\\foobar\\\\tail"); + } + })); + PooledExecutionServiceConfiguration.PoolConfiguration poolConfiguration = findSingletonAmongst(PooledExecutionServiceConfiguration.class, serviceCreationConfigurations).getPoolConfigurations().get("theone"); + assertThat(poolConfiguration.minSize(), is(0)); + assertThat(poolConfiguration.maxSize(), is(4)); + + CacheConfiguration testCacheConfig = xmlConfig.getCacheConfigurations().get("test"); + assertThat(testCacheConfig.getExpiryPolicy().getExpiryForCreation(null, null), is(Duration.ofHours(10))); + assertThat(testCacheConfig.getExpiryPolicy().getExpiryForAccess(null, null), is(nullValue())); + assertThat(testCacheConfig.getExpiryPolicy().getExpiryForUpdate(null, null, null), is(Duration.ofHours(10))); + + DefaultWriteBehindConfiguration writeBehindConfiguration = findSingletonAmongst(DefaultWriteBehindConfiguration.class, testCacheConfig.getServiceConfigurations()); + assertThat(writeBehindConfiguration.getConcurrency(), is(4)); + assertThat(writeBehindConfiguration.getMaxQueueSize(), is(1000)); + assertThat(writeBehindConfiguration.getBatchingConfiguration().getBatchSize(), is(100)); + assertThat(writeBehindConfiguration.getBatchingConfiguration().getMaxDelay(), is(10L)); + + OffHeapDiskStoreConfiguration diskStoreConfiguration = findSingletonAmongst(OffHeapDiskStoreConfiguration.class, testCacheConfig.getServiceConfigurations()); + assertThat(diskStoreConfiguration.getDiskSegments(), is(16)); + assertThat(diskStoreConfiguration.getWriterConcurrency(), is(8)); + + assertThat(testCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), is(1024L)); + assertThat(testCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getSize(), is(2048L)); + assertThat(testCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), is(4096L)); + + CacheConfiguration anotherTestCacheConfig = xmlConfig.getCacheConfigurations().get("another-test"); + assertThat(anotherTestCacheConfig.getExpiryPolicy().getExpiryForCreation(null, null), is(Duration.ofMillis(20))); + assertThat(anotherTestCacheConfig.getExpiryPolicy().getExpiryForAccess(null, null), is(Duration.ofMillis(20))); + assertThat(anotherTestCacheConfig.getExpiryPolicy().getExpiryForUpdate(null, null, null), is(Duration.ofMillis(20))); + } finally { + neededProperties.keySet().forEach(System::clearProperty); + } + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/ShrubberyResilience.java b/ehcache-xml/src/test/java/org/ehcache/xml/ShrubberyResilience.java new file mode 100644 index 0000000000..dd7cf9776a --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/ShrubberyResilience.java @@ -0,0 +1,19 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.xml; + +public class ShrubberyResilience { +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java new file mode 100644 index 0000000000..4b50e7a2ff --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java @@ -0,0 +1,871 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.ResourceType; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.util.ClassLoading; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.config.copy.DefaultCopierConfiguration; +import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; +import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; +import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; +import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration.PoolConfiguration; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; +import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; +import org.ehcache.spi.loaderwriter.WriteBehindConfiguration.BatchingConfiguration; +import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.hamcrest.CoreMatchers; +import org.hamcrest.Matchers; +import org.hamcrest.core.IsCollectionContaining; +import org.hamcrest.core.IsEqual; +import org.hamcrest.core.IsNull; +import org.junit.Test; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXParseException; + +import com.pany.ehcache.copier.AnotherPersonCopier; +import com.pany.ehcache.copier.Description; +import com.pany.ehcache.copier.DescriptionCopier; +import com.pany.ehcache.copier.Person; +import com.pany.ehcache.copier.PersonCopier; +import com.pany.ehcache.serializer.TestSerializer; +import com.pany.ehcache.serializer.TestSerializer2; +import com.pany.ehcache.serializer.TestSerializer3; +import com.pany.ehcache.serializer.TestSerializer4; + +import java.io.File; +import java.net.URL; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; + +import static java.util.Collections.nCopies; +import static java.util.concurrent.TimeUnit.SECONDS; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; +import static org.ehcache.core.util.ClassLoading.getDefaultClassLoader; +import static org.ehcache.xml.XmlConfiguration.getClassForName; +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.CoreMatchers.either; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.isIn; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.collection.IsIterableContainingInOrder.contains; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsCollectionContaining.hasItem; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.hamcrest.core.StringContains.containsString; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; + +/** + * + * @author Chris Dennis + */ +public class XmlConfigurationTest { + + @Test + public void testDefaultTypesConfig() throws Exception { + URL resource = XmlConfigurationTest.class.getResource("/configs/defaultTypes-cache.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(resource); + + assertThat(xmlConfig.getCacheConfigurations().keySet(), hasItem("foo")); + assertThat(xmlConfig.getCacheConfigurations().get("foo").getKeyType(), sameInstance((Class)Object.class)); + assertThat(xmlConfig.getCacheConfigurations().get("foo").getValueType(), sameInstance((Class)Object.class)); + + assertThat(xmlConfig.getCacheConfigurations().keySet(), hasItem("bar")); + assertThat(xmlConfig.getCacheConfigurations().get("bar").getKeyType(), sameInstance((Class)Number.class)); + assertThat(xmlConfig.getCacheConfigurations().get("bar").getValueType(), sameInstance((Class)Object.class)); + + assertThat(xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Object.class, Object.class, heap(10)), notNullValue()); + + //Allow the key/value to be assignable for xml configuration in case of type definition in template class + assertThat(xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Number.class, Object.class, heap(10)), notNullValue()); + } + + @Test + public void testNonExistentAdvisorClassInCacheThrowsException() throws Exception { + try { + new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/nonExistentAdvisor-cache.xml")); + fail(); + } catch (XmlConfigurationException xce) { + assertThat(xce.getCause(), instanceOf(ClassNotFoundException.class)); + } + } + + @Test + public void testNonExistentAdvisorClassInTemplateThrowsException() throws Exception { + try { + new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/nonExistentAdvisor-template.xml")); + fail(); + } catch (XmlConfigurationException xce) { + assertThat(xce.getCause(), instanceOf(ClassNotFoundException.class)); + } + } + + @Test + public void testOneServiceConfig() throws Exception { + URL resource = XmlConfigurationTest.class.getResource("/configs/one-service.xml"); + Configuration config = new XmlConfiguration(new XmlConfiguration(resource)); + + assertThat(config.getServiceCreationConfigurations(), IsCollectionContaining.hasItem(instanceOf(BarConfiguration.class))); + assertThat(config.getCacheConfigurations().keySet(), hasSize(0)); + } + + @Test + public void testOneCacheConfig() throws Exception { + URL resource = XmlConfigurationTest.class.getResource("/configs/one-cache.xml"); + Configuration config = new XmlConfiguration(new XmlConfiguration(resource)); + + assertThat(config.getServiceCreationConfigurations(), hasSize(0)); + assertThat(config.getCacheConfigurations().keySet(), hasItem("bar")); + assertThat(config.getCacheConfigurations().get("bar").getServiceConfigurations(), IsCollectionContaining.hasItem(instanceOf(FooConfiguration.class))); + } + + @Test + public void testAllExtensions() { + URL resource = XmlConfigurationTest.class.getResource("/configs/all-extensions.xml"); + Configuration config = new XmlConfiguration(new XmlConfiguration(resource)); + assertThat(config.getServiceCreationConfigurations(), IsCollectionContaining.hasItem(instanceOf(BarConfiguration.class))); + CacheConfiguration cacheConfiguration = config.getCacheConfigurations().get("fancy"); + assertThat(cacheConfiguration.getServiceConfigurations(), hasItem(instanceOf(FooConfiguration.class))); + assertThat(cacheConfiguration.getResourcePools().getResourceTypeSet(), hasItem(instanceOf(BazResource.Type.class))); + } + + @Test + public void testOneCacheConfigWithTemplate() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/template-cache.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(resource); + + assertThat(xmlConfig.getServiceCreationConfigurations(), hasSize(0)); + assertThat(xmlConfig.getCacheConfigurations().keySet(), hasItem("bar")); + assertThat(xmlConfig.getCacheConfigurations().get("bar").getServiceConfigurations(), IsCollectionContaining.hasItem(instanceOf(FooConfiguration.class))); + assertThat(xmlConfig.getCacheConfigurations().get("bar").getKeyType(), sameInstance((Class) Number.class)); + assertThat(xmlConfig.getCacheConfigurations().get("bar").getValueType(), sameInstance((Class)String.class)); + + final CacheConfigurationBuilder example = xmlConfig.newCacheConfigurationBuilderFromTemplate("example", String.class, String.class, + newResourcePoolsBuilder().heap(5, EntryUnit.ENTRIES)); + assertThat(example.build().getExpiryPolicy(), + equalTo((ExpiryPolicy) ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(30)))); + + try { + xmlConfig.newCacheConfigurationBuilderFromTemplate("example", String.class, Number.class); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), is("CacheTemplate 'example' declares value type of java.lang.String. Provided: class java.lang.Number")); + } + try { + xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Number.class, String.class); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), is("CacheTemplate 'example' declares key type of java.lang.String. Provided: class java.lang.Number")); + } + + assertThat(xmlConfig.newCacheConfigurationBuilderFromTemplate("bar", Object.class, Object.class), nullValue()); + } + + @Test + public void testExpiryIsParsed() throws Exception { + URL resource = XmlConfigurationTest.class.getResource("/configs/expiry-caches.xml"); + final XmlConfiguration xmlConfiguration = new XmlConfiguration(resource); + + ExpiryPolicy expiry = xmlConfiguration.getCacheConfigurations().get("none").getExpiryPolicy(); + ExpiryPolicy value = ExpiryPolicyBuilder.noExpiration(); + assertThat(expiry, is(value)); + + expiry = xmlConfiguration.getCacheConfigurations().get("notSet").getExpiryPolicy(); + value = ExpiryPolicyBuilder.noExpiration(); + assertThat(expiry, is(value)); + + expiry = xmlConfiguration.getCacheConfigurations().get("class").getExpiryPolicy(); + assertThat(expiry, CoreMatchers.instanceOf(com.pany.ehcache.MyExpiry.class)); + + expiry = xmlConfiguration.getCacheConfigurations().get("deprecatedClass").getExpiryPolicy(); + assertThat(expiry.getExpiryForCreation(null, null), is(Duration.ofSeconds(42))); + assertThat(expiry.getExpiryForAccess(null, () -> null), is(Duration.ofSeconds(42))); + assertThat(expiry.getExpiryForUpdate(null, () -> null, null), is(Duration.ofSeconds(42))); + + expiry = xmlConfiguration.getCacheConfigurations().get("tti").getExpiryPolicy(); + value = ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(500)); + assertThat(expiry, equalTo(value)); + + expiry = xmlConfiguration.getCacheConfigurations().get("ttl").getExpiryPolicy(); + value = ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(30)); + assertThat(expiry, equalTo(value)); + } + + @Test + public void testInvalidCoreConfiguration() throws Exception { + try { + new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/invalid-core.xml")); + fail(); + } catch (XmlConfigurationException e) { + assertThat(e.getCause().getMessage(), + either(containsString("'ehcache:cach'")) + .or(containsString("'{\"http://www.ehcache.org/v3\":cach}'")) + .or(containsString(""))); + } + } + + @Test + public void testInvalidServiceConfiguration() throws Exception { + try { + new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/invalid-service.xml")); + fail(); + } catch (XmlConfigurationException e) { + assertThat(e.getCause().getMessage(), + either(containsString("'foo:bar'")) + .or(containsString("'{\"http://www.example.com/foo\":bar}'")) + .or(containsString(""))); + } + } + + @Test + public void testTwoCachesWithSameAlias() { + try { + new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/invalid-two-caches.xml")); + fail("Two caches with the same alias should not be allowed"); + } catch (XmlConfigurationException e) { + assertThat(e.getMessage(), is("Two caches defined with the same alias: foo")); + } + } + + @Test + public void testExposesProperURL() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/one-cache.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(resource); + + assertThat(xmlConfig.getURL(), equalTo(resource)); + } + + @Test + public void testResourcesCaches() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/resources-caches.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + + CacheConfiguration tieredCacheConfig = xmlConfig.getCacheConfigurations().get("tiered"); + assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(10L)); + assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(100L)); + assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(), is(false)); + + CacheConfiguration tieredPersistentCacheConfig = xmlConfig.getCacheConfigurations().get("tieredPersistent"); + assertThat(tieredPersistentCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(10L)); + assertThat(tieredPersistentCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(100L)); + assertThat(tieredPersistentCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(), is(true)); + + CacheConfiguration tieredOffHeapCacheConfig = xmlConfig.getCacheConfigurations().get("tieredOffHeap"); + assertThat(tieredOffHeapCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(10L)); + assertThat(tieredOffHeapCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getSize(), equalTo(10L)); + assertThat(tieredOffHeapCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getUnit(), equalTo(MemoryUnit.MB)); + + CacheConfiguration explicitHeapOnlyCacheConfig = xmlConfig.getCacheConfigurations().get("explicitHeapOnly"); + assertThat(explicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(15L)); + assertThat(explicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK), is(nullValue())); + + CacheConfiguration implicitHeapOnlyCacheConfig = xmlConfig.getCacheConfigurations().get("directHeapOnly"); + assertThat(implicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(25L)); + assertThat(implicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK), is(nullValue())); + } + + @Test + public void testResourcesTemplates() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/resources-templates.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(resource); + + CacheConfigurationBuilder tieredResourceTemplate = xmlConfig.newCacheConfigurationBuilderFromTemplate("tieredResourceTemplate", String.class, String.class); + assertThat(tieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(5L)); + assertThat(tieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(50L)); + assertThat(tieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(), is(false)); + + CacheConfigurationBuilder persistentTieredResourceTemplate = xmlConfig.newCacheConfigurationBuilderFromTemplate("persistentTieredResourceTemplate", String.class, String.class); + assertThat(persistentTieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(5L)); + assertThat(persistentTieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(50L)); + assertThat(persistentTieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(), is(true)); + + CacheConfigurationBuilder tieredOffHeapResourceTemplate = xmlConfig.newCacheConfigurationBuilderFromTemplate("tieredOffHeapResourceTemplate", String.class, String.class); + assertThat(tieredOffHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(5L)); + assertThat(tieredOffHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getSize(), equalTo(50L)); + assertThat(tieredOffHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getUnit(), equalTo(MemoryUnit.MB)); + + CacheConfigurationBuilder explicitHeapResourceTemplate = xmlConfig.newCacheConfigurationBuilderFromTemplate("explicitHeapResourceTemplate", String.class, String.class); + assertThat(explicitHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(15L)); + assertThat(explicitHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK), is(nullValue())); + + CacheConfiguration tieredCacheConfig = xmlConfig.getCacheConfigurations().get("templatedTieredResource"); + assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(5L)); + assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(50L)); + + CacheConfiguration explicitHeapOnlyCacheConfig = xmlConfig.getCacheConfigurations().get("templatedExplicitHeapResource"); + assertThat(explicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(15L)); + assertThat(explicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK), is(nullValue())); + } + + @Test + public void testNoClassLoaderSpecified() throws Exception { + URL resource = XmlConfigurationTest.class.getResource("/configs/one-cache.xml"); + XmlConfiguration config = new XmlConfiguration(new XmlConfiguration(resource)); + + assertSame(config.getClassLoader(), ClassLoading.getDefaultClassLoader()); + assertNull(config.getCacheConfigurations().get("bar").getClassLoader()); + } + + @Test + public void testClassLoaderSpecified() throws Exception { + ClassLoader cl = new ClassLoader() { + // + }; + + URL resource = XmlConfigurationTest.class.getResource("/configs/one-cache.xml"); + XmlConfiguration config= new XmlConfiguration(new XmlConfiguration(resource, cl)); + + assertSame(cl, config.getClassLoader()); + assertNull(config.getCacheConfigurations().get("bar").getClassLoader()); + } + + @Test + public void testCacheClassLoaderSpecified() throws Exception { + ClassLoader cl = new ClassLoader() { + // + }; + + ClassLoader cl2 = new ClassLoader() { + // + }; + + assertNotSame(cl, cl2); + + Map loaders = new HashMap<>(); + loaders.put("bar", cl2); + URL resource = XmlConfigurationTest.class.getResource("/configs/one-cache.xml"); + XmlConfiguration config = new XmlConfiguration(new XmlConfiguration(resource, cl, loaders)); + + assertSame(cl, config.getClassLoader()); + assertSame(cl2, config.getCacheConfigurations().get("bar").getClassLoader()); + } + + @Test + public void testDefaultSerializerConfiguration() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/default-serializer.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + + assertThat(xmlConfig.getServiceCreationConfigurations().size(), is(1)); + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + + assertThat(configuration, instanceOf(DefaultSerializationProviderConfiguration.class)); + + DefaultSerializationProviderConfiguration factoryConfiguration = (DefaultSerializationProviderConfiguration) configuration; + assertThat(factoryConfiguration.getDefaultSerializers().size(), is(4)); + assertThat(factoryConfiguration.getDefaultSerializers().get(CharSequence.class), Matchers.equalTo(TestSerializer.class)); + assertThat(factoryConfiguration.getDefaultSerializers().get(Number.class), Matchers.equalTo(TestSerializer2.class)); + assertThat(factoryConfiguration.getDefaultSerializers().get(Long.class), Matchers.equalTo(TestSerializer3.class)); + assertThat(factoryConfiguration.getDefaultSerializers().get(Integer.class), Matchers.equalTo(TestSerializer4.class)); + + + List> orderedServiceConfigurations = new ArrayList<>(xmlConfig.getCacheConfigurations() + .get("baz") + .getServiceConfigurations()); + // order services by class name so the test can rely on some sort of ordering + orderedServiceConfigurations.sort(Comparator.comparing(o -> o.getClass().getName())); + Iterator> it = orderedServiceConfigurations.iterator(); + + DefaultSerializerConfiguration keySerializationProviderConfiguration = (DefaultSerializerConfiguration) it.next(); + assertThat(keySerializationProviderConfiguration.getType(), isIn(new DefaultSerializerConfiguration.Type[] { DefaultSerializerConfiguration.Type.KEY, DefaultSerializerConfiguration.Type.VALUE })); + } + + @Test + public void testThreadPoolsConfiguration() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/thread-pools.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + + assertThat(xmlConfig.getServiceCreationConfigurations(), contains(instanceOf(PooledExecutionServiceConfiguration.class))); + + PooledExecutionServiceConfiguration configuration = (PooledExecutionServiceConfiguration) xmlConfig.getServiceCreationConfigurations().iterator().next(); + + assertThat(configuration.getPoolConfigurations().keySet(), containsInAnyOrder("big", "small")); + + PoolConfiguration small = configuration.getPoolConfigurations().get("small"); + assertThat(small.minSize(), is(1)); + assertThat(small.maxSize(), is(1)); + + PoolConfiguration big = configuration.getPoolConfigurations().get("big"); + assertThat(big.minSize(), is(4)); + assertThat(big.maxSize(), is(32)); + + assertThat(configuration.getDefaultPoolAlias(), is("big")); + } + + @Test + public void testCacheCopierConfiguration() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/cache-copiers.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + + assertThat(xmlConfig.getServiceCreationConfigurations().size(), is(1)); + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + + assertThat(configuration, instanceOf(DefaultCopyProviderConfiguration.class)); + + DefaultCopyProviderConfiguration factoryConfiguration = (DefaultCopyProviderConfiguration) configuration; + assertThat(factoryConfiguration.getDefaults().size(), is(2)); + assertThat(factoryConfiguration.getDefaults().get(Description.class).getClazz(), + Matchers.>>equalTo(DescriptionCopier.class)); + assertThat(factoryConfiguration.getDefaults().get(Person.class).getClazz(), + Matchers.>>equalTo(PersonCopier.class)); + + + Collection> configs = xmlConfig.getCacheConfigurations().get("baz").getServiceConfigurations(); + for(ServiceConfiguration config: configs) { + if(config instanceof DefaultCopierConfiguration) { + DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration) config; + if(copierConfig.getType() == DefaultCopierConfiguration.Type.KEY) { + assertEquals(SerializingCopier.class, copierConfig.getClazz()); + } else { + assertEquals(AnotherPersonCopier.class, copierConfig.getClazz()); + } + } + } + + configs = xmlConfig.getCacheConfigurations().get("bak").getServiceConfigurations(); + for(ServiceConfiguration config: configs) { + if(config instanceof DefaultCopierConfiguration) { + DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration) config; + if(copierConfig.getType() == DefaultCopierConfiguration.Type.KEY) { + assertEquals(SerializingCopier.class, copierConfig.getClazz()); + } else { + assertEquals(AnotherPersonCopier.class, copierConfig.getClazz()); + } + } + } + } + + @Test + public void testPersistenceConfig() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/persistence-config.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + + ServiceCreationConfiguration serviceConfig = xmlConfig.getServiceCreationConfigurations().iterator().next(); + assertThat(serviceConfig, instanceOf(DefaultPersistenceConfiguration.class)); + + DefaultPersistenceConfiguration persistenceConfiguration = (DefaultPersistenceConfiguration)serviceConfig; + assertThat(persistenceConfiguration.getRootDirectory(), is(new File(" \n\t/my/caching/persistence directory\r\n "))); + } + + @Test + public void testPersistenceConfigXmlPersistencePathHasWhitespaces() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/persistence-config.xml"); + DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); + DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); + Document doc = dBuilder.parse(new File(resource.toURI())); + + Element persistence = (Element) doc.getElementsByTagName("ehcache:persistence").item(0); + String directoryValue = persistence.getAttribute("directory"); + assertThat(directoryValue, containsString(" ")); + assertThat(directoryValue, containsString("\r")); + assertThat(directoryValue, containsString("\n")); + assertThat(directoryValue, containsString("\t")); + } + + @Test + public void testWriteBehind() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/writebehind-cache.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(resource); + + Collection> serviceConfiguration = xmlConfig.getCacheConfigurations().get("bar").getServiceConfigurations(); + + assertThat(serviceConfiguration, IsCollectionContaining.hasItem(instanceOf(WriteBehindConfiguration.class))); + + serviceConfiguration = xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Number.class, String.class).build().getServiceConfigurations(); + + assertThat(serviceConfiguration, IsCollectionContaining.hasItem(instanceOf(WriteBehindConfiguration.class))); + + for (ServiceConfiguration configuration : serviceConfiguration) { + if(configuration instanceof WriteBehindConfiguration) { + BatchingConfiguration batchingConfig = ((WriteBehindConfiguration) configuration).getBatchingConfiguration(); + assertThat(batchingConfig.getMaxDelay(), is(10L)); + assertThat(batchingConfig.getMaxDelayUnit(), is(SECONDS)); + assertThat(batchingConfig.isCoalescing(), is(false)); + assertThat(batchingConfig.getBatchSize(), is(2)); + assertThat(((WriteBehindConfiguration) configuration).getConcurrency(), is(1)); + assertThat(((WriteBehindConfiguration) configuration).getMaxQueueSize(), is(10)); + break; + } + } + } + + @Test + public void testCacheEventListener() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/ehcache-cacheEventListener.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + assertThat(xmlConfig.getCacheConfigurations().size(), is(2)); + + Collection configuration = xmlConfig.getCacheConfigurations().get("bar").getServiceConfigurations(); + checkListenerConfigurationExists(configuration); + } + + @Test + public void testCacheEventListenerThroughTemplate() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/ehcache-cacheEventListener.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(resource); + CacheConfiguration cacheConfig = xmlConfig.getCacheConfigurations().get("template1"); + checkListenerConfigurationExists(cacheConfig.getServiceConfigurations()); + + CacheConfigurationBuilder templateConfig = xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Number.class, String.class); + assertThat(templateConfig.getService(DefaultCacheEventListenerConfiguration.class), notNullValue()); + } + + @Test + public void testDefaulSerializerXmlsSerializersValueHasWhitespaces() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/default-serializer.xml"); + DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); + DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); + Document doc = dBuilder.parse(new File(resource.toURI())); + + NodeList nList = doc.getElementsByTagName("ehcache:serializer"); + + assertThat(nList.item(2).getFirstChild().getNodeValue(), containsString(" ")); + assertThat(nList.item(2).getFirstChild().getNodeValue(), containsString("\n")); + + assertThat(nList.item(3).getFirstChild().getNodeValue(), containsString(" ")); + assertThat(nList.item(3).getFirstChild().getNodeValue(), containsString("\n")); + + + nList = doc.getElementsByTagName("ehcache:key-type"); + + assertThat(nList.item(0).getFirstChild().getNodeValue(), containsString(" ")); + assertThat(nList.item(0).getFirstChild().getNodeValue(), containsString("\n")); + + assertThat(nList.item(1).getFirstChild().getNodeValue(), containsString(" ")); + assertThat(nList.item(1).getFirstChild().getNodeValue(), containsString("\n")); + + nList = doc.getElementsByTagName("ehcache:value-type"); + assertThat(nList.item(0).getFirstChild().getNodeValue(), containsString(" ")); + assertThat(nList.item(0).getFirstChild().getNodeValue(), containsString("\n")); + + assertThat(nList.item(1).getFirstChild().getNodeValue(), containsString(" ")); + assertThat(nList.item(1).getFirstChild().getNodeValue(), containsString("\n")); + + + } + + @Test + public void testDiskStoreSettings() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/resources-caches.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + + CacheConfiguration cacheConfig = xmlConfig.getCacheConfigurations().get("tiered"); + + OffHeapDiskStoreConfiguration diskConfig = findSingletonAmongst(OffHeapDiskStoreConfiguration.class, cacheConfig.getServiceConfigurations()); + + assertThat(diskConfig.getThreadPoolAlias(), is("some-pool")); + assertThat(diskConfig.getWriterConcurrency(), is(2)); + assertThat(diskConfig.getDiskSegments(), is(4)); + } + + @Test + public void testNullUrlInConstructorThrowsNPE() throws Exception { + NullPointerException thrown = assertThrows(NullPointerException.class, () -> new XmlConfiguration((URL) null, mock(ClassLoader.class), getClassLoaderMapMock())); + assertThat(thrown, hasProperty("message", Matchers.is("The url can not be null"))); + } + + @Test + public void testNullClassLoaderInConstructorThrowsNPE() throws Exception { + NullPointerException thrown = assertThrows(NullPointerException.class, () -> new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml"), null, getClassLoaderMapMock())); + assertThat(thrown, hasProperty("message", Matchers.is("The classLoader can not be null"))); + } + + @Test + public void testNullCacheClassLoaderMapInConstructorThrowsNPE() throws Exception { + NullPointerException thrown = assertThrows(NullPointerException.class, () -> new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml"), mock(ClassLoader.class), null)); + assertThat(thrown, hasProperty("message", Matchers.is("The cacheClassLoaders map can not be null"))); + } + + @Test + public void testSizeOfEngineLimits() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/sizeof-engine.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + DefaultSizeOfEngineProviderConfiguration sizeOfEngineProviderConfig = findSingletonAmongst(DefaultSizeOfEngineProviderConfiguration.class, xmlConfig.getServiceCreationConfigurations()); + + assertThat(sizeOfEngineProviderConfig, notNullValue()); + assertEquals(sizeOfEngineProviderConfig.getMaxObjectGraphSize(), 200); + assertEquals(sizeOfEngineProviderConfig.getMaxObjectSize(), 100000); + + CacheConfiguration cacheConfig = xmlConfig.getCacheConfigurations().get("usesDefaultSizeOfEngine"); + DefaultSizeOfEngineConfiguration sizeOfEngineConfig = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig.getServiceConfigurations()); + + assertThat(sizeOfEngineConfig, nullValue()); + + CacheConfiguration cacheConfig1 = xmlConfig.getCacheConfigurations().get("usesConfiguredInCache"); + DefaultSizeOfEngineConfiguration sizeOfEngineConfig1 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig1.getServiceConfigurations()); + + assertThat(sizeOfEngineConfig1, notNullValue()); + assertEquals(sizeOfEngineConfig1.getMaxObjectGraphSize(), 500); + assertEquals(sizeOfEngineConfig1.getMaxObjectSize(), 200000); + + CacheConfiguration cacheConfig2 = xmlConfig.getCacheConfigurations().get("usesPartialOneConfiguredInCache"); + DefaultSizeOfEngineConfiguration sizeOfEngineConfig2 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig2.getServiceConfigurations()); + + assertThat(sizeOfEngineConfig2, notNullValue()); + assertThat(sizeOfEngineConfig2.getMaxObjectGraphSize(), is(500L)); + assertThat(sizeOfEngineConfig2.getMaxObjectSize(), is(Long.MAX_VALUE)); + + CacheConfiguration cacheConfig3 = xmlConfig.getCacheConfigurations().get("usesPartialTwoConfiguredInCache"); + DefaultSizeOfEngineConfiguration sizeOfEngineConfig3 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig3.getServiceConfigurations()); + + assertThat(sizeOfEngineConfig3, notNullValue()); + assertThat(sizeOfEngineConfig3.getMaxObjectGraphSize(), is(1000L)); + assertThat(sizeOfEngineConfig3.getMaxObjectSize(), is(200000L)); + } + + @Test + public void testCacheManagerDefaultObjectGraphSize() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/sizeof-engine-cm-defaults-one.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + DefaultSizeOfEngineProviderConfiguration sizeOfEngineProviderConfig = findSingletonAmongst(DefaultSizeOfEngineProviderConfiguration.class, xmlConfig.getServiceCreationConfigurations()); + + assertThat(sizeOfEngineProviderConfig, notNullValue()); + assertThat(sizeOfEngineProviderConfig.getMaxObjectGraphSize(), is(1000L)); + assertThat(sizeOfEngineProviderConfig.getMaxObjectSize(), is(100000L)); + } + + @Test + public void testCacheManagerDefaultObjectSize() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/sizeof-engine-cm-defaults-two.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + DefaultSizeOfEngineProviderConfiguration sizeOfEngineProviderConfig = findSingletonAmongst(DefaultSizeOfEngineProviderConfiguration.class, xmlConfig.getServiceCreationConfigurations()); + + assertThat(sizeOfEngineProviderConfig, notNullValue()); + assertThat(sizeOfEngineProviderConfig.getMaxObjectGraphSize(), is(200L)); + assertThat(sizeOfEngineProviderConfig.getMaxObjectSize(), is(Long.MAX_VALUE)); + } + + @Test + public void testCustomResource() throws Exception { + try { + new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/custom-resource.xml")); + fail(); + } catch (XmlConfigurationException xce) { + assertThat(xce.getMessage(), containsString("Can't find parser for element")); + } + } + + @Test + public void testResilienceStrategy() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/resilience-config.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + CacheConfiguration cacheConfig = xmlConfig.getCacheConfigurations().get("ni"); + + DefaultResilienceStrategyConfiguration resilienceStrategyConfiguration = findSingletonAmongst(DefaultResilienceStrategyConfiguration.class, cacheConfig.getServiceConfigurations()); + assertThat(resilienceStrategyConfiguration.getClazz(), sameInstance(NiResilience.class)); + } + + @Test + public void testResilienceStrategyFromTemplate() throws Exception { + final URL resource = XmlConfigurationTest.class.getResource("/configs/resilience-config.xml"); + XmlConfiguration xmlConfig = new XmlConfiguration(new XmlConfiguration(resource)); + CacheConfiguration cacheConfig = xmlConfig.getCacheConfigurations().get("shrubbery"); + + DefaultResilienceStrategyConfiguration resilienceStrategyConfiguration = findSingletonAmongst(DefaultResilienceStrategyConfiguration.class, cacheConfig.getServiceConfigurations()); + assertThat(resilienceStrategyConfiguration.getClazz(), sameInstance(ShrubberyResilience.class)); + } + + @Test + public void testMultithreadedXmlParsing() throws InterruptedException, ExecutionException { + Callable parserTask = () -> new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml")); + + ExecutorService service = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); + try { + for (Future c : service.invokeAll(nCopies(10, parserTask))) { + assertThat(c.get(), IsNull.notNullValue()); + } + } finally { + service.shutdown(); + } + } + + @Test + public void testCompleteXmlToString() { + URL resource = XmlConfigurationTest.class.getResource("/configs/ehcache-complete.xml"); + Configuration config = new XmlConfiguration(resource); + XmlConfiguration xmlConfig = new XmlConfiguration(config); + assertThat(xmlConfig.toString(), isSameConfigurationAs(resource)); + } + + @Test + public void testPrettyTypeNames() { + URL resource = XmlConfigurationTest.class.getResource("/configs/pretty-typed-caches.xml"); + Configuration config = new XmlConfiguration(new XmlConfiguration(resource)); + + CacheConfiguration byteArray = config.getCacheConfigurations().get("byte-array"); + assertThat(byteArray.getValueType(), equalTo(byte[].class)); + + CacheConfiguration stringArray = config.getCacheConfigurations().get("string-array"); + assertThat(stringArray.getValueType(), equalTo(String[].class)); + + CacheConfiguration string2dArray = config.getCacheConfigurations().get("string-2d-array"); + assertThat(string2dArray.getValueType(), equalTo(String[][].class)); + + CacheConfiguration mapEntry = config.getCacheConfigurations().get("map-entry"); + assertThat(mapEntry.getValueType(), equalTo(Map.Entry.class)); + } + + @Test + public void testPrimitiveNameConversion() throws ClassNotFoundException { + assertThat(getClassForName("boolean", getDefaultClassLoader()), IsEqual.equalTo(Boolean.TYPE)); + assertThat(getClassForName("byte", getDefaultClassLoader()), IsEqual.equalTo(Byte.TYPE)); + assertThat(getClassForName("short", getDefaultClassLoader()), IsEqual.equalTo(Short.TYPE)); + assertThat(getClassForName("int", getDefaultClassLoader()), IsEqual.equalTo(Integer.TYPE)); + assertThat(getClassForName("long", getDefaultClassLoader()), IsEqual.equalTo(Long.TYPE)); + assertThat(getClassForName("char", getDefaultClassLoader()), IsEqual.equalTo(Character.TYPE)); + assertThat(getClassForName("float", getDefaultClassLoader()), IsEqual.equalTo(Float.TYPE)); + assertThat(getClassForName("double", getDefaultClassLoader()), IsEqual.equalTo(Double.TYPE)); + } + + @Test + public void testPrimitiveArrayClassNameConversion() throws ClassNotFoundException { + assertThat(getClassForName("boolean[]", getDefaultClassLoader()), IsEqual.equalTo(boolean[].class)); + assertThat(getClassForName("byte[]", getDefaultClassLoader()), IsEqual.equalTo(byte[].class)); + assertThat(getClassForName("short[]", getDefaultClassLoader()), IsEqual.equalTo(short[].class)); + assertThat(getClassForName("int[]", getDefaultClassLoader()), IsEqual.equalTo(int[].class)); + assertThat(getClassForName("long[]", getDefaultClassLoader()), IsEqual.equalTo(long[].class)); + assertThat(getClassForName("char[]", getDefaultClassLoader()), IsEqual.equalTo(char[].class)); + assertThat(getClassForName("float[]", getDefaultClassLoader()), IsEqual.equalTo(float[].class)); + assertThat(getClassForName("double[]", getDefaultClassLoader()), IsEqual.equalTo(double[].class)); + } + + @Test + public void testMultiDimensionPrimitiveArrayClassNameConversion() throws ClassNotFoundException { + assertThat(getClassForName("byte[][][][]", getDefaultClassLoader()), IsEqual.equalTo(byte[][][][].class)); + assertThat(getClassForName("short[][][][]", getDefaultClassLoader()), IsEqual.equalTo(short[][][][].class)); + assertThat(getClassForName("int[][][][]", getDefaultClassLoader()), IsEqual.equalTo(int[][][][].class)); + assertThat(getClassForName("long[][][][]", getDefaultClassLoader()), IsEqual.equalTo(long[][][][].class)); + assertThat(getClassForName("char[][][][]", getDefaultClassLoader()), IsEqual.equalTo(char[][][][].class)); + assertThat(getClassForName("float[][][][]", getDefaultClassLoader()), IsEqual.equalTo(float[][][][].class)); + assertThat(getClassForName("double[][][][]", getDefaultClassLoader()), IsEqual.equalTo(double[][][][].class)); + } + + @Test + public void testArrayClassNameConversion() throws ClassNotFoundException { + assertThat(getClassForName("java.lang.String[]", getDefaultClassLoader()), IsEqual.equalTo(String[].class)); + } + + @Test + public void testMultiDimensionArrayClassNameConversion() throws ClassNotFoundException { + assertThat(getClassForName("java.lang.String[][][][]", getDefaultClassLoader()), IsEqual.equalTo(String[][][][].class)); + } + + @Test + public void testInnerClassNameConversion() throws ClassNotFoundException { + assertThat(getClassForName("java.util.Map.Entry", getDefaultClassLoader()), IsEqual.equalTo(Map.Entry.class)); + } + + @Test + public void testInnerClassNameArrayConversion() throws ClassNotFoundException { + assertThat(getClassForName("java.util.Map.Entry[]", getDefaultClassLoader()), IsEqual.equalTo(Map.Entry[].class)); + } + + @Test + public void testUnknownServiceCreation() throws Exception { + URL resource = XmlConfigurationTest.class.getResource("/configs/unknown-service-creation.xml"); + try { + new XmlConfiguration(resource); + } catch (XmlConfigurationException e) { + assertThat(e.getMessage(), is("Cannot confirm XML sub-type correctness. You might be missing client side libraries.")); + assertThat(e.getCause(), instanceOf(SAXParseException.class)); + } + } + + @Test + public void testUnknownService() throws Exception { + URL resource = XmlConfigurationTest.class.getResource("/configs/unknown-service.xml"); + try { + new XmlConfiguration(resource); + } catch (XmlConfigurationException e) { + assertThat(e.getMessage(), is("Cannot confirm XML sub-type correctness. You might be missing client side libraries.")); + assertThat(e.getCause(), instanceOf(SAXParseException.class)); + } + } + + @Test + public void testUnknownResource() throws Exception { + URL resource = XmlConfigurationTest.class.getResource("/configs/unknown-resource.xml"); + try { + new XmlConfiguration(resource); + } catch (XmlConfigurationException e) { + assertThat(e.getMessage(), is("Cannot confirm XML sub-type correctness. You might be missing client side libraries.")); + assertThat(e.getCause(), instanceOf(SAXParseException.class)); + } + } + + private void checkListenerConfigurationExists(Collection configuration) { + int count = 0; + for (Object o : configuration) { + if(o instanceof DefaultCacheEventListenerConfiguration) { + count++; + } + } + assertThat(count, is(1)); + } + + @SuppressWarnings("unchecked") + private Map getClassLoaderMapMock() { + return (Map) mock(Map.class); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/multi/XmlMultiConfigurationTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/multi/XmlMultiConfigurationTest.java new file mode 100644 index 0000000000..d2e9f45be1 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/multi/XmlMultiConfigurationTest.java @@ -0,0 +1,407 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.xml.multi; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.XmlConfigurationMatchers; +import org.ehcache.xml.XmlConfigurationTest; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.junit.Test; +import org.xmlunit.diff.DefaultNodeMatcher; + +import java.net.URISyntaxException; +import java.net.URL; + +import static org.ehcache.xml.XmlConfigurationMatchers.isSameConfigurationAs; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.collection.IsEmptyCollection.empty; +import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; +import static org.hamcrest.collection.IsMapContaining.hasKey; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.hamcrest.core.IsNull.nullValue; +import static org.hamcrest.core.IsSame.sameInstance; +import static org.junit.Assert.fail; +import static org.xmlunit.builder.Input.fromURI; +import static org.xmlunit.diff.ElementSelectors.and; +import static org.xmlunit.diff.ElementSelectors.byNameAndAllAttributes; +import static org.xmlunit.diff.ElementSelectors.byNameAndText; + +public class XmlMultiConfigurationTest { + + @Test + public void testEmptyConfigurationFromBuilder() { + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.fromNothing().build(); + + assertThat(xmlMultiConfiguration.configuration("foo"), nullValue()); + assertThat(xmlMultiConfiguration.configuration("foo", "prod"), nullValue()); + + assertThat(xmlMultiConfiguration.identities(), empty()); + assertThrows(() -> xmlMultiConfiguration.variants("foo"), IllegalArgumentException.class); + + assertThat(xmlMultiConfiguration.toString(), + isSameConfigurationAs("")); + } + + @Test + public void testSimpleConfigurationBuiltFromEmpty() { + Configuration config = emptyConfiguration(); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.fromNothing().withManager("foo", config).build(); + + assertThat(xmlMultiConfiguration.configuration("foo"), sameInstance(config)); + assertThat(xmlMultiConfiguration.configuration("foo", "prod"), sameInstance(config)); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + + assertThat(xmlMultiConfiguration.toString(), + isSameConfigurationAs("" + + "" + + "")); + } + + @Test + public void testVariantConfigurationBuiltFromEmpty() { + Configuration barVariant = emptyConfiguration(); + Configuration bazVariant = emptyConfiguration(); + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.fromNothing() + .withManager("foo").variant("bar", barVariant).variant("baz", bazVariant).build(); + + assertThrows(() -> xmlMultiConfiguration.configuration("foo"), IllegalStateException.class); + + assertThat(xmlMultiConfiguration.configuration("foo", "bar"), sameInstance(barVariant)); + assertThat(xmlMultiConfiguration.configuration("foo", "baz"), sameInstance(bazVariant)); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo")); + assertThat(xmlMultiConfiguration.variants("foo"), containsInAnyOrder("bar", "baz")); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "" + + "" + + "" + + "")); + } + + @Test + public void testMixedConfigurationBuiltFromEmpty() { + Configuration barVariant = emptyConfiguration(); + Configuration bazVariant = emptyConfiguration(); + Configuration fiiConfig = emptyConfiguration(); + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.fromNothing() + .withManager("foo").variant("bar", barVariant).variant("baz", bazVariant) + .withManager("fum").variant("bar", barVariant) + .withManager("fii", fiiConfig).build(); + + assertThrows(() -> xmlMultiConfiguration.configuration("foo"), IllegalStateException.class); + + assertThat(xmlMultiConfiguration.configuration("foo", "bar"), sameInstance(barVariant)); + assertThat(xmlMultiConfiguration.configuration("foo", "baz"), sameInstance(bazVariant)); + assertThat(xmlMultiConfiguration.configuration("fum", "bar"), sameInstance(barVariant)); + assertThat(xmlMultiConfiguration.configuration("fii", "bar"), sameInstance(fiiConfig)); + assertThat(xmlMultiConfiguration.configuration("fii", "baz"), sameInstance(fiiConfig)); + assertThat(xmlMultiConfiguration.configuration("fii"), sameInstance(fiiConfig)); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo", "fii", "fum")); + assertThat(xmlMultiConfiguration.variants("foo"), containsInAnyOrder("bar", "baz")); + assertThat(xmlMultiConfiguration.variants("fum"), containsInAnyOrder("bar")); + assertThat(xmlMultiConfiguration.variants("fii"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "")); + } + + @Test + public void testEmptyConfigurationFromXml() throws URISyntaxException { + URL resource = XmlConfigurationTest.class.getResource("/configs/multi/empty.xml"); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(resource).build(); + + assertThat(xmlMultiConfiguration.configuration("foo"), nullValue()); + assertThat(xmlMultiConfiguration.configuration("foo", "prod"), nullValue()); + + assertThat(xmlMultiConfiguration.identities(), empty()); + assertThrows(() -> xmlMultiConfiguration.variants("foo"), IllegalArgumentException.class); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs(fromURI(resource.toURI()))); + } + + @Test + public void testMultipleConfigurationsFromXml() throws URISyntaxException { + URL resource = XmlConfigurationTest.class.getResource("/configs/multi/multiple-configs.xml"); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(resource).build(); + + assertThat(xmlMultiConfiguration.configuration("foo").getCacheConfigurations(), hasKey("foo")); + assertThat(xmlMultiConfiguration.configuration("foo", "prod").getCacheConfigurations(), hasKey("foo")); + assertThat(xmlMultiConfiguration.configuration("bar").getCacheConfigurations(), hasKey("bar")); + assertThat(xmlMultiConfiguration.configuration("bar", "prod").getCacheConfigurations(), hasKey("bar")); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo", "bar")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + assertThat(xmlMultiConfiguration.variants("bar"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs(fromURI(resource.toURI()))); + } + + @Test + public void testMultipleVariantsFromXml() throws URISyntaxException { + URL resource = XmlConfigurationTest.class.getResource("/configs/multi/multiple-variants.xml"); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(resource).build(); + + assertThrows(() -> xmlMultiConfiguration.configuration("foo"), IllegalStateException.class); + + assertThat(xmlMultiConfiguration.configuration("foo", "development").getCacheConfigurations(), hasKey("foo-dev")); + assertThat(xmlMultiConfiguration.configuration("foo", "production").getCacheConfigurations(), hasKey("foo-prod")); + assertThat(xmlMultiConfiguration.configuration("bar", "development").getCacheConfigurations(), hasKey("bar-dev")); + assertThat(xmlMultiConfiguration.configuration("bar", "production").getCacheConfigurations(), hasKey("bar-prod")); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo", "bar")); + assertThat(xmlMultiConfiguration.variants("foo"), containsInAnyOrder("development", "production")); + assertThat(xmlMultiConfiguration.variants("bar"), containsInAnyOrder("development", "production")); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs(fromURI(resource.toURI()))); + } + + @Test + public void testManagerRemovedFromXml() throws URISyntaxException { + URL resource = XmlConfigurationTest.class.getResource("/configs/multi/multiple-configs.xml"); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(resource).withoutManager("bar").build(); + + assertThat(xmlMultiConfiguration.configuration("foo").getCacheConfigurations(), hasKey("foo")); + assertThat(xmlMultiConfiguration.configuration("foo", "prod").getCacheConfigurations(), hasKey("foo")); + assertThat(xmlMultiConfiguration.configuration("bar"), nullValue()); + assertThat(xmlMultiConfiguration.configuration("bar", "prod"), nullValue()); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "" + + "100" + + "" + + "" + + "")); + } + + @Test + public void testManagerRemovedFromXmlAndReadded() throws URISyntaxException { + URL resource = XmlConfigurationTest.class.getResource("/configs/multi/multiple-configs.xml"); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(resource).withoutManager("bar").withManager("bar", emptyConfiguration()).build(); + + assertThat(xmlMultiConfiguration.configuration("foo").getCacheConfigurations(), hasKey("foo")); + assertThat(xmlMultiConfiguration.configuration("foo", "prod").getCacheConfigurations(), hasKey("foo")); + assertThat(xmlMultiConfiguration.configuration("bar"), notNullValue()); + assertThat(xmlMultiConfiguration.configuration("bar", "prod"), notNullValue()); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo", "bar")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + assertThat(xmlMultiConfiguration.variants("bar"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "" + + "" + + "" + + "" + + "100" + + "" + + "" + + "")); + } + + @Test + public void testManagerAddedToXml() throws URISyntaxException { + URL resource = XmlConfigurationTest.class.getResource("/configs/multi/multiple-configs.xml"); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(resource).withManager("baz", emptyConfiguration()).build(); + + assertThat(xmlMultiConfiguration.configuration("foo").getCacheConfigurations(), hasKey("foo")); + assertThat(xmlMultiConfiguration.configuration("foo", "prod").getCacheConfigurations(), hasKey("foo")); + assertThat(xmlMultiConfiguration.configuration("bar").getCacheConfigurations(), hasKey("bar")); + assertThat(xmlMultiConfiguration.configuration("bar", "prod").getCacheConfigurations(), hasKey("bar")); + assertThat(xmlMultiConfiguration.configuration("baz"), notNullValue()); + assertThat(xmlMultiConfiguration.configuration("baz", "prod"), notNullValue()); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo", "bar", "baz")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + assertThat(xmlMultiConfiguration.variants("bar"), empty()); + assertThat(xmlMultiConfiguration.variants("baz"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "" + + "100" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "100" + + "" + + "" + + "")); + } + + @Test + public void testManagerRemovedFromConfig() throws URISyntaxException { + XmlMultiConfiguration source = XmlMultiConfiguration.fromNothing().withManager("foo", emptyConfiguration()).build(); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(source).withoutManager("foo").build(); + + assertThat(xmlMultiConfiguration.configuration("foo"), nullValue()); + assertThat(xmlMultiConfiguration.configuration("foo", "prod"), nullValue()); + + assertThat(xmlMultiConfiguration.identities(), empty()); + assertThrows(() -> xmlMultiConfiguration.variants("foo"), IllegalArgumentException.class); + + assertThat(xmlMultiConfiguration.toString(), + isSameConfigurationAs("")); + } + + @Test + public void testManagerRemovedFromConfigAndReadded() throws URISyntaxException { + XmlMultiConfiguration source = XmlMultiConfiguration.fromNothing().withManager("foo", emptyConfiguration()).build(); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(source).withoutManager("foo").withManager("foo", emptyConfiguration()).build(); + + assertThat(xmlMultiConfiguration.configuration("foo"), notNullValue()); + assertThat(xmlMultiConfiguration.configuration("foo", "prod"), notNullValue()); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "")); + } + + @Test + public void testManagerAddedToConfig() throws URISyntaxException { + XmlMultiConfiguration source = XmlMultiConfiguration.fromNothing().withManager("foo", emptyConfiguration()).build(); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(source).withManager("baz", emptyConfiguration()).build(); + + assertThat(xmlMultiConfiguration.configuration("foo"), notNullValue()); + assertThat(xmlMultiConfiguration.configuration("foo", "prod"), notNullValue()); + assertThat(xmlMultiConfiguration.configuration("baz"), notNullValue()); + assertThat(xmlMultiConfiguration.configuration("baz", "prod"), notNullValue()); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo", "baz")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + assertThat(xmlMultiConfiguration.variants("baz"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "" + + "")); + } + + @Test + public void testGenerateExtendedConfiguration() throws URISyntaxException { + XmlConfiguration extended = new XmlConfiguration(getClass().getResource("/configs/all-extensions.xml")); + + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.fromNothing().withManager("foo", extended).build(); + + assertThat(xmlMultiConfiguration.configuration("foo"), sameInstance(extended)); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "" + + "" + + "" + + "java.lang.String" + + "java.lang.String" + + "" + + "10" + + "" + + "" + + "" + + "" + + "" + + "" + + "")); + } + + @Test + public void testParseExtendedConfiguration() { + XmlMultiConfiguration xmlMultiConfiguration = XmlMultiConfiguration.from(getClass().getResource("/configs/multi/extended.xml")).build(); + + assertThat(xmlMultiConfiguration.configuration("foo"), notNullValue()); + + assertThat(xmlMultiConfiguration.identities(), containsInAnyOrder("foo")); + assertThat(xmlMultiConfiguration.variants("foo"), empty()); + + assertThat(xmlMultiConfiguration.toString(), isSameConfigurationAs("" + + "" + + "" + + "" + + "" + + "java.lang.String" + + "java.lang.String" + + "" + + "10" + + "" + + "" + + "" + + "" + + "" + + "" + + "")); + } + + @Test(expected = XmlConfigurationException.class) + public void testParseOrdinaryConfiguration() { + XmlMultiConfiguration.from(getClass().getResource("/configs/one-cache.xml")).build(); + } + + private static Configuration emptyConfiguration() { + return ConfigurationBuilder.newConfigurationBuilder().build(); + } + + private static void assertThrows(Runnable task, Class exception) { + try { + task.run(); + fail("Expected " + exception.getSimpleName()); + } catch (AssertionError e) { + throw e; + } catch (Throwable t) { + assertThat(t, instanceOf(exception)); + } + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/provider/CacheEventDispatcherFactoryConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/provider/CacheEventDispatcherFactoryConfigurationParserTest.java new file mode 100644 index 0000000000..eb126f095d --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/provider/CacheEventDispatcherFactoryConfigurationParserTest.java @@ -0,0 +1,58 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.junit.Test; +import org.xml.sax.SAXException; + +import java.io.IOException; + +import javax.xml.bind.JAXBException; +import javax.xml.parsers.ParserConfigurationException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class CacheEventDispatcherFactoryConfigurationParserTest { + + @Test + public void parseServiceCreationConfiguration() throws SAXException, JAXBException, ParserConfigurationException, IOException, ClassNotFoundException { + Configuration xmlConfig = new XmlConfiguration(getClass().getResource("/configs/ehcache-cacheEventListener.xml")); + + assertThat(xmlConfig.getServiceCreationConfigurations()).hasSize(1); + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + assertThat(configuration).isInstanceOf(CacheEventDispatcherFactoryConfiguration.class); + + CacheEventDispatcherFactoryConfiguration providerConfiguration = (CacheEventDispatcherFactoryConfiguration) configuration; + assertThat(providerConfiguration.getThreadPoolAlias()).isEqualTo("events-pool"); + } + + @Test + public void unparseServiceCreationConfiguration() { + Configuration config = ConfigurationBuilder.newConfigurationBuilder() + .withService(new CacheEventDispatcherFactoryConfiguration("foo")).build(); + ConfigType configType = new CacheEventDispatcherFactoryConfigurationParser().unparseServiceCreationConfiguration(config, new ConfigType()); + + assertThat(configType.getEventDispatch().getThreadPool()).isEqualTo("foo"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/provider/CacheManagerPersistenceConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/provider/CacheManagerPersistenceConfigurationParserTest.java new file mode 100644 index 0000000000..0cc26057eb --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/provider/CacheManagerPersistenceConfigurationParserTest.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.junit.Test; +import org.xml.sax.SAXException; + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import javax.xml.bind.JAXBException; +import javax.xml.parsers.ParserConfigurationException; + +import static java.util.stream.Collectors.toList; +import static org.assertj.core.api.Assertions.assertThat; + +public class CacheManagerPersistenceConfigurationParserTest { + + @Test + public void parseServiceCreationConfiguration() throws SAXException, JAXBException, ParserConfigurationException, IOException, ClassNotFoundException { + Configuration xmlConfig = new XmlConfiguration(getClass().getResource("/configs/disk-persistent-cache.xml")); + + List serviceConfig = xmlConfig.getServiceCreationConfigurations().stream() + .filter(i -> CacheManagerPersistenceConfiguration.class.equals(i.getClass())) + .map(CacheManagerPersistenceConfiguration.class::cast).collect(toList()); + assertThat(serviceConfig).hasSize(1); + + CacheManagerPersistenceConfiguration providerConfiguration = serviceConfig.iterator().next(); + assertThat(providerConfiguration.getRootDirectory()).isEqualTo(new File("some/dir")); + } + + + @Test + public void unparseServiceCreationConfiguration() { + Configuration config = ConfigurationBuilder.newConfigurationBuilder() + .withService(new CacheManagerPersistenceConfiguration(new File("foo"))).build(); + ConfigType configType = new CacheManagerPersistenceConfigurationParser().unparseServiceCreationConfiguration(config, new ConfigType()); + + assertThat(configType.getPersistence().getDirectory()).isEqualTo("foo"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultCopyProviderConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultCopyProviderConfigurationParserTest.java new file mode 100644 index 0000000000..c950fc01a3 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultCopyProviderConfigurationParserTest.java @@ -0,0 +1,84 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.impl.config.copy.DefaultCopierConfiguration; +import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.CopierType; +import org.junit.Test; +import org.xml.sax.SAXException; + +import com.pany.ehcache.copier.Description; +import com.pany.ehcache.copier.DescriptionCopier; +import com.pany.ehcache.copier.Person; +import com.pany.ehcache.copier.PersonCopier; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import javax.xml.bind.JAXBException; +import javax.xml.parsers.ParserConfigurationException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class DefaultCopyProviderConfigurationParserTest { + + @Test + public void parseServiceCreationConfiguration() throws SAXException, JAXBException, ParserConfigurationException, IOException, ClassNotFoundException { + Configuration xmlConfig = new XmlConfiguration(getClass().getResource("/configs/cache-copiers.xml")); + + assertThat(xmlConfig.getServiceCreationConfigurations()).hasSize(1); + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + + assertThat(configuration).isExactlyInstanceOf(DefaultCopyProviderConfiguration.class); + + DefaultCopyProviderConfiguration factoryConfiguration = (DefaultCopyProviderConfiguration) configuration; + Map, DefaultCopierConfiguration> defaults = factoryConfiguration.getDefaults(); + assertThat(defaults).hasSize(2); + assertThat(defaults.get(Description.class).getClazz()).isEqualTo(DescriptionCopier.class); + assertThat(defaults.get(Person.class).getClazz()).isEqualTo((PersonCopier.class)); + } + + @Test + public void unparseServiceCreationConfiguration() { + DefaultCopyProviderConfiguration providerConfig = new DefaultCopyProviderConfiguration(); + providerConfig.addCopierFor(Description.class, DescriptionCopier.class); + providerConfig.addCopierFor(Person.class, PersonCopier.class); + + Configuration config = ConfigurationBuilder.newConfigurationBuilder().withService(providerConfig).build(); + ConfigType configType = new DefaultCopyProviderConfigurationParser().unparseServiceCreationConfiguration(config, new ConfigType()); + + List copiers = configType.getDefaultCopiers().getCopier(); + assertThat(copiers).hasSize(2); + copiers.forEach(copier -> { + if (copier.getType().equals(Description.class.getName())) { + assertThat(copier.getValue()).isEqualTo(DescriptionCopier.class.getName()); + } else if (copier.getType().equals(Person.class.getName())) { + assertThat(copier.getValue()).isEqualTo(PersonCopier.class.getName()); + } else { + throw new AssertionError("Not expected"); + } + }); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultSerializationProviderConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultSerializationProviderConfigurationParserTest.java new file mode 100644 index 0000000000..ea08c210c8 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultSerializationProviderConfigurationParserTest.java @@ -0,0 +1,89 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.SerializerType; +import org.junit.Test; +import org.xml.sax.SAXException; + +import com.pany.ehcache.copier.Description; +import com.pany.ehcache.copier.Person; +import com.pany.ehcache.serializer.TestSerializer; +import com.pany.ehcache.serializer.TestSerializer2; +import com.pany.ehcache.serializer.TestSerializer3; +import com.pany.ehcache.serializer.TestSerializer4; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import javax.xml.bind.JAXBException; +import javax.xml.parsers.ParserConfigurationException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class DefaultSerializationProviderConfigurationParserTest { + + @Test + public void parseServiceCreationConfiguration() throws SAXException, JAXBException, ParserConfigurationException, IOException, ClassNotFoundException { + Configuration xmlConfig = new XmlConfiguration(getClass().getResource("/configs/default-serializer.xml")); + + assertThat(xmlConfig.getServiceCreationConfigurations()).hasSize(1); + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + + assertThat(configuration).isExactlyInstanceOf(DefaultSerializationProviderConfiguration.class); + + DefaultSerializationProviderConfiguration factoryConfiguration = (DefaultSerializationProviderConfiguration) configuration; + Map, Class>> defaultSerializers = factoryConfiguration.getDefaultSerializers(); + assertThat(defaultSerializers).hasSize(4); + assertThat(defaultSerializers.get(CharSequence.class)).isEqualTo(TestSerializer.class); + assertThat(defaultSerializers.get(Number.class)).isEqualTo(TestSerializer2.class); + assertThat(defaultSerializers.get(Long.class)).isEqualTo(TestSerializer3.class); + assertThat(defaultSerializers.get(Integer.class)).isEqualTo(TestSerializer4.class); + } + + + @Test @SuppressWarnings("unchecked") + public void unparseServiceCreationConfiguration() { + DefaultSerializationProviderConfiguration providerConfig = new DefaultSerializationProviderConfiguration(); + providerConfig.addSerializerFor(Description.class, (Class) TestSerializer3.class); + providerConfig.addSerializerFor(Person.class, (Class) TestSerializer4.class); + + Configuration config = ConfigurationBuilder.newConfigurationBuilder().withService(providerConfig).build(); + ConfigType configType = new DefaultSerializationProviderConfigurationParser().unparseServiceCreationConfiguration(config, new ConfigType()); + + List serializers = configType.getDefaultSerializers().getSerializer(); + assertThat(serializers).hasSize(2); + serializers.forEach(serializer -> { + if (serializer.getType().equals(Description.class.getName())) { + assertThat(serializer.getValue()).isEqualTo(TestSerializer3.class.getName()); + } else if (serializer.getType().equals(Person.class.getName())) { + assertThat(serializer.getValue()).isEqualTo(TestSerializer4.class.getName()); + } else { + throw new AssertionError("Not expected"); + } + }); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultSizeOfEngineProviderConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultSizeOfEngineProviderConfigurationParserTest.java new file mode 100644 index 0000000000..3ad074be31 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/provider/DefaultSizeOfEngineProviderConfigurationParserTest.java @@ -0,0 +1,66 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.SizeofType; +import org.junit.Test; +import org.xml.sax.SAXException; + +import java.io.IOException; + +import javax.xml.bind.JAXBException; +import javax.xml.parsers.ParserConfigurationException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class DefaultSizeOfEngineProviderConfigurationParserTest { + + @Test + public void parseServiceCreationConfiguration() throws SAXException, JAXBException, ParserConfigurationException, IOException, ClassNotFoundException { + Configuration xmlConfig = new XmlConfiguration(getClass().getResource("/configs/sizeof-engine.xml")); + + assertThat(xmlConfig.getServiceCreationConfigurations()).hasSize(1); + + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + assertThat(configuration).isExactlyInstanceOf(DefaultSizeOfEngineProviderConfiguration.class); + + DefaultSizeOfEngineProviderConfiguration sizeOfEngineProviderConfig = (DefaultSizeOfEngineProviderConfiguration) configuration; + assertThat(sizeOfEngineProviderConfig.getMaxObjectGraphSize()).isEqualTo(200); + assertThat(sizeOfEngineProviderConfig.getMaxObjectSize()).isEqualTo(100000); + } + + @Test + public void unparseServiceCreationConfiguration() { + ConfigType configType = new ConfigType(); + Configuration config = ConfigurationBuilder.newConfigurationBuilder() + .withService(new DefaultSizeOfEngineProviderConfiguration(123, MemoryUnit.MB, 987)).build(); + configType = new DefaultSizeOfEngineProviderConfigurationParser().unparseServiceCreationConfiguration(config, configType); + + SizeofType heapStore = configType.getHeapStore(); + assertThat(heapStore.getMaxObjectGraphSize().getValue()).isEqualTo(987); + assertThat(heapStore.getMaxObjectSize().getValue()).isEqualTo(123); + assertThat(heapStore.getMaxObjectSize().getUnit().value()).isEqualTo("MB"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/provider/OffHeapDiskStoreProviderConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/provider/OffHeapDiskStoreProviderConfigurationParserTest.java new file mode 100644 index 0000000000..e5600e8290 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/provider/OffHeapDiskStoreProviderConfigurationParserTest.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.junit.Test; +import org.xml.sax.SAXException; + +import java.io.IOException; + +import javax.xml.bind.JAXBException; +import javax.xml.parsers.ParserConfigurationException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class OffHeapDiskStoreProviderConfigurationParserTest { + + @Test + public void parseServiceCreationConfiguration() throws SAXException, JAXBException, ParserConfigurationException, IOException, ClassNotFoundException { + Configuration xmlConfig = new XmlConfiguration(getClass().getResource("/configs/resources-caches.xml")); + + assertThat(xmlConfig.getServiceCreationConfigurations()).hasSize(1); + + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + + assertThat(configuration).isExactlyInstanceOf(OffHeapDiskStoreProviderConfiguration.class); + + OffHeapDiskStoreProviderConfiguration providerConfiguration = (OffHeapDiskStoreProviderConfiguration) configuration; + assertThat(providerConfiguration.getThreadPoolAlias()).isEqualTo("disk-pool"); + } + + @Test + public void unparseServiceCreationConfiguration() { + ConfigType configType = new ConfigType(); + Configuration config = ConfigurationBuilder.newConfigurationBuilder() + .withService(new OffHeapDiskStoreProviderConfiguration("foo")).build(); + configType = new OffHeapDiskStoreProviderConfigurationParser().unparseServiceCreationConfiguration(config, configType); + + assertThat(configType.getDiskStore().getThreadPool()).isEqualTo("foo"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/provider/PooledExecutionServiceConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/provider/PooledExecutionServiceConfigurationParserTest.java new file mode 100644 index 0000000000..2c55ccbd6e --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/provider/PooledExecutionServiceConfigurationParserTest.java @@ -0,0 +1,88 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.ehcache.xml.model.ThreadPoolsType; +import org.junit.Test; +import org.xml.sax.SAXException; + +import java.io.IOException; +import java.util.List; + +import javax.xml.bind.JAXBException; +import javax.xml.parsers.ParserConfigurationException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class PooledExecutionServiceConfigurationParserTest { + + @Test + public void parseServiceCreationConfiguration() throws SAXException, JAXBException, ParserConfigurationException, IOException, ClassNotFoundException { + Configuration xmlConfig = new XmlConfiguration(getClass().getResource("/configs/thread-pools.xml")); + + assertThat(xmlConfig.getServiceCreationConfigurations()).hasSize(1); + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + assertThat(configuration).isExactlyInstanceOf(PooledExecutionServiceConfiguration.class); + + PooledExecutionServiceConfiguration providerConfiguration = (PooledExecutionServiceConfiguration) configuration; + assertThat(providerConfiguration.getPoolConfigurations()).containsKeys("big", "small"); + + PooledExecutionServiceConfiguration.PoolConfiguration small = providerConfiguration.getPoolConfigurations().get("small"); + assertThat(small.minSize()).isEqualTo(1); + assertThat(small.maxSize()).isEqualTo(1); + + PooledExecutionServiceConfiguration.PoolConfiguration big = providerConfiguration.getPoolConfigurations().get("big"); + assertThat(big.minSize()).isEqualTo(4); + assertThat(big.maxSize()).isEqualTo(32); + + assertThat(providerConfiguration.getDefaultPoolAlias()).isEqualTo("big"); + } + + @Test + public void unparseServiceCreationConfiguration() { + PooledExecutionServiceConfiguration providerConfig = new PooledExecutionServiceConfiguration(); + providerConfig.addDefaultPool("foo", 5, 9); + providerConfig.addPool("bar", 2, 6); + + Configuration config = ConfigurationBuilder.newConfigurationBuilder().withService(providerConfig).build(); + ConfigType configType = new ConfigType(); + configType = new PooledExecutionServiceConfigurationParser().unparseServiceCreationConfiguration(config, configType); + + List threadPools = configType.getThreadPools().getThreadPool(); + assertThat(threadPools).hasSize(2); + threadPools.forEach(pool -> { + if (pool.getAlias().equals("foo")) { + assertThat(pool.getMinSize()).isEqualTo(5); + assertThat(pool.getMaxSize()).isEqualTo(9); + assertThat(pool.isDefault()).isEqualTo(true); + } else if (pool.getAlias().equals("bar")) { + assertThat(pool.getMinSize()).isEqualTo(2); + assertThat(pool.getMaxSize()).isEqualTo(6); + assertThat(pool.isDefault()).isEqualTo(false); + } else { + throw new AssertionError("Not expected"); + } + }); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/provider/WriteBehindProviderConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/provider/WriteBehindProviderConfigurationParserTest.java new file mode 100644 index 0000000000..fd35e5b71b --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/provider/WriteBehindProviderConfigurationParserTest.java @@ -0,0 +1,60 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.provider; + +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration; +import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.ConfigType; +import org.junit.Test; +import org.xml.sax.SAXException; + +import java.io.IOException; + +import javax.xml.bind.JAXBException; +import javax.xml.parsers.ParserConfigurationException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class WriteBehindProviderConfigurationParserTest { + + @Test + public void parseServiceCreationConfiguration() throws SAXException, JAXBException, ParserConfigurationException, IOException, ClassNotFoundException { + Configuration xmlConfig = new XmlConfiguration(getClass().getResource("/configs/writebehind-cache.xml")); + + assertThat(xmlConfig.getServiceCreationConfigurations()).hasSize(1); + + ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); + + assertThat(configuration).isExactlyInstanceOf(WriteBehindProviderConfiguration.class); + + WriteBehindProviderConfiguration providerConfiguration = (WriteBehindProviderConfiguration) configuration; + assertThat(providerConfiguration.getThreadPoolAlias()).isEqualTo("write-behind-pool"); + } + + @Test + public void unparseServiceCreationConfiguration() { + ConfigType configType = new ConfigType(); + Configuration config = ConfigurationBuilder.newConfigurationBuilder() + .withService(new WriteBehindProviderConfiguration("foo")).build(); + configType = new WriteBehindProviderConfigurationParser().unparseServiceCreationConfiguration(config, configType); + + assertThat(configType.getWriteBehind().getThreadPool()).isEqualTo("foo"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheEventDispatcherConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheEventDispatcherConfigurationParserTest.java new file mode 100644 index 0000000000..13e669db7e --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheEventDispatcherConfigurationParserTest.java @@ -0,0 +1,52 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.event.DefaultCacheEventDispatcherConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.CacheType; +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class DefaultCacheEventDispatcherConfigurationParserTest { + + @Test + public void parseServiceConfiguration() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/ehcache-cacheEventListener.xml")).getCacheConfigurations().get("template1"); + + DefaultCacheEventDispatcherConfiguration eventDispatcherConfig = + findSingletonAmongst(DefaultCacheEventDispatcherConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(eventDispatcherConfig).isNotNull(); + assertThat(eventDispatcherConfig.getThreadPoolAlias()).isEqualTo("listeners-pool"); + } + + @Test + public void unparseServiceConfiguration() { + CacheConfiguration cacheConfig = + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(new DefaultCacheEventDispatcherConfiguration("foo")).build(); + CacheType cacheType = new CacheType(); + cacheType = new DefaultCacheEventDispatcherConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType); + + assertThat(cacheType.getListeners().getDispatcherThreadPool()).isEqualTo("foo"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheEventListenerConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheEventListenerConfigurationParserTest.java new file mode 100644 index 0000000000..2ec0aa4b16 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheEventListenerConfigurationParserTest.java @@ -0,0 +1,96 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.event.EventType; +import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.EventFiringType; +import org.ehcache.xml.model.EventOrderingType; +import org.ehcache.xml.model.ListenersType; +import org.junit.Test; + +import com.pany.ehcache.integration.TestCacheEventListener; + +import java.util.EnumSet; +import java.util.List; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; +import static org.ehcache.event.EventFiring.SYNCHRONOUS; +import static org.ehcache.event.EventOrdering.UNORDERED; +import static org.ehcache.event.EventType.CREATED; +import static org.ehcache.event.EventType.REMOVED; + +public class DefaultCacheEventListenerConfigurationParserTest { + + @Test + public void parseServiceConfiguration() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/ehcache-cacheEventListener.xml")).getCacheConfigurations().get("bar"); + + DefaultCacheEventListenerConfiguration listenerConfig = + findSingletonAmongst(DefaultCacheEventListenerConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(listenerConfig).isNotNull(); + assertThat(listenerConfig.getClazz()).isEqualTo(TestCacheEventListener.class); + assertThat(listenerConfig.firingMode()).isEqualTo(SYNCHRONOUS); + assertThat(listenerConfig.orderingMode()).isEqualTo(UNORDERED); + assertThat(listenerConfig.fireOn()).containsExactlyInAnyOrder(EventType.values()); + } + + + @Test + public void unparseServiceConfiguration() { + DefaultCacheEventListenerConfiguration listenerConfig = + new DefaultCacheEventListenerConfiguration(EnumSet.of(CREATED, REMOVED), TestCacheEventListener.class); + listenerConfig.setEventFiringMode(SYNCHRONOUS); + listenerConfig.setEventOrderingMode(UNORDERED); + + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(listenerConfig).build(); + CacheType cacheType = new CacheType(); + cacheType = new DefaultCacheEventListenerConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType); + + List listeners = cacheType.getListeners().getListener(); + assertThat(listeners).hasSize(1); + ListenersType.Listener listener = listeners.get(0); + assertThat(listener.getEventFiringMode()).isEqualTo(EventFiringType.SYNCHRONOUS); + assertThat(listener.getEventOrderingMode()).isEqualTo(EventOrderingType.UNORDERED); + assertThat(listener.getEventsToFireOn()).contains(org.ehcache.xml.model.EventType.CREATED, org.ehcache.xml.model.EventType.REMOVED); + } + + @Test + public void unparseServiceConfigurationWithInstance() { + TestCacheEventListener testCacheEventListener = new TestCacheEventListener(); + DefaultCacheEventListenerConfiguration listenerConfig = + new DefaultCacheEventListenerConfiguration(EnumSet.of(CREATED, REMOVED), testCacheEventListener); + listenerConfig.setEventFiringMode(SYNCHRONOUS); + listenerConfig.setEventOrderingMode(UNORDERED); + + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(listenerConfig).build(); + CacheType cacheType = new CacheType(); + assertThatExceptionOfType(XmlConfigurationException.class).isThrownBy(() -> + new DefaultCacheEventListenerConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType)) + .withMessage("%s", "XML translation for instance based initialization for " + + "DefaultCacheEventListenerConfiguration is not supported"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheLoaderWriterConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheLoaderWriterConfigurationParserTest.java new file mode 100644 index 0000000000..740f52d293 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCacheLoaderWriterConfigurationParserTest.java @@ -0,0 +1,66 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheType; +import org.junit.Test; + +import com.pany.ehcache.integration.TestCacheLoaderWriter; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class DefaultCacheLoaderWriterConfigurationParserTest { + + @Test + public void parseServiceConfiguration() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/writebehind-cache.xml")).getCacheConfigurations().get("bar"); + DefaultCacheLoaderWriterConfiguration loaderWriterConfig = + findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(loaderWriterConfig).isNotNull(); + assertThat(loaderWriterConfig.getClazz()).isEqualTo(TestCacheLoaderWriter.class); + } + + @Test + public void unparseServiceConfiguration() { + CacheConfiguration cacheConfig = + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(new DefaultCacheLoaderWriterConfiguration(TestCacheLoaderWriter.class)).build(); + CacheType cacheType = new DefaultCacheLoaderWriterConfigurationParser().unparseServiceConfiguration(cacheConfig, new CacheType()); + + + assertThat(cacheType.getLoaderWriter().getClazz()).isEqualTo(TestCacheLoaderWriter.class.getName()); + } + + @Test + public void unparseServiceConfigurationWithInstance() { + TestCacheLoaderWriter testCacheLoaderWriter = new TestCacheLoaderWriter(); + CacheConfiguration cacheConfig = + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(new DefaultCacheLoaderWriterConfiguration(testCacheLoaderWriter)).build(); + assertThatExceptionOfType(XmlConfigurationException.class).isThrownBy(() -> + new DefaultCacheLoaderWriterConfigurationParser().unparseServiceConfiguration(cacheConfig, new CacheType())) + .withMessage("%s", "XML translation for instance based initialization for " + + "DefaultCacheLoaderWriterConfiguration is not supported"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCopierConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCopierConfigurationParserTest.java new file mode 100644 index 0000000000..00a02cc416 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultCopierConfigurationParserTest.java @@ -0,0 +1,107 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.copy.DefaultCopierConfiguration; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheEntryType; +import org.ehcache.xml.model.CacheType; +import org.junit.Test; + +import com.pany.ehcache.copier.AnotherPersonCopier; +import com.pany.ehcache.copier.Description; +import com.pany.ehcache.copier.DescriptionCopier; +import com.pany.ehcache.copier.Person; +import com.pany.ehcache.copier.PersonCopier; + +import java.util.Collection; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findAmongst; + +public class DefaultCopierConfigurationParserTest { + + @Test + public void parseServiceConfiguration() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/cache-copiers.xml")).getCacheConfigurations().get("baz"); + + @SuppressWarnings("rawtypes") + Collection copierConfigs = findAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(copierConfigs).hasSize(2); + for(DefaultCopierConfiguration copierConfig : copierConfigs) { + if(copierConfig.getType() == DefaultCopierConfiguration.Type.KEY) { + assertThat(copierConfig.getClazz()).isEqualTo(SerializingCopier.class); + } else { + assertThat(copierConfig.getClazz()).isEqualTo(AnotherPersonCopier.class); + } + } + } + + @Test + public void unparseServiceConfiguration() { + @SuppressWarnings({"unchecked", "rawtypes"}) + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Description.class, Person.class, heap(10)) + .withService(new DefaultCopierConfiguration(DescriptionCopier.class, DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration(PersonCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .build(); + + CacheType cacheType = new CacheType(); + CacheEntryType keyType = new CacheEntryType(); + keyType.setValue("foo"); + cacheType.setKeyType(keyType); + CacheEntryType valueType = new CacheEntryType(); + valueType.setValue("bar"); + cacheType.setValueType(valueType); + + cacheType = new DefaultCopierConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType); + + assertThat(cacheType.getKeyType().getCopier()).isEqualTo(DescriptionCopier.class.getName()); + assertThat(cacheType.getValueType().getCopier()).isEqualTo(PersonCopier.class.getName()); + } + + @Test + public void unparseServiceConfigurationWithInstance() { + DescriptionCopier descriptionCopier = new DescriptionCopier(); + PersonCopier personCopier = new PersonCopier(); + DefaultCopierConfiguration config1 = + new DefaultCopierConfiguration<>(descriptionCopier, DefaultCopierConfiguration.Type.KEY); + DefaultCopierConfiguration config2 = + new DefaultCopierConfiguration<>(personCopier, DefaultCopierConfiguration.Type.VALUE); + + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Description.class, Person.class, heap(10)) + .withService(config1).withService(config2).build(); + + CacheType cacheType = new CacheType(); + CacheEntryType keyType = new CacheEntryType(); + keyType.setValue("foo"); + cacheType.setKeyType(keyType); + CacheEntryType valueType = new CacheEntryType(); + valueType.setValue("bar"); + cacheType.setValueType(valueType); + assertThatExceptionOfType(XmlConfigurationException.class).isThrownBy(() -> + new DefaultCopierConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType)) + .withMessage("%s", "XML translation for instance based initialization for " + + "DefaultCopierConfiguration is not supported"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultResilienceStrategyConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultResilienceStrategyConfigurationParserTest.java new file mode 100644 index 0000000000..8de81e8c9a --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultResilienceStrategyConfigurationParserTest.java @@ -0,0 +1,67 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.resilience.DefaultResilienceStrategyConfiguration; +import org.ehcache.xml.NiResilience; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheType; +import org.junit.Test; + +import com.pany.ehcache.integration.TestResilienceStrategy; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class DefaultResilienceStrategyConfigurationParserTest { + + @Test + public void parseServiceConfiguration() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/resilience-config.xml")).getCacheConfigurations().get("ni"); + DefaultResilienceStrategyConfiguration resilienceStrategyConfig = + findSingletonAmongst(DefaultResilienceStrategyConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(resilienceStrategyConfig).isNotNull(); + assertThat(resilienceStrategyConfig.getClazz()).isEqualTo(NiResilience.class); + } + + @Test + public void unparseServiceConfiguration() { + CacheConfiguration cacheConfig = + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(new DefaultResilienceStrategyConfiguration(TestResilienceStrategy.class)).build(); + CacheType cacheType = new DefaultResilienceStrategyConfigurationParser().unparseServiceConfiguration(cacheConfig, new CacheType()); + + assertThat(cacheType.getResilience()).isEqualTo(TestResilienceStrategy.class.getName()); + + } + + @Test + public void unparseServiceConfigurationWithInstance() { + TestResilienceStrategy testObject = new TestResilienceStrategy<>(); + CacheConfiguration cacheConfig = + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(new DefaultResilienceStrategyConfiguration(testObject)).build(); + assertThatExceptionOfType(XmlConfigurationException.class).isThrownBy(() -> + new DefaultResilienceStrategyConfigurationParser().unparseServiceConfiguration(cacheConfig, new CacheType())) + .withMessage("%s", "XML translation for instance based initialization for " + + "DefaultResilienceStrategyConfiguration is not supported"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultSerializerConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultSerializerConfigurationParserTest.java new file mode 100644 index 0000000000..7afffe6edf --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultSerializerConfigurationParserTest.java @@ -0,0 +1,104 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.core.util.ClassLoading; +import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.exceptions.XmlConfigurationException; +import org.ehcache.xml.model.CacheEntryType; +import org.ehcache.xml.model.CacheType; +import org.junit.Test; + +import com.pany.ehcache.copier.Description; +import com.pany.ehcache.copier.Person; +import com.pany.ehcache.serializer.TestSerializer3; +import com.pany.ehcache.serializer.TestSerializer4; + +import java.util.Collection; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findAmongst; + +public class DefaultSerializerConfigurationParserTest { + + @Test + public void parseServiceConfiguration() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/default-serializer.xml")).getCacheConfigurations().get("foo"); + @SuppressWarnings("rawtypes") + Collection copierConfigs = + findAmongst(DefaultSerializerConfiguration.class, cacheConfiguration.getServiceConfigurations()); + assertThat(copierConfigs).hasSize(2); + + for(DefaultSerializerConfiguration copierConfig : copierConfigs) { + if(copierConfig.getType() == DefaultSerializerConfiguration.Type.KEY) { + assertThat(copierConfig.getClazz()).isEqualTo(TestSerializer3.class); + } else { + assertThat(copierConfig.getClazz()).isEqualTo(TestSerializer4.class); + } + } + } + + @Test + public void unparseServiceConfiguration() { + @SuppressWarnings({"unchecked", "rawtypes"}) + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Description.class, Person.class, heap(10)) + .withService(new DefaultSerializerConfiguration(TestSerializer3.class, DefaultSerializerConfiguration.Type.KEY)) + .withService(new DefaultSerializerConfiguration(TestSerializer4.class, DefaultSerializerConfiguration.Type.VALUE)) + .build(); + + CacheType cacheType = new CacheType(); + CacheEntryType keyType = new CacheEntryType(); + keyType.setValue("foo"); + cacheType.setKeyType(keyType); + CacheEntryType valueType = new CacheEntryType(); + valueType.setValue("bar"); + cacheType.setValueType(valueType); + + cacheType = new DefaultSerializerConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType); + + assertThat(cacheType.getKeyType().getSerializer()).isEqualTo(TestSerializer3.class.getName()); + assertThat(cacheType.getValueType().getSerializer()).isEqualTo(TestSerializer4.class.getName()); + } + + @Test + public void unparseServiceConfigurationWithInstance() { + TestSerializer3 testSerializer3 = new TestSerializer3<>(ClassLoading.getDefaultClassLoader()); + TestSerializer4 testSerializer4 = new TestSerializer4<>(ClassLoading.getDefaultClassLoader()); + + DefaultSerializerConfiguration config1 = new DefaultSerializerConfiguration<>(testSerializer3, DefaultSerializerConfiguration.Type.KEY); + DefaultSerializerConfiguration config2 = new DefaultSerializerConfiguration<>(testSerializer4, DefaultSerializerConfiguration.Type.VALUE); + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Description.class, Person.class, heap(10)) + .withService(config1).withService(config2).build(); + + CacheType cacheType = new CacheType(); + CacheEntryType keyType = new CacheEntryType(); + keyType.setValue("foo"); + cacheType.setKeyType(keyType); + CacheEntryType valueType = new CacheEntryType(); + valueType.setValue("bar"); + cacheType.setValueType(valueType); + assertThatExceptionOfType(XmlConfigurationException.class).isThrownBy(() -> + new DefaultSerializerConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType)) + .withMessage("%s", "XML translation for instance based initialization for " + + "DefaultSerializerConfiguration is not supported"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultSizeOfEngineConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultSizeOfEngineConfigurationParserTest.java new file mode 100644 index 0000000000..07a4f16365 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultSizeOfEngineConfigurationParserTest.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.SizeofType; +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class DefaultSizeOfEngineConfigurationParserTest { + + @Test + public void parseServiceConfiguration() throws Exception { + XmlConfiguration configuration = new XmlConfiguration(getClass().getResource("/configs/sizeof-engine.xml")); + CacheConfiguration cacheConfig = configuration.getCacheConfigurations().get("usesDefaultSizeOfEngine"); + DefaultSizeOfEngineConfiguration sizeOfEngineConfig = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig.getServiceConfigurations()); + + assertThat(sizeOfEngineConfig).isNull(); + + CacheConfiguration cacheConfig1 = configuration.getCacheConfigurations().get("usesConfiguredInCache"); + DefaultSizeOfEngineConfiguration sizeOfEngineConfig1 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig1.getServiceConfigurations()); + + assertThat(sizeOfEngineConfig1).isNotNull(); + assertThat(sizeOfEngineConfig1.getMaxObjectGraphSize()).isEqualTo(500); + assertThat(sizeOfEngineConfig1.getMaxObjectSize()).isEqualTo(200000); + + CacheConfiguration cacheConfig2 = configuration.getCacheConfigurations().get("usesPartialOneConfiguredInCache"); + DefaultSizeOfEngineConfiguration sizeOfEngineConfig2 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig2.getServiceConfigurations()); + + assertThat(sizeOfEngineConfig2).isNotNull(); + assertThat(sizeOfEngineConfig2.getMaxObjectGraphSize()).isEqualTo(500L); + assertThat(sizeOfEngineConfig2.getMaxObjectSize()).isEqualTo(Long.MAX_VALUE); + + CacheConfiguration cacheConfig3 = configuration.getCacheConfigurations().get("usesPartialTwoConfiguredInCache"); + DefaultSizeOfEngineConfiguration sizeOfEngineConfig3 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig3.getServiceConfigurations()); + + assertThat(sizeOfEngineConfig3).isNotNull(); + assertThat(sizeOfEngineConfig3.getMaxObjectGraphSize()).isEqualTo(1000L); + assertThat(sizeOfEngineConfig3.getMaxObjectSize()).isEqualTo(200000L); + } + + @Test + public void unparseServiceConfiguration() { + CacheConfiguration cacheConfig = + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(new DefaultSizeOfEngineConfiguration(123, MemoryUnit.MB, 987)).build(); + CacheType cacheType = new CacheType(); + cacheType = new DefaultSizeOfEngineConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType); + + SizeofType heapStore = cacheType.getHeapStoreSettings(); + assertThat(heapStore.getMaxObjectGraphSize().getValue()).isEqualTo(987); + assertThat(heapStore.getMaxObjectSize().getValue()).isEqualTo(123); + assertThat(heapStore.getMaxObjectSize().getUnit().value()).isEqualTo("MB"); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultWriteBehindConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultWriteBehindConfigurationParserTest.java new file mode 100644 index 0000000000..92e02443ed --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/DefaultWriteBehindConfigurationParserTest.java @@ -0,0 +1,85 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.WriteBehindConfigurationBuilder; +import org.ehcache.impl.config.loaderwriter.writebehind.DefaultWriteBehindConfiguration; +import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.CacheLoaderWriterType; +import org.ehcache.xml.model.CacheType; +import org.junit.Test; + +import java.util.concurrent.TimeUnit; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class DefaultWriteBehindConfigurationParserTest { + + @Test + public void parseServiceConfigurationNonBatching() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/writebehind-cache.xml")).getCacheConfigurations().get("bar"); + DefaultWriteBehindConfiguration writeBehindConfig = + findSingletonAmongst(DefaultWriteBehindConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(writeBehindConfig).isNotNull(); + assertThat(writeBehindConfig.getConcurrency()).isEqualTo(1); + assertThat(writeBehindConfig.getMaxQueueSize()).isEqualTo(10); + assertThat(writeBehindConfig.getBatchingConfiguration()).isNull(); + } + + @Test + public void parseServiceConfigurationBatching() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/writebehind-cache.xml")).getCacheConfigurations().get("template1"); + DefaultWriteBehindConfiguration writeBehindConfig = + findSingletonAmongst(DefaultWriteBehindConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(writeBehindConfig).isNotNull(); + assertThat(writeBehindConfig.getConcurrency()).isEqualTo(1); + assertThat(writeBehindConfig.getMaxQueueSize()).isEqualTo(10); + WriteBehindConfiguration.BatchingConfiguration batchingConfiguration = writeBehindConfig.getBatchingConfiguration(); + assertThat(batchingConfiguration).isNotNull(); + assertThat(batchingConfiguration.getBatchSize()).isEqualTo(2); + assertThat(batchingConfiguration.isCoalescing()).isEqualTo(false); + assertThat(batchingConfiguration.getMaxDelay()).isEqualTo(10); + assertThat(batchingConfiguration.getMaxDelayUnit()).isEqualTo(TimeUnit.SECONDS); + } + + @Test + public void unparseServiceConfigurationBatched() { + WriteBehindConfiguration writeBehindConfiguration = + WriteBehindConfigurationBuilder.newBatchedWriteBehindConfiguration(123, TimeUnit.SECONDS, 987) + .enableCoalescing().concurrencyLevel(8).useThreadPool("foo").queueSize(16).build(); + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(writeBehindConfiguration).build(); + CacheType cacheType = new CacheType(); + cacheType = new DefaultWriteBehindConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType); + + CacheLoaderWriterType.WriteBehind writeBehind = cacheType.getLoaderWriter().getWriteBehind(); + assertThat(writeBehind.getThreadPool()).isEqualTo("foo"); + assertThat(writeBehind.getSize()).isEqualTo(16); + assertThat(writeBehind.getConcurrency()).isEqualTo(8); + CacheLoaderWriterType.WriteBehind.Batching batching = writeBehind.getBatching(); + assertThat(batching.getBatchSize()).isEqualTo(987); + assertThat(batching.isCoalesce()).isEqualTo(true); + assertThat(batching.getMaxWriteDelay().getValue()).isEqualTo(123); + assertThat(batching.getMaxWriteDelay().getUnit()).isEqualTo(org.ehcache.xml.model.TimeUnit.SECONDS); + } +} diff --git a/ehcache-xml/src/test/java/org/ehcache/xml/service/OffHeapDiskStoreConfigurationParserTest.java b/ehcache-xml/src/test/java/org/ehcache/xml/service/OffHeapDiskStoreConfigurationParserTest.java new file mode 100644 index 0000000000..0cd20ce318 --- /dev/null +++ b/ehcache-xml/src/test/java/org/ehcache/xml/service/OffHeapDiskStoreConfigurationParserTest.java @@ -0,0 +1,57 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.xml.service; + +import org.ehcache.config.CacheConfiguration; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.ehcache.xml.model.CacheType; +import org.ehcache.xml.model.DiskStoreSettingsType; +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; + +public class OffHeapDiskStoreConfigurationParserTest { + + @Test + public void parseServiceConfiguration() throws Exception { + CacheConfiguration cacheConfiguration = new XmlConfiguration(getClass().getResource("/configs/resources-caches.xml")).getCacheConfigurations().get("tiered"); + OffHeapDiskStoreConfiguration diskConfig = + findSingletonAmongst(OffHeapDiskStoreConfiguration.class, cacheConfiguration.getServiceConfigurations()); + + assertThat(diskConfig.getThreadPoolAlias()).isEqualTo("some-pool"); + assertThat(diskConfig.getWriterConcurrency()).isEqualTo(2); + assertThat(diskConfig.getDiskSegments()).isEqualTo(4); + } + + @Test + public void unparseServiceConfiguration() { + CacheConfiguration cacheConfig = + newCacheConfigurationBuilder(Object.class, Object.class, heap(10)).withService(new OffHeapDiskStoreConfiguration("foo", 4, 8)).build(); + CacheType cacheType = new CacheType(); + cacheType = new OffHeapDiskStoreConfigurationParser().unparseServiceConfiguration(cacheConfig, cacheType); + + DiskStoreSettingsType diskStoreSettings = cacheType.getDiskStoreSettings(); + assertThat(diskStoreSettings.getThreadPool()).isEqualTo("foo"); + assertThat(diskStoreSettings.getWriterConcurrency()).isEqualTo(4); + assertThat(diskStoreSettings.getDiskSegments()).isEqualTo(8); + } + +} diff --git a/xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser b/ehcache-xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser similarity index 100% rename from xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser rename to ehcache-xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheManagerServiceConfigurationParser diff --git a/ehcache-xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheResourceConfigurationParser b/ehcache-xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheResourceConfigurationParser new file mode 100644 index 0000000000..cb8bfded53 --- /dev/null +++ b/ehcache-xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheResourceConfigurationParser @@ -0,0 +1 @@ +org.ehcache.xml.BazParser diff --git a/xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser b/ehcache-xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser similarity index 100% rename from xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser rename to ehcache-xml/src/test/resources/META-INF/services/org.ehcache.xml.CacheServiceConfigurationParser diff --git a/ehcache-xml/src/test/resources/configs/all-extensions.xml b/ehcache-xml/src/test/resources/configs/all-extensions.xml new file mode 100644 index 0000000000..e0c40e6ec4 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/all-extensions.xml @@ -0,0 +1,36 @@ + + + + + + + + + + java.lang.String + java.lang.String + + 10 + + + + + diff --git a/xml/src/test/resources/configs/bar.xsd b/ehcache-xml/src/test/resources/configs/bar.xsd similarity index 100% rename from xml/src/test/resources/configs/bar.xsd rename to ehcache-xml/src/test/resources/configs/bar.xsd diff --git a/ehcache-xml/src/test/resources/configs/baz.xsd b/ehcache-xml/src/test/resources/configs/baz.xsd new file mode 100644 index 0000000000..cbe98bd41c --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/baz.xsd @@ -0,0 +1,25 @@ + + + + + + + diff --git a/xml/src/test/resources/configs/cache-copiers.xml b/ehcache-xml/src/test/resources/configs/cache-copiers.xml similarity index 93% rename from xml/src/test/resources/configs/cache-copiers.xml rename to ehcache-xml/src/test/resources/configs/cache-copiers.xml index a8c15d6e5a..d95315edb2 100644 --- a/xml/src/test/resources/configs/cache-copiers.xml +++ b/ehcache-xml/src/test/resources/configs/cache-copiers.xml @@ -14,9 +14,7 @@ ~ limitations under the License. --> - + com.pany.ehcache.copier.DescriptionCopier diff --git a/xml/src/test/resources/configs/cache-integration.xml b/ehcache-xml/src/test/resources/configs/cache-integration.xml similarity index 87% rename from xml/src/test/resources/configs/cache-integration.xml rename to ehcache-xml/src/test/resources/configs/cache-integration.xml index 73fdb780c8..80e8578c20 100644 --- a/xml/src/test/resources/configs/cache-integration.xml +++ b/ehcache-xml/src/test/resources/configs/cache-integration.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + java.lang.Number java.lang.String diff --git a/xml/src/test/resources/configs/custom-resource.xml b/ehcache-xml/src/test/resources/configs/custom-resource.xml similarity index 78% rename from xml/src/test/resources/configs/custom-resource.xml rename to ehcache-xml/src/test/resources/configs/custom-resource.xml index 08ff612e75..e35d015bb1 100644 --- a/xml/src/test/resources/configs/custom-resource.xml +++ b/ehcache-xml/src/test/resources/configs/custom-resource.xml @@ -15,11 +15,8 @@ --> + xmlns:ehcache='http://www.ehcache.org/v3'> java.lang.String java.lang.String diff --git a/ehcache-xml/src/test/resources/configs/default-serializer.xml b/ehcache-xml/src/test/resources/configs/default-serializer.xml new file mode 100644 index 0000000000..1b68d8c363 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/default-serializer.xml @@ -0,0 +1,67 @@ + + + + + + com.pany.ehcache.serializer.TestSerializer + com.pany.ehcache.serializer.TestSerializer2 + + com.pany.ehcache.serializer.TestSerializer3 + + + com.pany.ehcache.serializer.TestSerializer4 + + + + + + + java.lang.Long + + + java.lang.Double + + + 10 + + + + + + java.lang.Long + + java.lang.Double + + + 10 + 1 + + + + + + java.lang.String + + + java.lang.String + + + 10 + 1 + + + diff --git a/xml/src/test/resources/configs/defaultTypes-cache.xml b/ehcache-xml/src/test/resources/configs/defaultTypes-cache.xml similarity index 81% rename from xml/src/test/resources/configs/defaultTypes-cache.xml rename to ehcache-xml/src/test/resources/configs/defaultTypes-cache.xml index ebd36b4c93..02c668a2fb 100644 --- a/xml/src/test/resources/configs/defaultTypes-cache.xml +++ b/ehcache-xml/src/test/resources/configs/defaultTypes-cache.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + diff --git a/ehcache-xml/src/test/resources/configs/disk-persistent-cache.xml b/ehcache-xml/src/test/resources/configs/disk-persistent-cache.xml new file mode 100644 index 0000000000..7a4f4a4003 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/disk-persistent-cache.xml @@ -0,0 +1,31 @@ + + + + + + + + + java.lang.String + java.lang.String + + 100 + 100 + + + + diff --git a/xml/src/test/resources/configs/docs/expiry.xml b/ehcache-xml/src/test/resources/configs/docs/expiry.xml similarity index 83% rename from xml/src/test/resources/configs/docs/expiry.xml rename to ehcache-xml/src/test/resources/configs/docs/expiry.xml index 335745c9e7..93b5b98c9d 100644 --- a/xml/src/test/resources/configs/docs/expiry.xml +++ b/ehcache-xml/src/test/resources/configs/docs/expiry.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + diff --git a/xml/src/test/resources/configs/docs/getting-started.xml b/ehcache-xml/src/test/resources/configs/docs/getting-started.xml similarity index 80% rename from xml/src/test/resources/configs/docs/getting-started.xml rename to ehcache-xml/src/test/resources/configs/docs/getting-started.xml index 11d241e7a0..9000aefcd5 100644 --- a/xml/src/test/resources/configs/docs/getting-started.xml +++ b/ehcache-xml/src/test/resources/configs/docs/getting-started.xml @@ -14,17 +14,14 @@ ~ limitations under the License. --> - + java.lang.String java.lang.String - 2000 - 500 + 20 + 10 diff --git a/ehcache-xml/src/test/resources/configs/docs/multi/multiple-managers.xml b/ehcache-xml/src/test/resources/configs/docs/multi/multiple-managers.xml new file mode 100644 index 0000000000..e7bd481a6f --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/docs/multi/multiple-managers.xml @@ -0,0 +1,30 @@ + + + + + + java.lang.String + java.lang.String + + 20 + 10 + + + + + + + + + java.lang.String + java.lang.String + + 20 + 10 + + + + + diff --git a/ehcache-xml/src/test/resources/configs/docs/multi/multiple-variants.xml b/ehcache-xml/src/test/resources/configs/docs/multi/multiple-variants.xml new file mode 100644 index 0000000000..e04b3c858e --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/docs/multi/multiple-variants.xml @@ -0,0 +1,44 @@ + + + + + + + + java.lang.String + java.lang.String + + 1000 + + + + + + + + java.lang.String + java.lang.String + + 1000 + 128 + + + + + + + + + + + java.lang.String + java.lang.String + + 1000 + + + + + diff --git a/xml/src/test/resources/configs/docs/template-sample.xml b/ehcache-xml/src/test/resources/configs/docs/template-sample.xml similarity index 81% rename from xml/src/test/resources/configs/docs/template-sample.xml rename to ehcache-xml/src/test/resources/configs/docs/template-sample.xml index 75bfcae4dd..5248231cee 100644 --- a/xml/src/test/resources/configs/docs/template-sample.xml +++ b/ehcache-xml/src/test/resources/configs/docs/template-sample.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + diff --git a/ehcache-xml/src/test/resources/configs/docs/thread-pools.xml b/ehcache-xml/src/test/resources/configs/docs/thread-pools.xml new file mode 100644 index 0000000000..7e6a5dd133 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/docs/thread-pools.xml @@ -0,0 +1,61 @@ + + + + + + + + + + + + + + + + + java.lang.Long + java.lang.String + + + 10 + 10 + + + + + java.lang.Long + java.lang.String + + + org.ehcache.docs.plugs.ListenerObject + + + 10 + + + + + + 10 + 10 + + + + + + \ No newline at end of file diff --git a/xml/src/test/resources/configs/ehcache-cacheEventListener.xml b/ehcache-xml/src/test/resources/configs/ehcache-cacheEventListener.xml similarity index 91% rename from xml/src/test/resources/configs/ehcache-cacheEventListener.xml rename to ehcache-xml/src/test/resources/configs/ehcache-cacheEventListener.xml index 7b7e58701a..ec46c1c18f 100644 --- a/xml/src/test/resources/configs/ehcache-cacheEventListener.xml +++ b/ehcache-xml/src/test/resources/configs/ehcache-cacheEventListener.xml @@ -14,10 +14,9 @@ ~ limitations under the License. --> - + + + java.lang.Number @@ -58,4 +57,4 @@ - \ No newline at end of file + diff --git a/ehcache-xml/src/test/resources/configs/ehcache-complete.xml b/ehcache-xml/src/test/resources/configs/ehcache-complete.xml new file mode 100644 index 0000000000..3744479b4d --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/ehcache-complete.xml @@ -0,0 +1,105 @@ + + + + + + + + + + com.pany.ehcache.serializer.TestSerializer + + + com.pany.ehcache.copier.AnotherPersonCopier + + + + + + + + + 1000 + 100 + + + + + com.pany.ehcache.copier.Description + com.pany.ehcache.copier.Person + + 6 + + + + com.pany.ehcache.integration.TestCacheLoaderWriter + + + 5 + + + + + + com.pany.ehcache.integration.TestCacheEventListener + ASYNCHRONOUS + UNORDERED + EVICTED + EXPIRED + + + + 100 + 80 + 100 + + + + 10 + 100 + + + + + + + java.lang.Long + java.lang.String + + 3500 + + + com.pany.ehcache.integration.TestCacheLoaderWriter + + + + + + + com.pany.ehcache.integration.TestCacheEventListener + SYNCHRONOUS + ORDERED + CREATED + REMOVED + UPDATED + + + + 4096 + + + diff --git a/xml/src/test/resources/configs/ehcache-multipleCacheEventListener.xml b/ehcache-xml/src/test/resources/configs/ehcache-multipleCacheEventListener.xml similarity index 90% rename from xml/src/test/resources/configs/ehcache-multipleCacheEventListener.xml rename to ehcache-xml/src/test/resources/configs/ehcache-multipleCacheEventListener.xml index 5a8b4fdd10..c18819a6fc 100644 --- a/xml/src/test/resources/configs/ehcache-multipleCacheEventListener.xml +++ b/ehcache-xml/src/test/resources/configs/ehcache-multipleCacheEventListener.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + java.lang.Number diff --git a/ehcache-xml/src/test/resources/configs/ehcache-system-props.xml b/ehcache-xml/src/test/resources/configs/ehcache-system-props.xml new file mode 100644 index 0000000000..152607a540 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/ehcache-system-props.xml @@ -0,0 +1,55 @@ + + + + + + + + + + + + com.pany.ehcache.copier.Description + com.pany.ehcache.copier.Person + + ${ehcache.expiry.ttl} + + + com.pany.ehcache.integration.TestCacheLoaderWriter + + + ${ehcache.loader-writer.write-behind.batching.max-write-delay} + + + + + ${ehcache.resources.heap} + ${ehcache.resources.offheap} + ${ehcache.resources.disk} + + + + + + java.lang.Long + java.lang.String + + ${ehcache.expiry.tti} + + 4096 + + diff --git a/xml/src/test/resources/configs/expiry-caches.xml b/ehcache-xml/src/test/resources/configs/expiry-caches.xml similarity index 83% rename from xml/src/test/resources/configs/expiry-caches.xml rename to ehcache-xml/src/test/resources/configs/expiry-caches.xml index c23f4a4eed..629d0fbc57 100644 --- a/xml/src/test/resources/configs/expiry-caches.xml +++ b/ehcache-xml/src/test/resources/configs/expiry-caches.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + java.lang.String @@ -28,6 +25,15 @@ 5 + + java.lang.String + java.lang.String + + com.pany.ehcache.DeprecatedExpiry + + 5 + + java.lang.String java.lang.String diff --git a/xml/src/test/resources/configs/fancy.xsd b/ehcache-xml/src/test/resources/configs/fancy.xsd similarity index 100% rename from xml/src/test/resources/configs/fancy.xsd rename to ehcache-xml/src/test/resources/configs/fancy.xsd diff --git a/xml/src/test/resources/configs/foo.xsd b/ehcache-xml/src/test/resources/configs/foo.xsd similarity index 100% rename from xml/src/test/resources/configs/foo.xsd rename to ehcache-xml/src/test/resources/configs/foo.xsd diff --git a/xml/src/test/resources/configs/invalid-core.xml b/ehcache-xml/src/test/resources/configs/invalid-core.xml similarity index 84% rename from xml/src/test/resources/configs/invalid-core.xml rename to ehcache-xml/src/test/resources/configs/invalid-core.xml index becc7e392f..35b913aa1d 100644 --- a/xml/src/test/resources/configs/invalid-core.xml +++ b/ehcache-xml/src/test/resources/configs/invalid-core.xml @@ -1,5 +1,4 @@ diff --git a/xml/src/test/resources/configs/invalid-service.xml b/ehcache-xml/src/test/resources/configs/invalid-service.xml similarity index 75% rename from xml/src/test/resources/configs/invalid-service.xml rename to ehcache-xml/src/test/resources/configs/invalid-service.xml index 7fad4a9256..1372dc9511 100644 --- a/xml/src/test/resources/configs/invalid-service.xml +++ b/ehcache-xml/src/test/resources/configs/invalid-service.xml @@ -1,5 +1,4 @@ diff --git a/ehcache-xml/src/test/resources/configs/invalid-two-caches.xml b/ehcache-xml/src/test/resources/configs/invalid-two-caches.xml new file mode 100644 index 0000000000..b9dc49818c --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/invalid-two-caches.xml @@ -0,0 +1,11 @@ + + + + 2000 + + + + 2000 + + + diff --git a/ehcache-xml/src/test/resources/configs/multi/empty.xml b/ehcache-xml/src/test/resources/configs/multi/empty.xml new file mode 100644 index 0000000000..0f2027efef --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/multi/empty.xml @@ -0,0 +1,2 @@ + + diff --git a/ehcache-xml/src/test/resources/configs/multi/extended.xml b/ehcache-xml/src/test/resources/configs/multi/extended.xml new file mode 100644 index 0000000000..81a028949c --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/multi/extended.xml @@ -0,0 +1,23 @@ + + + + + + + + java.lang.String + java.lang.String + + 10 + + + + + + + diff --git a/ehcache-xml/src/test/resources/configs/multi/multiple-configs.xml b/ehcache-xml/src/test/resources/configs/multi/multiple-configs.xml new file mode 100644 index 0000000000..1b74af9b2d --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/multi/multiple-configs.xml @@ -0,0 +1,19 @@ + + + + + + 100 + + + + + + + 100 + + + + diff --git a/ehcache-xml/src/test/resources/configs/multi/multiple-variants.xml b/ehcache-xml/src/test/resources/configs/multi/multiple-variants.xml new file mode 100644 index 0000000000..c2ba086e56 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/multi/multiple-variants.xml @@ -0,0 +1,38 @@ + + + + + + + 100 + + + + + + + 100 + + + + + + + + + + 100 + + + + + + + 100 + + + + + diff --git a/xml/src/test/resources/configs/nonExistentAdvisor-cache.xml b/ehcache-xml/src/test/resources/configs/nonExistentAdvisor-cache.xml similarity index 79% rename from xml/src/test/resources/configs/nonExistentAdvisor-cache.xml rename to ehcache-xml/src/test/resources/configs/nonExistentAdvisor-cache.xml index 52ad6e1c24..30a767611a 100644 --- a/xml/src/test/resources/configs/nonExistentAdvisor-cache.xml +++ b/ehcache-xml/src/test/resources/configs/nonExistentAdvisor-cache.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + com.foo.NonExistentAdvisorInCache diff --git a/xml/src/test/resources/configs/nonExistentAdvisor-template.xml b/ehcache-xml/src/test/resources/configs/nonExistentAdvisor-template.xml similarity index 80% rename from xml/src/test/resources/configs/nonExistentAdvisor-template.xml rename to ehcache-xml/src/test/resources/configs/nonExistentAdvisor-template.xml index 84be19ae24..74abaeed8b 100644 --- a/xml/src/test/resources/configs/nonExistentAdvisor-template.xml +++ b/ehcache-xml/src/test/resources/configs/nonExistentAdvisor-template.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + com.foo.NonExistentAdvisorInTemplate diff --git a/xml/src/test/resources/configs/one-cache.xml b/ehcache-xml/src/test/resources/configs/one-cache.xml similarity index 78% rename from xml/src/test/resources/configs/one-cache.xml rename to ehcache-xml/src/test/resources/configs/one-cache.xml index 2d4d64eb61..98091e81f8 100644 --- a/xml/src/test/resources/configs/one-cache.xml +++ b/ehcache-xml/src/test/resources/configs/one-cache.xml @@ -15,11 +15,8 @@ --> + xmlns:ehcache='http://www.ehcache.org/v3'> java.lang.String java.lang.String diff --git a/xml/src/test/resources/configs/one-service.xml b/ehcache-xml/src/test/resources/configs/one-service.xml similarity index 75% rename from xml/src/test/resources/configs/one-service.xml rename to ehcache-xml/src/test/resources/configs/one-service.xml index ab9e7028bd..fda8541468 100644 --- a/xml/src/test/resources/configs/one-service.xml +++ b/ehcache-xml/src/test/resources/configs/one-service.xml @@ -15,11 +15,8 @@ --> + xmlns:ehcache='http://www.ehcache.org/v3'> diff --git a/ehcache-xml/src/test/resources/configs/persistence-config.xml b/ehcache-xml/src/test/resources/configs/persistence-config.xml new file mode 100644 index 0000000000..bbba65a64b --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/persistence-config.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/ehcache-xml/src/test/resources/configs/pretty-typed-caches.xml b/ehcache-xml/src/test/resources/configs/pretty-typed-caches.xml new file mode 100644 index 0000000000..583b53dce2 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/pretty-typed-caches.xml @@ -0,0 +1,42 @@ + + + + + + java.lang.Integer + byte[] + 5 + + + + java.lang.String + java.lang.String[] + 5 + + + + java.lang.String + java.lang.String[][] + 5 + + + + java.lang.String + java.util.Map.Entry + 5 + + diff --git a/ehcache-xml/src/test/resources/configs/resilience-config.xml b/ehcache-xml/src/test/resources/configs/resilience-config.xml new file mode 100644 index 0000000000..3902b2ab62 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/resilience-config.xml @@ -0,0 +1,39 @@ + + + + + + java.lang.Number + java.lang.String + org.ehcache.xml.NiResilience + + 10 + + + + + java.lang.Number + java.lang.String + org.ehcache.xml.ShrubberyResilience + + 20 + + + + + + diff --git a/xml/src/test/resources/configs/resources-caches.xml b/ehcache-xml/src/test/resources/configs/resources-caches.xml similarity index 91% rename from xml/src/test/resources/configs/resources-caches.xml rename to ehcache-xml/src/test/resources/configs/resources-caches.xml index a1def6d89c..5a81099b44 100644 --- a/xml/src/test/resources/configs/resources-caches.xml +++ b/ehcache-xml/src/test/resources/configs/resources-caches.xml @@ -14,10 +14,9 @@ ~ limitations under the License. --> - + + + java.lang.String diff --git a/xml/src/test/resources/configs/resources-templates.xml b/ehcache-xml/src/test/resources/configs/resources-templates.xml similarity index 92% rename from xml/src/test/resources/configs/resources-templates.xml rename to ehcache-xml/src/test/resources/configs/resources-templates.xml index 22d67c644b..2369fbf895 100644 --- a/xml/src/test/resources/configs/resources-templates.xml +++ b/ehcache-xml/src/test/resources/configs/resources-templates.xml @@ -14,10 +14,7 @@ ~ limitations under the License. --> - + java.lang.String diff --git a/xml/src/test/resources/configs/sizeof-engine-cm-defaults-one.xml b/ehcache-xml/src/test/resources/configs/sizeof-engine-cm-defaults-one.xml similarity index 84% rename from xml/src/test/resources/configs/sizeof-engine-cm-defaults-one.xml rename to ehcache-xml/src/test/resources/configs/sizeof-engine-cm-defaults-one.xml index c8aa2fc11d..d78453284a 100644 --- a/xml/src/test/resources/configs/sizeof-engine-cm-defaults-one.xml +++ b/ehcache-xml/src/test/resources/configs/sizeof-engine-cm-defaults-one.xml @@ -13,10 +13,7 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + 100000 diff --git a/xml/src/test/resources/configs/sizeof-engine-cm-defaults-two.xml b/ehcache-xml/src/test/resources/configs/sizeof-engine-cm-defaults-two.xml similarity index 84% rename from xml/src/test/resources/configs/sizeof-engine-cm-defaults-two.xml rename to ehcache-xml/src/test/resources/configs/sizeof-engine-cm-defaults-two.xml index 1cf04f67ea..a4fbb7517a 100644 --- a/xml/src/test/resources/configs/sizeof-engine-cm-defaults-two.xml +++ b/ehcache-xml/src/test/resources/configs/sizeof-engine-cm-defaults-two.xml @@ -13,10 +13,7 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + 200 diff --git a/xml/src/test/resources/configs/sizeof-engine.xml b/ehcache-xml/src/test/resources/configs/sizeof-engine.xml similarity index 92% rename from xml/src/test/resources/configs/sizeof-engine.xml rename to ehcache-xml/src/test/resources/configs/sizeof-engine.xml index a8341f0818..95aaa398e9 100644 --- a/xml/src/test/resources/configs/sizeof-engine.xml +++ b/ehcache-xml/src/test/resources/configs/sizeof-engine.xml @@ -13,10 +13,7 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + 200 diff --git a/xml/src/test/resources/configs/template-cache.xml b/ehcache-xml/src/test/resources/configs/template-cache.xml similarity index 81% rename from xml/src/test/resources/configs/template-cache.xml rename to ehcache-xml/src/test/resources/configs/template-cache.xml index 086470eaf6..38bab168b4 100644 --- a/xml/src/test/resources/configs/template-cache.xml +++ b/ehcache-xml/src/test/resources/configs/template-cache.xml @@ -15,11 +15,8 @@ --> + xmlns:ehcache='http://www.ehcache.org/v3'> java.lang.String diff --git a/ehcache-xml/src/test/resources/configs/template-defaults.xml b/ehcache-xml/src/test/resources/configs/template-defaults.xml new file mode 100644 index 0000000000..62cb319745 --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/template-defaults.xml @@ -0,0 +1,22 @@ + + + + + + + + diff --git a/ehcache-xml/src/test/resources/configs/thread-pools.xml b/ehcache-xml/src/test/resources/configs/thread-pools.xml new file mode 100644 index 0000000000..00376dbb6f --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/thread-pools.xml @@ -0,0 +1,23 @@ + + + + + + + + + \ No newline at end of file diff --git a/ehcache-xml/src/test/resources/configs/unknown-resource.xml b/ehcache-xml/src/test/resources/configs/unknown-resource.xml new file mode 100644 index 0000000000..bf94d680ba --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/unknown-resource.xml @@ -0,0 +1,11 @@ + + + + + + + + + diff --git a/ehcache-xml/src/test/resources/configs/unknown-service-creation.xml b/ehcache-xml/src/test/resources/configs/unknown-service-creation.xml new file mode 100644 index 0000000000..254a6dec2b --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/unknown-service-creation.xml @@ -0,0 +1,9 @@ + + + + + + + diff --git a/ehcache-xml/src/test/resources/configs/unknown-service.xml b/ehcache-xml/src/test/resources/configs/unknown-service.xml new file mode 100644 index 0000000000..2873544bcf --- /dev/null +++ b/ehcache-xml/src/test/resources/configs/unknown-service.xml @@ -0,0 +1,8 @@ + + + + + + + diff --git a/xml/src/test/resources/configs/writebehind-cache.xml b/ehcache-xml/src/test/resources/configs/writebehind-cache.xml similarity index 85% rename from xml/src/test/resources/configs/writebehind-cache.xml rename to ehcache-xml/src/test/resources/configs/writebehind-cache.xml index 28c969c0c1..68925f1e53 100644 --- a/xml/src/test/resources/configs/writebehind-cache.xml +++ b/ehcache-xml/src/test/resources/configs/writebehind-cache.xml @@ -14,12 +14,11 @@ ~ limitations under the License. --> - + + + + - java.lang.Number java.lang.String @@ -33,7 +32,7 @@ 10 - + java.lang.Number java.lang.String @@ -49,7 +48,7 @@ 20 - - - \ No newline at end of file + + + diff --git a/ehcache-xml/src/testFixtures/java/org/ehcache/xml/XmlConfigurationMatchers.java b/ehcache-xml/src/testFixtures/java/org/ehcache/xml/XmlConfigurationMatchers.java new file mode 100644 index 0000000000..318ee81ca7 --- /dev/null +++ b/ehcache-xml/src/testFixtures/java/org/ehcache/xml/XmlConfigurationMatchers.java @@ -0,0 +1,57 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.xml; + +import org.xmlunit.diff.DefaultNodeMatcher; +import org.xmlunit.diff.ElementSelector; +import org.xmlunit.matchers.CompareMatcher; +import org.xmlunit.util.Nodes; + +import javax.xml.namespace.QName; + +import java.util.ArrayList; +import java.util.List; + +import static java.util.Arrays.asList; +import static org.xmlunit.diff.ElementSelectors.Default; +import static org.xmlunit.diff.ElementSelectors.byName; +import static org.xmlunit.diff.ElementSelectors.byNameAndAttributes; +import static org.xmlunit.diff.ElementSelectors.byNameAndText; +import static org.xmlunit.diff.ElementSelectors.conditionalSelector; +import static org.xmlunit.diff.ElementSelectors.selectorForElementNamed; + +public class XmlConfigurationMatchers { + + private static final String EHCACHE_NAMESPACE = "http://www.ehcache.org/v3"; + private static final QName CACHE_QNAME = new QName(EHCACHE_NAMESPACE, "cache"); + private static final QName RESOURCES_QNAME = new QName(EHCACHE_NAMESPACE, "resources"); + private static final QName EVENTS_TO_FIRE_ON_QNAME = new QName(EHCACHE_NAMESPACE, "events-to-fire-on"); + + private static final String MULTI_NAMESPACE = "http://www.ehcache.org/v3/multi"; + private static final QName MULTI_CONFIGURATION_QNAME = new QName(MULTI_NAMESPACE, "configuration"); + + public static CompareMatcher isSameConfigurationAs(Object input, ElementSelector... extraElementSelectors) { + List elementSelectors = new ArrayList<>(asList(extraElementSelectors)); + elementSelectors.add(selectorForElementNamed(MULTI_CONFIGURATION_QNAME, byNameAndAttributes("identity"))); + elementSelectors.add(selectorForElementNamed(EVENTS_TO_FIRE_ON_QNAME, byNameAndText)); + elementSelectors.add(selectorForElementNamed(CACHE_QNAME, byNameAndAttributes("alias"))); + elementSelectors.add(conditionalSelector(element -> Nodes.getQName(element.getParentNode()).equals(RESOURCES_QNAME), byName)); + elementSelectors.add(Default); + + return CompareMatcher.isSimilarTo(input).ignoreComments().ignoreWhitespace() + .withNodeMatcher(new DefaultNodeMatcher(elementSelectors.toArray(new ElementSelector[0]))); + } +} diff --git a/ehcache/build.gradle b/ehcache/build.gradle new file mode 100644 index 0000000000..ef5a4436b9 --- /dev/null +++ b/ehcache/build.gradle @@ -0,0 +1,96 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import aQute.bnd.osgi.Constants + +import static org.gradle.api.attributes.Bundling.EXTERNAL +import static org.gradle.api.attributes.Category.DOCUMENTATION +import static org.gradle.api.attributes.Usage.JAVA_RUNTIME + +plugins { + id 'org.ehcache.build.package' +} + +publishing.publications.withType(MavenPublication) { + pom { + name = 'Ehcache' + description = 'End-user ehcache3 jar artifact' + } +} + +configurations { + contents { + exclude group:'org.glassfish.jaxb' + exclude group:'org.slf4j' + exclude group:'javax.cache' + exclude group:'javax.xml.bind' + } +} + +dependencies { + contents project(':ehcache-api') + contents project(':ehcache-core') + contents project(':ehcache-impl') + contents project(':ehcache-107') + contents project(':ehcache-xml') + + api "javax.cache:cache-api:$parent.jcacheVersion" + implementation "org.slf4j:slf4j-api:$parent.slf4jVersion" + runtimeOnly 'org.glassfish.jaxb:jaxb-runtime:[2.2,3)' +} + +tasks.named('jar') { + osgi { + instruction Constants.BUNDLE_NAME, 'Ehcache 3' + instruction Constants.BUNDLE_SYMBOLICNAME, 'org.ehcache' + instruction Constants.BUNDLE_DESCRIPTION, 'Ehcache is an open-source caching library, compliant with the JSR-107 standard.' + instruction Constants.BUNDLE_ACTIVATOR, 'org.ehcache.core.osgi.EhcacheActivator' + instruction Constants.EXPORT_PACKAGE, '!org.ehcache.jsr107.tck, !org.ehcache.*.internal.*, org.ehcache.*' + instruction Constants.IMPORT_PACKAGE, 'javax.cache.*;resolution:=optional, !javax.annotation, !sun.misc, javax.xml.bind*;version="[2.2,3)", *' + } +} + +tasks.withType(Javadoc).matching({ name.equals('spiJavadoc') }).configureEach { + exclude '**/core/**', '**/impl/**', '**/xml/**', '**/jsr107/**', '**/transactions/**', '**/management/**', '**/tck/**' +} + +TaskProvider spiJavadoc = tasks.register('spiJavadoc', Javadoc) { + title = "$project.archivesBaseName $project.version API & SPI"; + source = tasks.javadoc.source + setClasspath(tasks.javadoc.classpath) + exclude "**/internal/**" + setDestinationDir project.file("$project.buildDir/docs/spi-javadoc") +} + +TaskProvider spiJavadocJar = project.getTasks().register('spiJavadocJar', Jar) { + from(spiJavadoc); + getArchiveClassifier().set("spi-javadoc"); +} +Configuration spiJavadocElements = project.getConfigurations().create("spiJavadocElements", config -> { + config.setDescription("javadoc elements for SPI documentation."); + config.attributes(attributes -> { + attributes.attribute(Usage.USAGE_ATTRIBUTE, project.getObjects().named(Usage.class, JAVA_RUNTIME)); + attributes.attribute(Category.CATEGORY_ATTRIBUTE, project.getObjects().named(Category.class, DOCUMENTATION)); + attributes.attribute(Bundling.BUNDLING_ATTRIBUTE, project.getObjects().named(Bundling.class, EXTERNAL)); + attributes.attribute(DocsType.DOCS_TYPE_ATTRIBUTE, project.getObjects().named(DocsType.class, "spi-javadoc")); + }); + config.getOutgoing().artifact(spiJavadocJar); +}); + +components.named('java', AdhocComponentWithVariants) { + addVariantsFromConfiguration(spiJavadocElements) {} +} diff --git a/ehcache/templates/github-release-issue.md b/ehcache/templates/github-release-issue.md new file mode 100644 index 0000000000..3abcc705c2 --- /dev/null +++ b/ehcache/templates/github-release-issue.md @@ -0,0 +1,11 @@ +- [ ] Tag release +- [ ] Build and verify release + - includes checking javadoc and source jars +- [ ] Prepare release on GitHub +- [ ] Prepare website update + - includes checking XSDs +- [ ] Publish jars to Maven Central +- [ ] Publish release on GitHub +- [ ] Publish website +- [ ] Update readme / bump version on release branch +- [ ] Email announcement diff --git a/ehcache/templates/github-release.md b/ehcache/templates/github-release.md new file mode 100644 index 0000000000..c4b3a17152 --- /dev/null +++ b/ehcache/templates/github-release.md @@ -0,0 +1,48 @@ +## Getting started + +=> Please add what this release is about here + +As usual, it contains numerous [bug fixes and enhancements](https://github.com/ehcache/ehcache3/milestone/%MILESTONE%?closed=1). + +Ehcache ${version} has been released to maven central under the following coordinates: + +### Main module + +``` xml + + org.ehcache + ehcache + %VERSION% + +``` + +### Transactions module + +``` xml + + org.ehcache + ehcache-transactions + %VERSION% + +``` + +### Clustering module + +``` xml + + org.ehcache + ehcache-clustered + %VERSION% + +``` + +Or can be downloaded below. +Note that if you download Ehcache jar you will need one additional jar in your classpath: +- [slf4j-api-1.7.25.jar](http://search.maven.org/#artifactdetails%7Corg.slf4j%7Cslf4j-api%7C1.7.25%7Cjar) + +## Clustering kit + +For clustering a kit is also provided that includes the Terracotta Server component. See below. + +## Further reading +- [Ehcache 3 documentation](http://www.ehcache.org/documentation/%MAJORVERSION%/) diff --git a/gradle.properties b/gradle.properties index 07797c462d..4fa2259dcb 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,29 +1,27 @@ +# Ehcache version +ehcacheVersion = 3.10-SNAPSHOT # Terracotta third parties -offheapVersion = 2.4.0 -statisticVersion = 1.5.0 -jcacheVersion = 1.0.0 +offheapVersion = 2.5.2 +statisticVersion = 2.1 +jcacheVersion = 1.1.0 slf4jVersion = 1.7.25 -sizeofVersion = 0.3.0 -ehcache2Version = 2.10.3 +sizeofVersion = 0.4.0 # Terracotta clustered -terracottaPlatformVersion = 5.4.0-pre6 -terracottaApisVersion = 1.4.0-pre7 -terracottaCoreVersion = 5.4.0-pre13 -terracottaPassthroughTestingVersion = 1.4.0-pre8 +terracottaPlatformVersion = 5.8.9-pre5 +terracottaApisVersion = 1.8.1 +terracottaCoreVersion = 5.8.4 +terracottaPassthroughTestingVersion = 1.8.2 +terracottaUtilitiesVersion = 0.0.9 # Test lib versions -junitVersion = 4.12 -assertjVersion = 3.8.0 +junitVersion = 4.13.1 +assertjVersion = 3.9.0 hamcrestVersion = 1.3 -mockitoVersion = 2.12.0 -jacksonVersion = 2.7.5 - -# Tools -findbugsVersion = 3.0.1 -checkstyleVersion = 5.9 -jacocoVersion = 0.7.9 +mockitoVersion = 2.23.4 +jacksonVersion = 2.12.4 +jcacheTckVersion = 1.1.0 sonatypeUser = OVERRIDE_ME sonatypePwd = OVERRIDE_ME diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 01b8bf6b1f..cc4fdc293d 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index b6517bb1d1..a0f7639f7d 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip diff --git a/gradlew b/gradlew index cccdd3d517..2fe81a7d95 100755 --- a/gradlew +++ b/gradlew @@ -1,5 +1,21 @@ #!/usr/bin/env sh +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + ############################################################################## ## ## Gradle start up script for UN*X @@ -28,7 +44,7 @@ APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"` # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD="maximum" @@ -109,8 +125,8 @@ if $darwin; then GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" fi -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` JAVACMD=`cygpath --unix "$JAVACMD"` @@ -138,19 +154,19 @@ if $cygwin ; then else eval `echo args$i`="\"$arg\"" fi - i=$((i+1)) + i=`expr $i + 1` done case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; esac fi @@ -159,14 +175,9 @@ save () { for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done echo " " } -APP_ARGS=$(save "$@") +APP_ARGS=`save "$@"` # Collect all arguments for the java command, following the shell quoting and substitution rules eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi - exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat index f9553162f1..9618d8d960 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -1,3 +1,19 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @@ -14,7 +30,7 @@ set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome diff --git a/impl/.gitignore b/impl/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/impl/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/impl/build.gradle b/impl/build.gradle deleted file mode 100644 index e980104840..0000000000 --- a/impl/build.gradle +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: EhDeploy - -dependencies { - compile project(':api'), project(':core') - compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion - compile (group: 'org.ehcache', name: 'sizeof', version: parent.sizeofVersion) { - exclude group:'org.slf4j', module:'slf4j-api' - } - testCompile project(path: ':core-spi-test'), 'org.ow2.asm:asm-all:5.0.4' - testCompile (group: 'net.sf.ehcache', name: 'ehcache', version: parent.ehcache2Version) { - exclude group:'org.slf4j', module:'slf4j-api' - } -} - -jar { - from "$rootDir/NOTICE" -} diff --git a/impl/gradle.properties b/impl/gradle.properties deleted file mode 100644 index a89604d2bc..0000000000 --- a/impl/gradle.properties +++ /dev/null @@ -1,4 +0,0 @@ -subPomName = Ehcache 3 Implementation module -subPomDesc = The implementation module of Ehcache 3 -osgi = {"Export-Package" : ["!org.terracotta.offheapstore.*", "!org.ehcache.impl.internal.*"],\ - "Import-Package" : ["!sun.misc.*", "!sun.security.action.*"]} diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java deleted file mode 100644 index 2a73b359d4..0000000000 --- a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java +++ /dev/null @@ -1,649 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.config.builders; - -import org.ehcache.config.Builder; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.core.config.store.StoreEventSourceConfiguration; -import org.ehcache.core.spi.store.heap.SizeOfEngine; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.config.copy.DefaultCopierConfiguration; -import org.ehcache.impl.config.event.DefaultCacheEventDispatcherConfiguration; -import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; -import org.ehcache.impl.config.event.DefaultEventSourceConfiguration; -import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; -import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; -import org.ehcache.impl.copy.SerializingCopier; -import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.service.ServiceConfiguration; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; - -import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; -import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; -import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; - - -/** - * The {@code CacheConfigurationBuilder} enables building {@link CacheConfiguration}s using a fluent style. - *

                                      - * As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new - * instance without modifying the one on which the method was called. - * This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere. - */ -public class CacheConfigurationBuilder implements Builder> { - - private final Collection> serviceConfigurations = new HashSet<>(); - private Expiry expiry; - private ClassLoader classLoader = null; - private EvictionAdvisor evictionAdvisor; - private ResourcePools resourcePools; - private Class keyType; - private Class valueType; - - /** - * Creates a new instance ready to produce a {@link CacheConfiguration} with key type {@code } and with value type - * {@code } and which will use the {@link ResourcePools configured resources}. - * - * @param keyType the key type - * @param valueType the value type - * @param resourcePools the resources to use - * @param the key type - * @param the value type - * @return a {@code CacheConfigurationBuilder} - */ - public static CacheConfigurationBuilder newCacheConfigurationBuilder(Class keyType, Class valueType, ResourcePools resourcePools) { - return new CacheConfigurationBuilder<>(keyType, valueType, resourcePools); - } - - /** - * Creates a new instance ready to produce a {@link CacheConfiguration} with key type {@code } and with value type - * {@code } and which will use the {@link ResourcePools configured resources}, passed as a {@link ResourcePoolsBuilder}. - * - * @param keyType the key type - * @param valueType the value type - * @param resourcePoolsBuilder the resources to use, as a builder - * @param the key type - * @param the value type - * @return a {@code CacheConfigurationBuilder} - */ - public static CacheConfigurationBuilder newCacheConfigurationBuilder(Class keyType, Class valueType, Builder resourcePoolsBuilder) { - return new CacheConfigurationBuilder<>(keyType, valueType, resourcePoolsBuilder.build()); - } - - /** - * Creates a new instance ready to produce a {@link CacheConfiguration} functionally equivalent to the supplied configuration. - * - * @param configuration seed configuration - * @param the key type - * @param the value type - * @return a {@code CacheConfigurationBuilder} - */ - public static CacheConfigurationBuilder newCacheConfigurationBuilder(CacheConfiguration configuration) { - CacheConfigurationBuilder builder = newCacheConfigurationBuilder(configuration.getKeyType(), configuration.getValueType(), configuration.getResourcePools()) - .withClassLoader(configuration.getClassLoader()) - .withEvictionAdvisor(configuration.getEvictionAdvisor()) - .withExpiry(configuration.getExpiry()); - for (ServiceConfiguration serviceConfig : configuration.getServiceConfigurations()) { - builder = builder.add(serviceConfig); - } - return builder; - } - - private CacheConfigurationBuilder(Class keyType, Class valueType, ResourcePools resourcePools) { - this.keyType = keyType; - this.valueType = valueType; - this.resourcePools = resourcePools; - } - - private CacheConfigurationBuilder(CacheConfigurationBuilder other) { - this.keyType = other.keyType; - this.valueType = other.valueType; - this.expiry = other.expiry; - this.classLoader = other.classLoader; - this.evictionAdvisor = other.evictionAdvisor; - this.resourcePools = other.resourcePools; - this.serviceConfigurations.addAll(other.serviceConfigurations); - } - - /** - * Adds a {@link ServiceConfiguration} to the returned builder. - * - * @param configuration the service configuration to add - * @return a new builder with the added service configuration - */ - public CacheConfigurationBuilder add(ServiceConfiguration configuration) { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - if (getExistingServiceConfiguration(configuration.getClass()) != null) { - if (configuration instanceof DefaultCopierConfiguration) { - DefaultCopierConfiguration copierConfiguration = (DefaultCopierConfiguration) configuration; - removeExistingCopierConfigFor(copierConfiguration.getType(), otherBuilder); - } else if (configuration instanceof DefaultSerializerConfiguration) { - DefaultSerializerConfiguration serializerConfiguration = (DefaultSerializerConfiguration) configuration; - removeExistingSerializerConfigFor(serializerConfiguration.getType(), otherBuilder); - } else if (!(configuration instanceof DefaultCacheEventListenerConfiguration)) { - throw new IllegalStateException("Cannot add a generic service configuration when another one already exists. " + - "Rely on specific with* methods or make sure your remove other configuration first."); - } - } - otherBuilder.serviceConfigurations.add(configuration); - return otherBuilder; - } - - /** - * Convenience method to add a {@link ServiceConfiguration} that is produced by a {@link Builder}. - * - * @param configurationBuilder the service configuration to add, {@link Builder#build()} will be called on it - * @return a new builder with the added service configuration - * - * @see #add(ServiceConfiguration) - */ - public CacheConfigurationBuilder add(Builder> configurationBuilder) { - return add(configurationBuilder.build()); - } - - /** - * Adds an {@link EvictionAdvisor} to the returned builder. - * - * @param evictionAdvisor the eviction advisor to be used - * @return a new builder with the added eviction advisor - */ - public CacheConfigurationBuilder withEvictionAdvisor(final EvictionAdvisor evictionAdvisor) { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - otherBuilder.evictionAdvisor = evictionAdvisor; - return otherBuilder; - } - - /** - * Removes a {@link ServiceConfiguration} from the returned builder. - * - * @param configuration the service configuration to remove - * @return a new builder without the specified configuration - */ - public CacheConfigurationBuilder remove(ServiceConfiguration configuration) { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - otherBuilder.serviceConfigurations.remove(configuration); - return otherBuilder; - } - - /** - * Clears all {@link ServiceConfiguration}s from the returned builder. - * - * @return a new builder with no service configurations left - */ - public CacheConfigurationBuilder clearAllServiceConfig() { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - otherBuilder.serviceConfigurations.clear(); - return otherBuilder; - } - - /** - * Returns the first {@link ServiceConfiguration} with type matching the class passed in. - * - * @param clazz the service configuration class - * @param the type of the service configuration - * @return a matching service configuration, or {@code null} if none can be found - */ - public > T getExistingServiceConfiguration(Class clazz) { - for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { - if (clazz.equals(serviceConfiguration.getClass())) { - return clazz.cast(serviceConfiguration); - } - } - return null; - } - - /** - * Returns all {@link ServiceConfiguration}s of type matching the class passed in. - * - * @param clazz the service configuration class - * @param the type of the service configuration - * @return a list with service configurations - */ - public > List getExistingServiceConfigurations(Class clazz) { - ArrayList results = new ArrayList<>(); - for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { - if (clazz.equals(serviceConfiguration.getClass())) { - results.add(clazz.cast(serviceConfiguration)); - } - } - return results; - } - - /** - * Adds a {@link ClassLoader} to the returned builder. - *

                                      - * The {@link ClassLoader} will be used for resolving all non Ehcache types. - * - * @param classLoader the class loader to use - * @return a new builder with the added class loader - */ - public CacheConfigurationBuilder withClassLoader(ClassLoader classLoader) { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - otherBuilder.classLoader = classLoader; - return otherBuilder; - } - - /** - * Adds the {@link ResourcePools} to the returned builder. - *

                                      - * {@link ResourcePools} is what determines the tiering of a cache. - * - * @param resourcePools the resource pools to use - * @return a new builder with the added resource pools - */ - public CacheConfigurationBuilder withResourcePools(ResourcePools resourcePools) { - if (resourcePools == null) { - throw new NullPointerException("Null resource pools"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - otherBuilder.resourcePools = resourcePools; - return otherBuilder; - } - - /** - * Convenience method to add a {@link ResourcePools} through a {@link ResourcePoolsBuilder} to the returned builder. - * - * @param resourcePoolsBuilder the builder providing the resource pool - * @return a new builder with the added resource pools - * - * @see #withResourcePools(ResourcePools) - */ - public CacheConfigurationBuilder withResourcePools(ResourcePoolsBuilder resourcePoolsBuilder) { - if (resourcePoolsBuilder == null) { - throw new NullPointerException("Null resource pools builder"); - } - return withResourcePools(resourcePoolsBuilder.build()); - } - - /** - * Adds {@link Expiry} configuration to the returned builder. - *

                                      - * {@link Expiry} is what controls data freshness in a cache. - * - * @param expiry the expiry to use - * @return a new builder with the added expiry - */ - public CacheConfigurationBuilder withExpiry(Expiry expiry) { - if (expiry == null) { - throw new NullPointerException("Null expiry"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - otherBuilder.expiry = expiry; - return otherBuilder; - } - - /** - * Indicates whether this builder has configured expiry or not. - * - * @return {@code true} if expiry configured, {@code false} otherwise - */ - public boolean hasConfiguredExpiry() { - return expiry != null; - } - - /** - * Adds a {@link CacheLoaderWriter} to the configured builder. - *

                                      - * Configuration of a {@link CacheLoaderWriter} is what enables cache-through patterns. - * - * @param loaderWriter the loaderwriter to use - * @return a new builder with the added loaderwriter configuration - */ - public CacheConfigurationBuilder withLoaderWriter(CacheLoaderWriter loaderWriter) { - if (loaderWriter == null) { - throw new NullPointerException("Null loaderWriter"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - DefaultCacheLoaderWriterConfiguration existingServiceConfiguration = otherBuilder.getExistingServiceConfiguration(DefaultCacheLoaderWriterConfiguration.class); - if (existingServiceConfiguration != null) { - otherBuilder.serviceConfigurations.remove(existingServiceConfiguration); - } - otherBuilder.serviceConfigurations.add(new DefaultCacheLoaderWriterConfiguration(loaderWriter)); - return otherBuilder; - } - - /** - * Adds a {@link CacheLoaderWriter} configured through a class and optional constructor arguments to the configured - * builder. - *

                                      - * Configuration of a {@link CacheLoaderWriter} is what enables cache-through patterns. - * - * @param loaderWriterClass the loaderwrite class - * @param arguments optional constructor arguments - * @return a new builder with the added loaderwriter configuration - */ - public CacheConfigurationBuilder withLoaderWriter(Class> loaderWriterClass, Object... arguments) { - if (loaderWriterClass == null) { - throw new NullPointerException("Null loaderWriterClass"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - DefaultCacheLoaderWriterConfiguration existingServiceConfiguration = otherBuilder.getExistingServiceConfiguration(DefaultCacheLoaderWriterConfiguration.class); - if (existingServiceConfiguration != null) { - otherBuilder.serviceConfigurations.remove(existingServiceConfiguration); - } - otherBuilder.serviceConfigurations.add(new DefaultCacheLoaderWriterConfiguration(loaderWriterClass, arguments)); - return otherBuilder; - } - - /** - * Adds by-value semantic using the cache key serializer for the key on heap. - *

                                      - * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. - * - * @return a new builder with the added key copier - */ - public CacheConfigurationBuilder withKeySerializingCopier() { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.KEY, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); - return otherBuilder; - } - - /** - * Adds by-value semantic using the cache value serializer for the value on heap. - *

                                      - * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. - * - * @return a new builder with the added value copier - */ - public CacheConfigurationBuilder withValueSerializingCopier() { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.VALUE, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); - return otherBuilder; - } - - /** - * Adds by-value semantic using the provided {@link Copier} for the key on heap. - *

                                      - * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. - * - * @param keyCopier the key copier to use - * @return a new builder with the added key copier - */ - public CacheConfigurationBuilder withKeyCopier(Copier keyCopier) { - if (keyCopier == null) { - throw new NullPointerException("Null key copier"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.KEY, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration<>(keyCopier, DefaultCopierConfiguration.Type.KEY)); - return otherBuilder; - } - - /** - * Adds by-value semantic using the provided {@link Copier} class for the key on heap. - *

                                      - * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. - * - * @param keyCopierClass the key copier class to use - * @return a new builder with the added key copier - */ - public CacheConfigurationBuilder withKeyCopier(Class> keyCopierClass) { - if (keyCopierClass == null) { - throw new NullPointerException("Null key copier class"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.KEY, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration<>(keyCopierClass, DefaultCopierConfiguration.Type.KEY)); - return otherBuilder; - } - - /** - * Adds by-value semantic using the provided {@link Copier} for the value on heap. - *

                                      - * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. - * - * @param valueCopier the value copier to use - * @return a new builder with the added value copier - */ - public CacheConfigurationBuilder withValueCopier(Copier valueCopier) { - if (valueCopier == null) { - throw new NullPointerException("Null value copier"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.VALUE, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration<>(valueCopier, DefaultCopierConfiguration.Type.VALUE)); - return otherBuilder; - } - - /** - * Adds by-value semantic using the provided {@link Copier} class for the value on heap. - *

                                      - * {@link Copier}s are what enable control of by-reference / by-value semantics for on-heap tier. - * - * @param valueCopierClass the value copier class to use - * @return a new builder with the added value copier - */ - public CacheConfigurationBuilder withValueCopier(Class> valueCopierClass) { - if (valueCopierClass == null) { - throw new NullPointerException("Null value copier"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.VALUE, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration<>(valueCopierClass, DefaultCopierConfiguration.Type.VALUE)); - return otherBuilder; - } - - private void removeExistingCopierConfigFor(DefaultCopierConfiguration.Type type, CacheConfigurationBuilder otherBuilder) { - List existingServiceConfigurations = otherBuilder.getExistingServiceConfigurations(DefaultCopierConfiguration.class); - for (DefaultCopierConfiguration configuration : existingServiceConfigurations) { - if (configuration.getType().equals(type)) { - otherBuilder.serviceConfigurations.remove(configuration); - } - } - } - - private void removeExistingSerializerConfigFor(DefaultSerializerConfiguration.Type type, CacheConfigurationBuilder otherBuilder) { - List existingServiceConfigurations = otherBuilder.getExistingServiceConfigurations(DefaultSerializerConfiguration.class); - for (DefaultSerializerConfiguration configuration : existingServiceConfigurations) { - if (configuration.getType().equals(type)) { - otherBuilder.serviceConfigurations.remove(configuration); - } - } - } - - /** - * Adds a {@link Serializer} for cache keys to the configured builder. - *

                                      - * {@link Serializer}s are what enables cache storage beyond the heap tier. - * - * @param keySerializer the key serializer to use - * @return a new builder with the added key serializer - */ - public CacheConfigurationBuilder withKeySerializer(Serializer keySerializer) { - if (keySerializer == null) { - throw new NullPointerException("Null key serializer"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingSerializerConfigFor(DefaultSerializerConfiguration.Type.KEY, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultSerializerConfiguration<>(keySerializer, DefaultSerializerConfiguration.Type.KEY)); - return otherBuilder; - } - - /** - * Adds a {@link Serializer} class for cache keys to the configured builder. - *

                                      - * {@link Serializer}s are what enables cache storage beyond the heap tier. - * - * @param keySerializerClass the key serializer to use - * @return a new builder with the added key serializer - */ - public CacheConfigurationBuilder withKeySerializer(Class> keySerializerClass) { - if (keySerializerClass == null) { - throw new NullPointerException("Null key serializer class"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingSerializerConfigFor(DefaultSerializerConfiguration.Type.KEY, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultSerializerConfiguration<>(keySerializerClass, DefaultSerializerConfiguration.Type.KEY)); - return otherBuilder; - } - - /** - * Adds a {@link Serializer} for cache values to the configured builder. - *

                                      - * {@link Serializer}s are what enables cache storage beyond the heap tier. - * - * @param valueSerializer the key serializer to use - * @return a new builder with the added value serializer - */ - public CacheConfigurationBuilder withValueSerializer(Serializer valueSerializer) { - if (valueSerializer == null) { - throw new NullPointerException("Null value serializer"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingSerializerConfigFor(DefaultSerializerConfiguration.Type.VALUE, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultSerializerConfiguration<>(valueSerializer, DefaultSerializerConfiguration.Type.VALUE)); - return otherBuilder; - } - - /** - * Adds a {@link Serializer} class for cache values to the configured builder. - *

                                      - * {@link Serializer}s are what enables cache storage beyond the heap tier. - * - * @param valueSerializerClass the key serializer to use - * @return a new builder with the added value serializer - */ - public CacheConfigurationBuilder withValueSerializer(Class> valueSerializerClass) { - if (valueSerializerClass == null) { - throw new NullPointerException("Null value serializer class"); - } - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - removeExistingSerializerConfigFor(DefaultSerializerConfiguration.Type.VALUE, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultSerializerConfiguration<>(valueSerializerClass, DefaultSerializerConfiguration.Type.VALUE)); - return otherBuilder; - } - - /** - * Adds {@link StoreEventSourceConfiguration} with the specified dispatcher concurrency - * to the configured builder. - * - * @param dispatcherConcurrency the level of concurrency in the dispatcher for ordered events - * @return a new builder with the added configuration - */ - public CacheConfigurationBuilder withDispatcherConcurrency(int dispatcherConcurrency) { - DefaultEventSourceConfiguration configuration = new DefaultEventSourceConfiguration(dispatcherConcurrency); - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - DefaultEventSourceConfiguration existingServiceConfiguration = otherBuilder.getExistingServiceConfiguration(DefaultEventSourceConfiguration.class); - if (existingServiceConfiguration != null) { - otherBuilder.serviceConfigurations.remove(existingServiceConfiguration); - } - otherBuilder.serviceConfigurations.add(configuration); - return otherBuilder; - } - - /** - * Adds a {@link ServiceConfiguration} for the {@link org.ehcache.core.events.CacheEventDispatcherFactory} specifying - * the thread pool alias to use. - * - * @param threadPoolAlias the thread pool alias to use - * @return a new builder with the added configuration - */ - public CacheConfigurationBuilder withEventListenersThreadPool(String threadPoolAlias) { - DefaultCacheEventDispatcherConfiguration configuration = new DefaultCacheEventDispatcherConfiguration(threadPoolAlias); - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - DefaultCacheEventDispatcherConfiguration existingServiceConfiguration = otherBuilder.getExistingServiceConfiguration(DefaultCacheEventDispatcherConfiguration.class); - if (existingServiceConfiguration != null) { - otherBuilder.serviceConfigurations.remove(existingServiceConfiguration); - } - otherBuilder.serviceConfigurations.add(configuration); - return otherBuilder; - } - - /** - * Adds a {@link ServiceConfiguration} for the {@link org.ehcache.impl.internal.store.disk.OffHeapDiskStore.Provider} - * indicating thread pool alias and write concurrency. - * - * @param threadPoolAlias the thread pool alias - * @param concurrency the write concurrency - * @return a new builder with the added configuration - */ - public CacheConfigurationBuilder withDiskStoreThreadPool(String threadPoolAlias, int concurrency) { - OffHeapDiskStoreConfiguration configuration = new OffHeapDiskStoreConfiguration(threadPoolAlias, concurrency); - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - OffHeapDiskStoreConfiguration existingServiceConfiguration = getExistingServiceConfiguration(OffHeapDiskStoreConfiguration.class); - if (existingServiceConfiguration != null) { - otherBuilder.serviceConfigurations.remove(existingServiceConfiguration); - } - otherBuilder.serviceConfigurations.add(configuration); - return otherBuilder; - } - - /** - * Adds or updates the {@link DefaultSizeOfEngineConfiguration} with the specified object graph maximum size to the configured - * builder. - *

                                      - * {@link SizeOfEngine} is what enables the heap tier to be sized in {@link MemoryUnit}. - * - * @param size the maximum graph size - * @return a new builder with the added / updated configuration - */ - public CacheConfigurationBuilder withSizeOfMaxObjectGraph(long size) { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - DefaultSizeOfEngineConfiguration configuration = otherBuilder.getExistingServiceConfiguration(DefaultSizeOfEngineConfiguration.class); - if (configuration == null) { - otherBuilder.serviceConfigurations.add(new DefaultSizeOfEngineConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, size)); - } else { - otherBuilder.serviceConfigurations.remove(configuration); - otherBuilder.serviceConfigurations.add(new DefaultSizeOfEngineConfiguration(configuration.getMaxObjectSize(), configuration.getUnit(), size)); - } - return otherBuilder; - } - - /** - * Adds or updates the {@link DefaultSizeOfEngineConfiguration} with the specified maximum mapping size to the configured - * builder. - *

                                      - * {@link SizeOfEngine} is what enables the heap tier to be sized in {@link MemoryUnit}. - * - * @param size the maximum mapping size - * @param unit the memory unit - * @return a new builder with the added / updated configuration - */ - public CacheConfigurationBuilder withSizeOfMaxObjectSize(long size, MemoryUnit unit) { - CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder<>(this); - DefaultSizeOfEngineConfiguration configuration = getExistingServiceConfiguration(DefaultSizeOfEngineConfiguration.class); - if (configuration == null) { - otherBuilder.serviceConfigurations.add(new DefaultSizeOfEngineConfiguration(size, unit, DEFAULT_OBJECT_GRAPH_SIZE)); - } else { - otherBuilder.serviceConfigurations.remove(configuration); - otherBuilder.serviceConfigurations.add(new DefaultSizeOfEngineConfiguration(size, unit, configuration.getMaxObjectGraphSize())); - } - return otherBuilder; - } - - @Override - public CacheConfiguration build() { - return new BaseCacheConfiguration<>(keyType, valueType, evictionAdvisor, - classLoader, expiry, resourcePools, - serviceConfigurations.toArray(new ServiceConfiguration[serviceConfigurations.size()])); - - } -} diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java deleted file mode 100644 index 20318339db..0000000000 --- a/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java +++ /dev/null @@ -1,406 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.config.builders; - -import org.ehcache.CacheManager; -import org.ehcache.PersistentCacheManager; -import org.ehcache.config.Builder; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.Configuration; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.EhcacheManager; -import org.ehcache.core.spi.store.heap.SizeOfEngine; -import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; -import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration; -import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; -import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; -import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; -import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceCreationConfiguration; - -import java.io.File; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import static java.util.Collections.emptySet; -import static java.util.Collections.unmodifiableSet; -import static org.ehcache.config.builders.ConfigurationBuilder.newConfigurationBuilder; -import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; -import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; -import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; - -/** - * The {@code CacheManagerBuilder} enables building cache managers using a fluent style. - *

                                      - * As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new - * instance without modifying the one on which the method was called. - * This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere. - */ -public class CacheManagerBuilder implements Builder { - - private final ConfigurationBuilder configBuilder; - private final Set services; - - /** - * Builds a {@link CacheManager} or a subtype of it and initializes it if requested. - * - * @param init whether the returned {@code CacheManager} is to be initialized or not - * @return a {@code CacheManager} or a subtype of it - */ - public T build(final boolean init) { - final T cacheManager = newCacheManager(services, configBuilder.build()); - if(init) { - cacheManager.init(); - } - return cacheManager; - } - - /** - * Builds a {@link CacheManager} or a subtype of it uninitialized. - * - * @return a {@code CacheManager} or a subtype of it uninitialized - */ - @Override - public T build() { - return build(false); - } - - private CacheManagerBuilder() { - this.configBuilder = newConfigurationBuilder(); - this.services = emptySet(); - } - - private CacheManagerBuilder(CacheManagerBuilder builder, Set services) { - this.configBuilder = builder.configBuilder; - this.services = unmodifiableSet(services); - } - - private CacheManagerBuilder(CacheManagerBuilder builder, ConfigurationBuilder configBuilder) { - this.configBuilder = configBuilder; - this.services = builder.services; - } - - /** - * Creates a new {@link CacheManager} based on the provided configuration. - * The returned {@code CacheManager} is uninitialized. - * - * @param configuration the configuration to use - * @return a {@code CacheManager} - */ - public static CacheManager newCacheManager(final Configuration configuration) { - return new EhcacheManager(configuration); - } - - T newCacheManager(Collection services, final Configuration configuration) { - final EhcacheManager ehcacheManager = new EhcacheManager(configuration, services); - return cast(ehcacheManager); - } - - @SuppressWarnings("unchecked") - T cast(EhcacheManager ehcacheManager) { - return (T) ehcacheManager; - } - - /** - * Adds a {@link CacheConfiguration} linked to the specified alias to the returned builder. - * - * @param alias the cache alias - * @param configuration the {@code CacheConfiguration} - * @param the cache key type - * @param the cache value type - * @return a new builder with the added cache configuration - * - * @see CacheConfigurationBuilder - */ - public CacheManagerBuilder withCache(String alias, CacheConfiguration configuration) { - return new CacheManagerBuilder<>(this, configBuilder.addCache(alias, configuration)); - } - - /** - * Convenience method to add a {@link CacheConfiguration} linked to the specified alias to the returned builder by - * building it from the provided {@link Builder}. - * - * @param alias the cache alias - * @param configurationBuilder the {@code Builder} to get {@code CacheConfiguration} from - * @param the cache key type - * @param the cache value type - * @return a new builder with the added cache configuration - * - * @see CacheConfigurationBuilder - */ - public CacheManagerBuilder withCache(String alias, Builder> configurationBuilder) { - return withCache(alias, configurationBuilder.build()); - } - - /** - * Specializes the returned {@link CacheManager} subtype through a specific {@link CacheManagerConfiguration} which - * will optionally add configurations to the returned builder. - * - * @param cfg the {@code CacheManagerConfiguration} to use - * @param the subtype of {@code CacheManager} - * @return a new builder ready to build a more specific subtype of cache manager - * - * @see #persistence(String) - * @see PersistentCacheManager - * @see CacheManagerPersistenceConfiguration - */ - public CacheManagerBuilder with(CacheManagerConfiguration cfg) { - return cfg.builder(this); - } - - /** - * Convenience method to specialize the returned {@link CacheManager} subtype through a {@link CacheManagerConfiguration} - * built using the provided {@link Builder}. - * - * @param cfgBuilder the {@code Builder} to get the {@code CacheManagerConfiguration} from - * @return a new builder ready to build a more specific subtype of cache manager - * - * @see CacheConfigurationBuilder - */ - public CacheManagerBuilder with(Builder> cfgBuilder) { - return with(cfgBuilder.build()); - } - - /** - * Adds a {@link Service} instance to the returned builder. - *

                                      - * The service instance will be used by the constructed {@link CacheManager}. - * - * @param service the {@code Service} to add - * @return a new builder with the added service - */ - public CacheManagerBuilder using(Service service) { - Set newServices = new HashSet<>(services); - newServices.add(service); - return new CacheManagerBuilder<>(this, newServices); - } - - /** - * Adds a default {@link Copier} for the specified type to the returned builder. - * - * @param clazz the {@code Class} for which the copier is - * @param copier the {@code Copier} instance - * @param the type which can be copied - * @return a new builder with the added default copier - */ - public CacheManagerBuilder withCopier(Class clazz, Class> copier) { - DefaultCopyProviderConfiguration service = configBuilder.findServiceByClass(DefaultCopyProviderConfiguration.class); - if (service == null) { - service = new DefaultCopyProviderConfiguration(); - service.addCopierFor(clazz, copier); - return new CacheManagerBuilder<>(this, configBuilder.addService(service)); - } else { - DefaultCopyProviderConfiguration newConfig = new DefaultCopyProviderConfiguration(service); - newConfig.addCopierFor(clazz, copier, true); - return new CacheManagerBuilder<>(this, configBuilder.removeService(service).addService(newConfig)); - } - } - - /** - * Adds a default {@link Serializer} for the specified type to the returned builder. - * - * @param clazz the {@code Class} for which the serializer is - * @param serializer the {@code Serializer} instance - * @param the type which can be serialized - * @return a new builder with the added default serializer - */ - public CacheManagerBuilder withSerializer(Class clazz, Class> serializer) { - DefaultSerializationProviderConfiguration service = configBuilder.findServiceByClass(DefaultSerializationProviderConfiguration.class); - if (service == null) { - service = new DefaultSerializationProviderConfiguration(); - service.addSerializerFor(clazz, serializer); - return new CacheManagerBuilder<>(this, configBuilder.addService(service)); - } else { - DefaultSerializationProviderConfiguration newConfig = new DefaultSerializationProviderConfiguration(service); - newConfig.addSerializerFor(clazz, serializer, true); - return new CacheManagerBuilder<>(this, configBuilder.removeService(service).addService(newConfig)); - } - } - - /** - * Adds a default {@link SizeOfEngine} configuration, that limits the max object graph to - * size, to the returned builder. - * - * @param size the max object graph size - * @return a new builder with the added configuration - */ - public CacheManagerBuilder withDefaultSizeOfMaxObjectGraph(long size) { - DefaultSizeOfEngineProviderConfiguration configuration = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class); - if (configuration == null) { - return new CacheManagerBuilder<>(this, configBuilder.addService(new DefaultSizeOfEngineProviderConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, size))); - } else { - ConfigurationBuilder builder = configBuilder.removeService(configuration); - return new CacheManagerBuilder<>(this, builder.addService(new DefaultSizeOfEngineProviderConfiguration(configuration - .getMaxObjectSize(), configuration.getUnit(), size))); - } - } - - /** - * Adds a default {@link SizeOfEngine} configuration, that limits the max object size, to - * the returned builder. - * - * @param size the max object size - * @param unit the max object size unit - * @return a new builder with the added configuration - */ - public CacheManagerBuilder withDefaultSizeOfMaxObjectSize(long size, MemoryUnit unit) { - DefaultSizeOfEngineProviderConfiguration configuration = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class); - if (configuration == null) { - return new CacheManagerBuilder<>(this, configBuilder.addService(new DefaultSizeOfEngineProviderConfiguration(size, unit, DEFAULT_OBJECT_GRAPH_SIZE))); - } else { - ConfigurationBuilder builder = configBuilder.removeService(configuration); - return new CacheManagerBuilder<>(this, builder.addService(new DefaultSizeOfEngineProviderConfiguration(size, unit, configuration - .getMaxObjectGraphSize()))); - } - } - - /** - * Adds a {@link WriteBehindProviderConfiguration}, that specifies the thread pool to use, to the returned builder. - * - * @param threadPoolAlias the thread pool alias - * @return a new builder with the added configuration - * - * @see PooledExecutionServiceConfigurationBuilder - */ - public CacheManagerBuilder withDefaultWriteBehindThreadPool(String threadPoolAlias) { - WriteBehindProviderConfiguration config = configBuilder.findServiceByClass(WriteBehindProviderConfiguration.class); - if (config == null) { - return new CacheManagerBuilder<>(this, configBuilder.addService(new WriteBehindProviderConfiguration(threadPoolAlias))); - } else { - ConfigurationBuilder builder = configBuilder.removeService(config); - return new CacheManagerBuilder<>(this, builder.addService(new WriteBehindProviderConfiguration(threadPoolAlias))); - } - } - - /** - * Adds a {@link OffHeapDiskStoreProviderConfiguration}, that specifies the thread pool to use, to the returned - * builder. - * - * @param threadPoolAlias the thread pool alias - * @return a new builder with the added configuration - * - * @see PooledExecutionServiceConfigurationBuilder - */ - public CacheManagerBuilder withDefaultDiskStoreThreadPool(String threadPoolAlias) { - OffHeapDiskStoreProviderConfiguration config = configBuilder.findServiceByClass(OffHeapDiskStoreProviderConfiguration.class); - if (config == null) { - return new CacheManagerBuilder<>(this, configBuilder.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))); - } else { - ConfigurationBuilder builder = configBuilder.removeService(config); - return new CacheManagerBuilder<>(this, builder.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))); - } - } - - /** - * Adds a {@link CacheEventDispatcherFactoryConfiguration}, that specifies the thread pool to use, to the returned - * builder. - * - * @param threadPoolAlias the thread pool alias - * @return a new builder with the added configuration - * - * @see PooledExecutionServiceConfigurationBuilder - */ - public CacheManagerBuilder withDefaultEventListenersThreadPool(String threadPoolAlias) { - CacheEventDispatcherFactoryConfiguration config = configBuilder.findServiceByClass(CacheEventDispatcherFactoryConfiguration.class); - if (config == null) { - return new CacheManagerBuilder<>(this, configBuilder.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias))); - } else { - ConfigurationBuilder builder = configBuilder.removeService(config); - return new CacheManagerBuilder<>(this, builder.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias))); - } - } - - /** - * Adds a {@link ServiceCreationConfiguration} to the returned builder. - *

                                      - * These configurations are used to load services and configure them at creation time. - * - * @param serviceConfiguration the {@code ServiceCreationConfiguration} to use - * @return a new builder with the added configuration - */ - public CacheManagerBuilder using(ServiceCreationConfiguration serviceConfiguration) { - return new CacheManagerBuilder<>(this, configBuilder.addService(serviceConfiguration)); - } - - /** - * Replaces an existing {@link ServiceCreationConfiguration} of the same type on the returned builder. - *

                                      - * Duplicate service creation configuration will cause a cache manager to fail to initialize. - * - * @param overwriteServiceConfiguration the new {@code ServiceCreationConfiguration} to use - * @return a new builder with the replaced configuration - */ - public CacheManagerBuilder replacing(ServiceCreationConfiguration overwriteServiceConfiguration) { - ServiceCreationConfiguration existingConfiguration = configBuilder.findServiceByClass(overwriteServiceConfiguration.getClass()); - return new CacheManagerBuilder<>(this, configBuilder.removeService(existingConfiguration) - .addService(overwriteServiceConfiguration)); - } - - /** - * Adds a {@link ClassLoader}, to use for non Ehcache types, to the returned builder - * - * @param classLoader the class loader to use - * @return a new builder with the added class loader - */ - public CacheManagerBuilder withClassLoader(ClassLoader classLoader) { - return new CacheManagerBuilder<>(this, configBuilder.withClassLoader(classLoader)); - } - - /** - * Creates a new {@code CacheManagerBuilder} - * - * @return the cache manager builder - */ - public static CacheManagerBuilder newCacheManagerBuilder() { - return new CacheManagerBuilder<>(); - } - - /** - * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual - * level of persistence is configured on the disk resource pool per cache. - * - * @param rootDirectory the root directory to use for disk storage - * @return a {@code CacheManagerConfiguration} - * - * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) - * @see #with(CacheManagerConfiguration) - * @see PersistentCacheManager - */ - public static CacheManagerConfiguration persistence(String rootDirectory) { - return persistence(new File(rootDirectory)); - } - - /** - * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual - * level of persistence is configured on the disk resource pool per cache. - * - * @param rootDirectory the root directory to use for disk storage - * @return a {@code CacheManagerConfiguration} - * - * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) - * @see #with(CacheManagerConfiguration) - * @see PersistentCacheManager - */ - public static CacheManagerConfiguration persistence(File rootDirectory) { - return new CacheManagerPersistenceConfiguration(rootDirectory); - } -} diff --git a/impl/src/main/java/org/ehcache/config/builders/ConfigurationBuilder.java b/impl/src/main/java/org/ehcache/config/builders/ConfigurationBuilder.java deleted file mode 100644 index 57fcd94305..0000000000 --- a/impl/src/main/java/org/ehcache/config/builders/ConfigurationBuilder.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.config.builders; - -import org.ehcache.config.Builder; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.Configuration; -import org.ehcache.core.config.DefaultConfiguration; -import org.ehcache.spi.service.ServiceCreationConfiguration; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.unmodifiableList; -import static java.util.Collections.unmodifiableMap; - -/** - * Companion type to the {@link CacheManagerBuilder} that handles the {@link Configuration} building. - * - * @author Alex Snaps - */ -class ConfigurationBuilder implements Builder { - - private final Map> caches; - private final List> serviceConfigurations; - private final ClassLoader classLoader; - - static ConfigurationBuilder newConfigurationBuilder() { - return new ConfigurationBuilder(); - } - - private ConfigurationBuilder() { - this.caches = emptyMap(); - this.serviceConfigurations = emptyList(); - this.classLoader = null; - } - - private ConfigurationBuilder(ConfigurationBuilder builder, Map> caches) { - this.caches = unmodifiableMap(caches); - this.serviceConfigurations = builder.serviceConfigurations; - this.classLoader = builder.classLoader; - } - - private ConfigurationBuilder(ConfigurationBuilder builder, List> serviceConfigurations) { - this.caches = builder.caches; - this.serviceConfigurations = unmodifiableList(serviceConfigurations); - this.classLoader = builder.classLoader; - } - - private ConfigurationBuilder(ConfigurationBuilder builder, ClassLoader classLoader) { - this.caches = builder.caches; - this.serviceConfigurations = builder.serviceConfigurations; - this.classLoader = classLoader; - } - - @Override - public Configuration build() { - return new DefaultConfiguration(caches, classLoader, serviceConfigurations.toArray(new ServiceCreationConfiguration[serviceConfigurations.size()])); - } - - ConfigurationBuilder addCache(String alias, CacheConfiguration config) { - Map> newCaches = new HashMap<>(caches); - if(newCaches.put(alias, config) != null) { - throw new IllegalArgumentException("Cache alias '" + alias + "' already exists"); - } - return new ConfigurationBuilder(this, newCaches); - } - - public ConfigurationBuilder removeCache(String alias) { - Map> newCaches = new HashMap<>(caches); - newCaches.remove(alias); - return new ConfigurationBuilder(this, newCaches); - } - - ConfigurationBuilder addService(ServiceCreationConfiguration serviceConfiguration) { - if (findServiceByClass(serviceConfiguration.getClass()) != null) { - throw new IllegalArgumentException("There is already a ServiceCreationConfiguration registered for service " + serviceConfiguration - .getServiceType() + " of type " + serviceConfiguration.getClass()); - } - List> newServiceConfigurations = new ArrayList<>(serviceConfigurations); - newServiceConfigurations.add(serviceConfiguration); - return new ConfigurationBuilder(this, newServiceConfigurations); - } - - T findServiceByClass(Class type) { - for (ServiceCreationConfiguration serviceConfiguration : serviceConfigurations) { - if (serviceConfiguration.getClass().equals(type)) { - return type.cast(serviceConfiguration); - } - } - return null; - } - - ConfigurationBuilder removeService(ServiceCreationConfiguration serviceConfiguration) { - List> newServiceConfigurations = new ArrayList<>(serviceConfigurations); - newServiceConfigurations.remove(serviceConfiguration); - return new ConfigurationBuilder(this, newServiceConfigurations); - } - - ConfigurationBuilder withClassLoader(ClassLoader classLoader) { - return new ConfigurationBuilder(this, classLoader); - } -} diff --git a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfiguration.java deleted file mode 100644 index 0985d5d384..0000000000 --- a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterConfiguration.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.config.loaderwriter; - -import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; -import org.ehcache.spi.service.ServiceConfiguration; - -/** -* {@link ServiceConfiguration} for the default {@link CacheLoaderWriterProvider}. -*/ -public class DefaultCacheLoaderWriterConfiguration extends ClassInstanceConfiguration> implements ServiceConfiguration { - - /** - * Creates a new configuration object with the specified {@link CacheLoaderWriter} class and associated constructor - * arguments. - * - * @param clazz the cache loader writer class - * @param arguments the constructor arguments - */ - public DefaultCacheLoaderWriterConfiguration(final Class> clazz, Object... arguments) { - super(clazz, arguments); - } - - /** - * Creates a new configuration with the specified {@link CacheLoaderWriter} instance. - * - * @param loaderWriter the cache loader writer - */ - public DefaultCacheLoaderWriterConfiguration(CacheLoaderWriter loaderWriter) { - super(loaderWriter); - } - - /** - * {@inheritDoc} - */ - @Override - public Class getServiceType() { - return CacheLoaderWriterProvider.class; - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfiguration.java deleted file mode 100644 index 4ff724373d..0000000000 --- a/impl/src/main/java/org/ehcache/impl/config/loaderwriter/DefaultCacheLoaderWriterProviderConfiguration.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.config.loaderwriter; - -import org.ehcache.impl.internal.classes.ClassInstanceProviderConfiguration; -import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.spi.service.ServiceCreationConfiguration; - -/** - * {@link ServiceCreationConfiguration} for the default {@link CacheLoaderWriterProvider}. - */ -public class DefaultCacheLoaderWriterProviderConfiguration extends ClassInstanceProviderConfiguration> implements ServiceCreationConfiguration { - - /** - * {@inheritDoc} - */ - @Override - public Class getServiceType() { - return CacheLoaderWriterProvider.class; - } - - /** - * Adds a default {@link CacheLoaderWriter} class and associated constuctor arguments to be used with a cache matching - * the provided alias. - * - * @param alias the cache alias - * @param clazz the cache loader writer class - * @param arguments the constructor arguments - * - * @return this configuration instance - */ - public DefaultCacheLoaderWriterProviderConfiguration addLoaderFor(String alias, Class> clazz, Object... arguments) { - getDefaults().put(alias, new DefaultCacheLoaderWriterConfiguration(clazz, arguments)); - return this; - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfiguration.java b/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfiguration.java deleted file mode 100644 index bc7331dcf2..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceProviderConfiguration.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.classes; - -import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; - -import java.util.LinkedHashMap; -import java.util.Map; - -/** - * Base class for ProviderFactory config that instantiates service classes. - * Keeps the order in which defaults are added. - * - * @author Alex Snaps - */ -public class ClassInstanceProviderConfiguration { - - private Map> defaults = new LinkedHashMap<>(); - - public Map> getDefaults() { - return defaults; - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ArrayUtils.java b/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ArrayUtils.java deleted file mode 100644 index 4afc31e285..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/ArrayUtils.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - * This is a modified version of the original Apache class. It has had unused - * members removed. - */ -package org.ehcache.impl.internal.classes.commonslang; - -import java.lang.reflect.Array; - -/** - *

                                      Operations on arrays, primitive arrays (like {@code int[]}) and - * primitive wrapper arrays (like {@code Integer[]}). - * - *

                                      This class tries to handle {@code null} input gracefully. - * An exception will not be thrown for a {@code null} - * array input. However, an Object array that contains a {@code null} - * element may throw an exception. Each method documents its behaviour. - * - *

                                      #ThreadSafe# - * @since 2.0 - */ -public class ArrayUtils { - - /** - * An empty immutable {@code Object} array. - */ - public static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; - /** - * An empty immutable {@code Class} array. - */ - public static final Class[] EMPTY_CLASS_ARRAY = new Class[0]; - - /** - *

                                      ArrayUtils instances should NOT be constructed in standard programming. - * Instead, the class should be used as ArrayUtils.clone(new int[] {2}). - * - *

                                      This constructor is public to permit tools that require a JavaBean instance - * to operate. - */ - public ArrayUtils() { - super(); - } - - // nullToEmpty - //----------------------------------------------------------------------- - /** - *

                                      Defensive programming technique to change a {@code null} - * reference to an empty one. - * - *

                                      This method returns an empty array for a {@code null} input array. - * - *

                                      As a memory optimizing technique an empty array passed in will be overridden with - * the empty {@code public static} references in this class. - * - * @param array the array to check for {@code null} or empty - * @return the same array, {@code public static} empty array if {@code null} or empty input - * @since 2.5 - */ - public static Object[] nullToEmpty(final Object[] array) { - if (isEmpty(array)) { - return EMPTY_OBJECT_ARRAY; - } - return array; - } - - /** - *

                                      Defensive programming technique to change a {@code null} - * reference to an empty one. - * - *

                                      This method returns an empty array for a {@code null} input array. - * - *

                                      As a memory optimizing technique an empty array passed in will be overridden with - * the empty {@code public static} references in this class. - * - * @param array the array to check for {@code null} or empty - * @return the same array, {@code public static} empty array if {@code null} or empty input - * @since 3.2 - */ - public static Class[] nullToEmpty(final Class[] array) { - if (isEmpty(array)) { - return EMPTY_CLASS_ARRAY; - } - return array; - } - - // Is same length - //----------------------------------------------------------------------- - /** - *

                                      Checks whether two arrays are the same length, treating - * {@code null} arrays as length {@code 0}. - * - *

                                      Any multi-dimensional aspects of the arrays are ignored. - * - * @param array1 the first array, may be {@code null} - * @param array2 the second array, may be {@code null} - * @return {@code true} if length of arrays matches, treating - * {@code null} as an empty array - */ - public static boolean isSameLength(final Object[] array1, final Object[] array2) { - return getLength(array1) == getLength(array2); - } - - /** - *

                                      Returns the length of the specified array. - * This method can deal with {@code Object} arrays and with primitive arrays. - * - *

                                      If the input array is {@code null}, {@code 0} is returned. - * - *

                                      -     * ArrayUtils.getLength(null)            = 0
                                      -     * ArrayUtils.getLength([])              = 0
                                      -     * ArrayUtils.getLength([null])          = 1
                                      -     * ArrayUtils.getLength([true, false])   = 2
                                      -     * ArrayUtils.getLength([1, 2, 3])       = 3
                                      -     * ArrayUtils.getLength(["a", "b", "c"]) = 3
                                      -     * 
                                      - * - * @param array the array to retrieve the length from, may be null - * @return The length of the array, or {@code 0} if the array is {@code null} - * @throws IllegalArgumentException if the object argument is not an array. - * @since 2.1 - */ - public static int getLength(final Object array) { - if (array == null) { - return 0; - } - return Array.getLength(array); - } - - /** - *

                                      Checks if an array of Objects is empty or {@code null}. - * - * @param array the array to test - * @return {@code true} if the array is empty or {@code null} - * @since 2.1 - */ - public static boolean isEmpty(final Object[] array) { - return getLength(array) == 0; - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/JavaVersion.java b/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/JavaVersion.java deleted file mode 100644 index 87187cd545..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/JavaVersion.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - * This is a modified version of the original Apache class. It has had unused - * members removed. - */ -package org.ehcache.impl.internal.classes.commonslang; - -/** - *

                                      An enum representing all the versions of the Java specification. - * This is intended to mirror available values from the - * java.specification.version System property. - * - * @since 3.0 - */ -public enum JavaVersion { - - /** - * The Java version reported by Android. This is not an official Java version number. - */ - JAVA_0_9(1.5f, "0.9"), - - /** - * Java 1.1. - */ - JAVA_1_1(1.1f, "1.1"), - - /** - * Java 1.2. - */ - JAVA_1_2(1.2f, "1.2"), - - /** - * Java 1.3. - */ - JAVA_1_3(1.3f, "1.3"), - - /** - * Java 1.4. - */ - JAVA_1_4(1.4f, "1.4"), - - /** - * Java 1.5. - */ - JAVA_1_5(1.5f, "1.5"), - - /** - * Java 1.6. - */ - JAVA_1_6(1.6f, "1.6"), - - /** - * Java 1.7. - */ - JAVA_1_7(1.7f, "1.7"), - - /** - * Java 1.8. - */ - JAVA_1_8(1.8f, "1.8"), - - /** - * Java 1.9. - */ - JAVA_1_9(1.9f, "1.9"), - - /** - * Java 1.x, x > 9. Mainly introduced to avoid to break when a new version of Java is used. - */ - JAVA_RECENT(maxVersion(), Float.toString(maxVersion())); - - /** - * The float value. - */ - private final float value; - /** - * The standard name. - */ - private final String name; - - /** - * Constructor. - * - * @param value the float value - * @param name the standard name, not null - */ - JavaVersion(final float value, final String name) { - this.value = value; - this.name = name; - } - - //----------------------------------------------------------------------- - /** - *

                                      Whether this version of Java is at least the version of Java passed in. - * - *

                                      For example:
                                      - * {@code myVersion.atLeast(JavaVersion.JAVA_1_4)} - * - * @param requiredVersion the version to check against, not null - * @return true if this version is equal to or greater than the specified version - */ - public boolean atLeast(final JavaVersion requiredVersion) { - return this.value >= requiredVersion.value; - } - - /** - * Transforms the given string with a Java version number to the - * corresponding constant of this enumeration class. This method is used - * internally. - * - * @param nom the Java version as string - * @return the corresponding enumeration constant or null if the - * version is unknown - */ - // helper for static importing - static JavaVersion getJavaVersion(final String nom) { - return get(nom); - } - - /** - * Transforms the given string with a Java version number to the - * corresponding constant of this enumeration class. This method is used - * internally. - * - * @param nom the Java version as string - * @return the corresponding enumeration constant or null if the - * version is unknown - */ - static JavaVersion get(final String nom) { - if ("0.9".equals(nom)) { - return JAVA_0_9; - } else if ("1.1".equals(nom)) { - return JAVA_1_1; - } else if ("1.2".equals(nom)) { - return JAVA_1_2; - } else if ("1.3".equals(nom)) { - return JAVA_1_3; - } else if ("1.4".equals(nom)) { - return JAVA_1_4; - } else if ("1.5".equals(nom)) { - return JAVA_1_5; - } else if ("1.6".equals(nom)) { - return JAVA_1_6; - } else if ("1.7".equals(nom)) { - return JAVA_1_7; - } else if ("1.8".equals(nom)) { - return JAVA_1_8; - } else if ("1.9".equals(nom)) { - return JAVA_1_9; - } - if (nom == null) { - return null; - } - final float v = toFloatVersion(nom); - if ((v - 1.) < 1.) { // then we need to check decimals > .9 - final int firstComma = Math.max(nom.indexOf('.'), nom.indexOf(',')); - final int end = Math.max(nom.length(), nom.indexOf(',', firstComma)); - if (Float.parseFloat(nom.substring(firstComma + 1, end)) > .9f) { - return JAVA_RECENT; - } - } - return null; - } - - //----------------------------------------------------------------------- - /** - *

                                      The string value is overridden to return the standard name. - * - *

                                      For example, "1.5". - * - * @return the name, not null - */ - @Override - public String toString() { - return name; - } - - /** - * Gets the Java Version from the system or 2.0 if the {@code java.version} system property is not set. - * - * @return the value of {@code java.version} system property or 2.0 if it is not set. - */ - private static float maxVersion() { - final float v = toFloatVersion(System.getProperty("java.version", "2.0")); - if (v > 0) { - return v; - } - return 2f; - } - - /** - * Parses a float value from a String. - * - * @param value the String to parse. - * @return the float value represented by teh string or -1 if the given String can not be parsed. - */ - private static float toFloatVersion(final String value) { - final String[] toParse = value.split("\\."); - if (toParse.length >= 2) { - try { - return Float.parseFloat(toParse[0] + '.' + toParse[1]); - } catch (final NumberFormatException nfe) { - // no-op, let use default - } - } - return -1; - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/SystemUtils.java b/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/SystemUtils.java deleted file mode 100644 index 08d23aef09..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/SystemUtils.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - * This is a modified version of the original Apache class. It has had unused - * members removed. - */ -package org.ehcache.impl.internal.classes.commonslang; - -/** - *

                                      - * Helpers for {@code java.lang.System}. - *

                                      - * If a system property cannot be read due to security restrictions, the corresponding field in this class will be set - * to {@code null} and a message will be written to {@code System.err}. - *

                                      - * #ThreadSafe# - * - * @since 1.0 - */ -public class SystemUtils { - - /** - * The {@code java.specification.version} System Property. Java Runtime Environment specification version. - *

                                      - * Defaults to {@code null} if the runtime does not have security access to read this property or the property does - * not exist. - *

                                      - * This value is initialized when the class is loaded. If {@link System#setProperty(String,String)} or - * {@link System#setProperties(java.util.Properties)} is called after this class is loaded, the value will be out of - * sync with that System property. - * - * @since Java 1.3 - */ - public static final String JAVA_SPECIFICATION_VERSION = getSystemProperty("java.specification.version"); - private static final JavaVersion JAVA_SPECIFICATION_VERSION_AS_ENUM = JavaVersion.get(JAVA_SPECIFICATION_VERSION); - - // ----------------------------------------------------------------------- - /** - * Gets a System property, defaulting to {@code null} if the property cannot be read. - *

                                      - * If a {@code SecurityException} is caught, the return value is {@code null} and a message is written to - * {@code System.err}. - * - * @param property the system property name - * @return the system property value or {@code null} if a security problem occurs - */ - private static String getSystemProperty(final String property) { - try { - return System.getProperty(property); - } catch (final SecurityException ex) { - // we are not allowed to look at this property - System.err.println("Caught a SecurityException reading the system property '" + property - + "'; the SystemUtils property value will default to null."); - return null; - } - } - - /** - * Is the Java version at least the requested version. - *

                                      - * Example input: - *

                                        - *
                                      • {@code 1.2f} to test for Java 1.2
                                      • - *
                                      • {@code 1.31f} to test for Java 1.3.1
                                      • - *
                                      - * - * @param requiredVersion the required version, for example 1.31f - * @return {@code true} if the actual version is equal or greater than the required version - */ - public static boolean isJavaVersionAtLeast(final JavaVersion requiredVersion) { - return JAVA_SPECIFICATION_VERSION_AS_ENUM.atLeast(requiredVersion); - } - - // ----------------------------------------------------------------------- - /** - * SystemUtils instances should NOT be constructed in standard programming. Instead, the class should be used as - * {@code SystemUtils.FILE_SEPARATOR}. - *

                                      - * This constructor is public to permit tools that require a JavaBean instance to operate. - */ - public SystemUtils() { - super(); - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/Validate.java b/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/Validate.java deleted file mode 100644 index f0a1b9a385..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/Validate.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - * This is a modified version of the original Apache class. It has had unused - * members removed. - */ -package org.ehcache.impl.internal.classes.commonslang; - -/** - *

                                      This class assists in validating arguments. The validation methods are - * based along the following principles: - *

                                        - *
                                      • An invalid {@code null} argument causes a {@link NullPointerException}.
                                      • - *
                                      • A non-{@code null} argument causes an {@link IllegalArgumentException}.
                                      • - *
                                      • An invalid index into an array/collection/map/string causes an {@link IndexOutOfBoundsException}.
                                      • - *
                                      - * - *

                                      All exceptions messages are - * format strings - * as defined by the Java platform. For example: - * - *

                                      - * Validate.isTrue(i > 0, "The value must be greater than zero: %d", i);
                                      - * Validate.notNull(surname, "The surname must not be %s", null);
                                      - * 
                                      - * - *

                                      #ThreadSafe# - * @see java.lang.String#format(String, Object...) - * @since 2.0 - */ -public class Validate { - - /** - * Constructor. This class should not normally be instantiated. - */ - public Validate() { - super(); - } - - /** - *

                                      Validate that the specified argument is not {@code null}; - * otherwise throwing an exception with the specified message. - * - *

                                      Validate.notNull(myObject, "The object must not be null");
                                      - * - * @param the object type - * @param object the object to check - * @param message the {@link String#format(String, Object...)} exception message if invalid, not null - * @param values the optional values for the formatted exception message - * @return the validated object (never {@code null} for method chaining) - * @throws NullPointerException if the object is {@code null} - * @see #notNull(Object) - */ - public static T notNull(final T object, final String message, final Object... values) { - if (object == null) { - throw new NullPointerException(String.format(message, values)); - } - return object; - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MemberUtils.java b/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MemberUtils.java deleted file mode 100644 index d6592cbc46..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/commonslang/reflect/MemberUtils.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - * This is a modified version of the original Apache class. It has had unused - * members removed. - */ -package org.ehcache.impl.internal.classes.commonslang.reflect; - -import org.ehcache.impl.internal.classes.commonslang.ClassUtils; - -import java.lang.reflect.AccessibleObject; -import java.lang.reflect.Member; -import java.lang.reflect.Modifier; - -/** - * Contains common code for working with {@link java.lang.reflect.Method Methods}/{@link java.lang.reflect.Constructor Constructors}, - * extracted and refactored from {@link MethodUtils} when it was imported from Commons BeanUtils. - * - * @since 2.5 - */ -abstract class MemberUtils { - // TODO extract an interface to implement compareParameterSets(...)? - - private static final int ACCESS_TEST = Modifier.PUBLIC | Modifier.PROTECTED | Modifier.PRIVATE; - - /** Array of primitive number types ordered by "promotability" */ - private static final Class[] ORDERED_PRIMITIVE_TYPES = { Byte.TYPE, Short.TYPE, - Character.TYPE, Integer.TYPE, Long.TYPE, Float.TYPE, Double.TYPE }; - - /** - * XXX Default access superclass workaround. - * - * When a {@code public} class has a default access superclass with {@code public} members, - * these members are accessible. Calling them from compiled code works fine. - * Unfortunately, on some JVMs, using reflection to invoke these members - * seems to (wrongly) prevent access even when the modifier is {@code public}. - * Calling {@code setAccessible(true)} solves the problem but will only work from - * sufficiently privileged code. Better workarounds would be gratefully - * accepted. - * @param o the AccessibleObject to set as accessible - * @return a boolean indicating whether the accessibility of the object was set to true. - */ - static boolean setAccessibleWorkaround(final AccessibleObject o) { - if (o == null || o.isAccessible()) { - return false; - } - final Member m = (Member) o; - if (!o.isAccessible() && Modifier.isPublic(m.getModifiers()) && isPackageAccess(m.getDeclaringClass().getModifiers())) { - try { - o.setAccessible(true); - return true; - } catch (final SecurityException e) { // NOPMD - // ignore in favor of subsequent IllegalAccessException - } - } - return false; - } - - /** - * Returns whether a given set of modifiers implies package access. - * @param modifiers to test - * @return {@code true} unless {@code package}/{@code protected}/{@code private} modifier detected - */ - static boolean isPackageAccess(final int modifiers) { - return (modifiers & ACCESS_TEST) == 0; - } - - /** - * Returns whether a {@link Member} is accessible. - * @param m Member to check - * @return {@code true} if m is accessible - */ - static boolean isAccessible(final Member m) { - return m != null && Modifier.isPublic(m.getModifiers()) && !m.isSynthetic(); - } - - /** - * Compares the relative fitness of two sets of parameter types in terms of - * matching a third set of runtime parameter types, such that a list ordered - * by the results of the comparison would return the best match first - * (least). - * - * @param left the "left" parameter set - * @param right the "right" parameter set - * @param actual the runtime parameter types to match against - * {@code left}/{@code right} - * @return int consistent with {@code compare} semantics - */ - static int compareParameterTypes(final Class[] left, final Class[] right, final Class[] actual) { - final float leftCost = getTotalTransformationCost(actual, left); - final float rightCost = getTotalTransformationCost(actual, right); - return leftCost < rightCost ? -1 : rightCost < leftCost ? 1 : 0; - } - - /** - * Returns the sum of the object transformation cost for each class in the - * source argument list. - * @param srcArgs The source arguments - * @param destArgs The destination arguments - * @return The total transformation cost - */ - private static float getTotalTransformationCost(final Class[] srcArgs, final Class[] destArgs) { - float totalCost = 0.0f; - for (int i = 0; i < srcArgs.length; i++) { - Class srcClass, destClass; - srcClass = srcArgs[i]; - destClass = destArgs[i]; - totalCost += getObjectTransformationCost(srcClass, destClass); - } - return totalCost; - } - - /** - * Gets the number of steps required needed to turn the source class into - * the destination class. This represents the number of steps in the object - * hierarchy graph. - * @param srcClass The source class - * @param destClass The destination class - * @return The cost of transforming an object - */ - private static float getObjectTransformationCost(Class srcClass, final Class destClass) { - if (destClass.isPrimitive()) { - return getPrimitivePromotionCost(srcClass, destClass); - } - float cost = 0.0f; - while (srcClass != null && !destClass.equals(srcClass)) { - if (destClass.isInterface() && ClassUtils.isAssignable(srcClass, destClass)) { - // slight penalty for interface match. - // we still want an exact match to override an interface match, - // but - // an interface match should override anything where we have to - // get a superclass. - cost += 0.25f; - break; - } - cost++; - srcClass = srcClass.getSuperclass(); - } - /* - * If the destination class is null, we've travelled all the way up to - * an Object match. We'll penalize this by adding 1.5 to the cost. - */ - if (srcClass == null) { - cost += 1.5f; - } - return cost; - } - - /** - * Gets the number of steps required to promote a primitive number to another - * type. - * @param srcClass the (primitive) source class - * @param destClass the (primitive) destination class - * @return The cost of promoting the primitive - */ - private static float getPrimitivePromotionCost(final Class srcClass, final Class destClass) { - float cost = 0.0f; - Class cls = srcClass; - if (!cls.isPrimitive()) { - // slight unwrapping penalty - cost += 0.1f; - cls = ClassUtils.wrapperToPrimitive(cls); - } - for (int i = 0; cls != destClass && i < ORDERED_PRIMITIVE_TYPES.length; i++) { - if (cls == ORDERED_PRIMITIVE_TYPES[i]) { - cost += 0.1f; - if (i < ORDERED_PRIMITIVE_TYPES.length - 1) { - cls = ORDERED_PRIMITIVE_TYPES[i + 1]; - } - } - } - return cost; - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java b/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java deleted file mode 100644 index 2fba80a9ab..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.events; - -import org.ehcache.core.events.StoreEventSink; - -/** - * ScopedStoreEventDispatcher - */ -public class ScopedStoreEventDispatcher extends AbstractStoreEventDispatcher { - - - public ScopedStoreEventDispatcher(int dispatcherConcurrency) { - super(dispatcherConcurrency); - } - - @Override - public StoreEventSink eventSink() { - if (getListeners().isEmpty()) { - @SuppressWarnings("unchecked") - StoreEventSink noOpEventSink = (StoreEventSink) NO_OP_EVENT_SINK; - return noOpEventSink; - } else { - return new InvocationScopedEventSink<>(getFilters(), isEventOrdering(), getOrderedQueues(), getListeners()); - } - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehind.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehind.java deleted file mode 100644 index c936a49600..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehind.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.loaderwriter.writebehind; - -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.BlockingQueue; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; - -import org.ehcache.spi.loaderwriter.CacheWritingException; -import org.ehcache.impl.internal.loaderwriter.writebehind.operations.DeleteOperation; -import org.ehcache.impl.internal.loaderwriter.writebehind.operations.SingleOperation; -import org.ehcache.impl.internal.loaderwriter.writebehind.operations.WriteOperation; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; - -abstract class AbstractWriteBehind implements WriteBehind { - - private final CacheLoaderWriter cacheLoaderWriter; - - public AbstractWriteBehind(CacheLoaderWriter cacheLoaderWriter) { - this.cacheLoaderWriter = cacheLoaderWriter; - } - - @Override - public V load(K key) throws Exception { - SingleOperation operation = getOperation(key); - return operation == null ? cacheLoaderWriter.load(key) : (operation.getClass() == WriteOperation.class ? ((WriteOperation) operation).getValue() : null); - } - - @Override - public Map loadAll(Iterable keys) throws Exception { - Map entries = new HashMap<>(); - for (K k : keys) { - entries.put(k, load(k)) ; - } - return entries; - } - - @Override - public void write(K key, V value) throws CacheWritingException { - addOperation(new WriteOperation<>(key, value)); - } - - @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { - for (Map.Entry entry : entries) { - write(entry.getKey(), entry.getValue()); - } - } - - @Override - public void delete(K key) throws CacheWritingException { - addOperation(new DeleteOperation<>(key)); - } - - @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { - for (K k : keys) { - delete(k); - } - } - - protected abstract SingleOperation getOperation(K key); - - protected abstract void addOperation(final SingleOperation operation); - - protected static void putUninterruptibly(BlockingQueue queue, T r) { - boolean interrupted = false; - try { - while (true) { - try { - queue.put(r); - return; - } catch (InterruptedException e) { - interrupted = true; - } - } - } finally { - if (interrupted) { - Thread.currentThread().interrupt(); - } - } - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProvider.java b/impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProvider.java deleted file mode 100644 index 68da1b0f61..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProvider.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.spi.loaderwriter; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterProviderConfiguration; -import org.ehcache.impl.internal.classes.ClassInstanceProvider; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; - -/** - * @author Alex Snaps - */ -public class DefaultCacheLoaderWriterProvider extends ClassInstanceProvider> implements CacheLoaderWriterProvider { - - public DefaultCacheLoaderWriterProvider(DefaultCacheLoaderWriterProviderConfiguration configuration) { - super(configuration, DefaultCacheLoaderWriterConfiguration.class, true); - } - - @SuppressWarnings("unchecked") - @Override - public CacheLoaderWriter createCacheLoaderWriter(final String alias, final CacheConfiguration cacheConfiguration) { - return (CacheLoaderWriter) newInstance(alias, cacheConfiguration); - } - - @Override - public void releaseCacheLoaderWriter(final CacheLoaderWriter cacheLoaderWriter) throws Exception { - releaseInstance(cacheLoaderWriter); - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultCacheStatistics.java b/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultCacheStatistics.java deleted file mode 100644 index f5d205148b..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultCacheStatistics.java +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.statistics; - -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.LongAdder; - -import org.ehcache.core.InternalCache; -import org.ehcache.core.statistics.BulkOps; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.statistics.CacheStatistics; -import org.ehcache.core.statistics.TierStatistics; -import org.ehcache.core.statistics.TypedValueStatistic; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.derived.LatencySampling; -import org.terracotta.statistics.derived.MinMaxAverage; -import org.terracotta.statistics.extended.StatisticType; -import org.terracotta.statistics.observer.ChainedOperationObserver; - -import static java.util.EnumSet.allOf; -import static org.ehcache.impl.internal.statistics.StatsUtils.findLowestTier; -import static org.ehcache.impl.internal.statistics.StatsUtils.findOperationStatisticOnChildren; -import static org.ehcache.impl.internal.statistics.StatsUtils.findTiers; - -/** - * Contains usage statistics relative to a given cache. - */ -class DefaultCacheStatistics implements CacheStatistics { - - private volatile CompensatingCounters compensatingCounters = CompensatingCounters.empty(); - - private final OperationStatistic get; - private final OperationStatistic put; - private final OperationStatistic remove; - private final OperationStatistic putIfAbsent; - private final OperationStatistic replace; - private final OperationStatistic conditionalRemove; - - private final Map bulkMethodEntries; - - private final LatencyMonitor averageGetTime; - private final LatencyMonitor averagePutTime; - private final LatencyMonitor averageRemoveTime; - - private final Map tierStatistics; - private final TierStatistics lowestTier; - - private final Map knownStatistics; - - public DefaultCacheStatistics(InternalCache cache) { - bulkMethodEntries = cache.getBulkMethodEntries(); - - get = findOperationStatisticOnChildren(cache, CacheOperationOutcomes.GetOutcome.class, "get"); - put = findOperationStatisticOnChildren(cache, CacheOperationOutcomes.PutOutcome.class, "put"); - remove = findOperationStatisticOnChildren(cache, CacheOperationOutcomes.RemoveOutcome.class, "remove"); - putIfAbsent = findOperationStatisticOnChildren(cache, CacheOperationOutcomes.PutIfAbsentOutcome.class, "putIfAbsent"); - replace = findOperationStatisticOnChildren(cache, CacheOperationOutcomes.ReplaceOutcome.class, "replace"); - conditionalRemove = findOperationStatisticOnChildren(cache, CacheOperationOutcomes.ConditionalRemoveOutcome.class, "conditionalRemove"); - - averageGetTime = new LatencyMonitor<>(allOf(CacheOperationOutcomes.GetOutcome.class)); - get.addDerivedStatistic(averageGetTime); - averagePutTime = new LatencyMonitor<>(allOf(CacheOperationOutcomes.PutOutcome.class)); - put.addDerivedStatistic(averagePutTime); - averageRemoveTime = new LatencyMonitor<>(allOf(CacheOperationOutcomes.RemoveOutcome.class)); - remove.addDerivedStatistic(averageRemoveTime); - - String[] tierNames = findTiers(cache); - - String lowestTierName = findLowestTier(tierNames); - TierStatistics lowestTier = null; - - tierStatistics = new HashMap<>(tierNames.length); - for (String tierName : tierNames) { - TierStatistics tierStatistics = new DefaultTierStatistics(cache, tierName); - this.tierStatistics.put(tierName, tierStatistics); - if (lowestTierName.equals(tierName)) { - lowestTier = tierStatistics; - } - } - this.lowestTier = lowestTier; - - knownStatistics = createKnownStatistics(); - } - - private Map createKnownStatistics() { - Map knownStatistics = new HashMap<>(30); - knownStatistics.put("Cache:HitCount", new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getCacheHits(); - } - }); - knownStatistics.put("Cache:MissCount", new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getCacheMisses(); - } - }); - knownStatistics.put("Cache:PutCount", new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getCachePuts(); - } - }); - knownStatistics.put("Cache:RemovalCount", new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getCacheRemovals(); - } - }); - knownStatistics.put("Cache:EvictionCount", new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getCacheEvictions(); - } - }); - knownStatistics.put("Cache:ExpirationCount", new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getCacheExpirations(); - } - }); - - for (TierStatistics tier : tierStatistics.values()) { - knownStatistics.putAll(tier.getKnownStatistics()); - } - - return Collections.unmodifiableMap(knownStatistics); - } - - public Map getKnownStatistics() { - return knownStatistics; - } - - public Map getTierStatistics() { - return Collections.unmodifiableMap(tierStatistics); - } - - public void clear() { - compensatingCounters = compensatingCounters.snapshot(this); - averageGetTime.clear(); - averagePutTime.clear(); - averageRemoveTime.clear(); - for (TierStatistics t : tierStatistics.values()) { - t.clear(); - } - } - - public long getCacheHits() { - return normalize(getHits() - compensatingCounters.cacheHits); - } - - public float getCacheHitPercentage() { - long cacheHits = getCacheHits(); - return normalize((float) cacheHits / (cacheHits + getCacheMisses())) * 100.0f; - } - - public long getCacheMisses() { - return normalize(getMisses() - compensatingCounters.cacheMisses); - } - - public float getCacheMissPercentage() { - long cacheMisses = getCacheMisses(); - return normalize((float) cacheMisses / (getCacheHits() + cacheMisses)) * 100.0f; - } - - public long getCacheGets() { - return normalize(getHits() + getMisses() - - compensatingCounters.cacheGets); - } - - public long getCachePuts() { - return normalize(getBulkCount(BulkOps.PUT_ALL) + - put.sum(EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)) + - putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) + - replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)) - - compensatingCounters.cachePuts); - } - - public long getCacheRemovals() { - return normalize(getBulkCount(BulkOps.REMOVE_ALL) + - remove.sum(EnumSet.of(CacheOperationOutcomes.RemoveOutcome.SUCCESS)) + - conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)) - - compensatingCounters.cacheRemovals); - } - - public long getCacheEvictions() { - return normalize(lowestTier.getEvictions()); - } - - public long getCacheExpirations() { - return normalize(lowestTier.getExpirations()); - } - - public float getCacheAverageGetTime() { - return (float) averageGetTime.value(); - } - - public float getCacheAveragePutTime() { - return (float) averagePutTime.value(); - } - - public float getCacheAverageRemoveTime() { - return (float) averageRemoveTime.value(); - } - - private long getMisses() { - return getBulkCount(BulkOps.GET_ALL_MISS) + - get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)) + - putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) + - replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)) + - conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); - } - - private long getHits() { - return getBulkCount(BulkOps.GET_ALL_HITS) + - get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)) + - putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)) + - replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT, CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)) + - conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS, CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); - } - - private long getBulkCount(BulkOps bulkOps) { - return bulkMethodEntries.get(bulkOps).longValue(); - } - - private static long normalize(long value) { - return Math.max(0, value); - } - - private static float normalize(float value) { - if (Float.isNaN(value)) { - return 0.0f; - } - return Math.min(1.0f, Math.max(0.0f, value)); - } - - private static class CompensatingCounters { - final long cacheHits; - final long cacheMisses; - final long cacheGets; - final long cachePuts; - final long cacheRemovals; - - private CompensatingCounters(long cacheHits, long cacheMisses, long cacheGets, long cachePuts, long cacheRemovals) { - this.cacheHits = cacheHits; - this.cacheMisses = cacheMisses; - this.cacheGets = cacheGets; - this.cachePuts = cachePuts; - this.cacheRemovals = cacheRemovals; - } - - static CompensatingCounters empty() { - return new CompensatingCounters(0, 0, 0, 0, 0); - } - - CompensatingCounters snapshot(DefaultCacheStatistics statistics) { - return new CompensatingCounters( - cacheHits + statistics.getHits(), - cacheMisses + statistics.getMisses(), - cacheGets + statistics.getCacheGets(), - cachePuts + statistics.getCachePuts(), - cacheRemovals + statistics.getCacheRemovals()); - } - } - - private static class LatencyMonitor> implements ChainedOperationObserver { - - private final LatencySampling sampling; - private volatile MinMaxAverage average; - - public LatencyMonitor(Set targets) { - this.sampling = new LatencySampling<>(targets, 1.0); - this.average = new MinMaxAverage(); - sampling.addDerivedStatistic(average); - } - - @Override - public void begin(long time) { - sampling.begin(time); - } - - @Override - public void end(long time, T result) { - sampling.end(time, result); - } - - @Override - public void end(long time, T result, long... parameters) { - sampling.end(time, result, parameters); - } - - public double value() { - Double value = average.mean(); - if (value == null) { - //Someone involved with 107 can't do math - return 0; - } else { - //We use nanoseconds, 107 uses microseconds - return value / 1000f; - } - } - - public synchronized void clear() { - sampling.removeDerivedStatistic(average); - average = new MinMaxAverage(); - sampling.addDerivedStatistic(average); - } - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultStatisticsService.java b/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultStatisticsService.java deleted file mode 100644 index dfc9bb306b..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultStatisticsService.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.statistics; - -import org.ehcache.Cache; -import org.ehcache.Status; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.core.InternalCache; -import org.ehcache.core.events.CacheManagerListener; -import org.ehcache.core.spi.service.CacheManagerProviderService; -import org.ehcache.core.spi.service.StatisticsService; -import org.ehcache.core.spi.store.InternalCacheManager; -import org.ehcache.core.statistics.CacheStatistics; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.spi.service.ServiceProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -/** - * Default implementation using the statistics calculated by the observers set on the caches. - */ -@ServiceDependencies(CacheManagerProviderService.class) -public class DefaultStatisticsService implements StatisticsService, CacheManagerListener { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultStatisticsService.class); - - private final ConcurrentMap cacheStatistics = new ConcurrentHashMap<>(); - - private volatile InternalCacheManager cacheManager; - private volatile boolean started = false; - - public CacheStatistics getCacheStatistics(String cacheName) { - CacheStatistics stats = cacheStatistics.get(cacheName); - if (stats == null) { - throw new IllegalArgumentException("Unknown cache: " + cacheName); - } - return stats; - } - - public boolean isStarted() { - return started; - } - - @Override - public void start(ServiceProvider serviceProvider) { - LOGGER.debug("Starting service"); - CacheManagerProviderService cacheManagerProviderService = serviceProvider.getService(CacheManagerProviderService.class); - cacheManager = cacheManagerProviderService.getCacheManager(); - cacheManager.registerListener(this); - started = true; - } - - @Override - public void stop() { - LOGGER.debug("Stopping service"); - cacheManager.deregisterListener(this); - cacheStatistics.clear(); - started = false; - } - - @Override - public void stateTransition(Status from, Status to) { - LOGGER.debug("Moving from " + from + " to " + to); - switch(to) { - case AVAILABLE: - registerAllCaches(); - break; - case UNINITIALIZED: - cacheManager.deregisterListener(this); - cacheStatistics.clear(); - break; - case MAINTENANCE: - throw new IllegalStateException("Should not be started in maintenance mode"); - default: - throw new AssertionError("Unsupported state: " + to); - } - } - - private void registerAllCaches() { - for (Map.Entry> entry : cacheManager.getRuntimeConfiguration().getCacheConfigurations().entrySet()) { - String alias = entry.getKey(); - CacheConfiguration configuration = entry.getValue(); - Cache cache = cacheManager.getCache(alias, configuration.getKeyType(), configuration.getValueType()); - cacheAdded(alias, cache); - } - } - - @Override - public void cacheAdded(String alias, Cache cache) { - LOGGER.debug("Cache added " + alias); - cacheStatistics.put(alias, new DefaultCacheStatistics((InternalCache) cache)); - } - - @Override - public void cacheRemoved(String alias, Cache cache) { - LOGGER.debug("Cache removed " + alias); - cacheStatistics.remove(alias); - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultTierStatistics.java b/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultTierStatistics.java deleted file mode 100755 index 10a328d5c1..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/statistics/DefaultTierStatistics.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.statistics; - -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Map; - -import org.ehcache.Cache; -import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.ehcache.core.statistics.TierOperationOutcomes; -import org.ehcache.core.statistics.TierStatistics; -import org.ehcache.core.statistics.TypedValueStatistic; -import org.terracotta.statistics.ConstantValueStatistic; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.ValueStatistic; -import org.terracotta.statistics.extended.StatisticType; - -import static org.ehcache.impl.internal.statistics.StatsUtils.findStatisticOnDescendants; - -/** - * Contains usage statistics relative to a given tier. - */ -class DefaultTierStatistics implements TierStatistics { - - private static final ValueStatistic NOT_AVAILABLE = ConstantValueStatistic.instance(-1L); - - private volatile CompensatingCounters compensatingCounters = CompensatingCounters.empty(); - - private final String tierName; - - private final Map knownStatistics; - - private final OperationStatistic get; - private final OperationStatistic put; - private final OperationStatistic putIfAbsent; - private final OperationStatistic replace; - private final OperationStatistic conditionalReplace; - private final OperationStatistic remove; - private final OperationStatistic conditionalRemove; - private final OperationStatistic eviction; - private final OperationStatistic expiration; - private final OperationStatistic compute; - private final OperationStatistic computeIfAbsent; - private final ValueStatistic mapping; - private final ValueStatistic maxMapping; - private final ValueStatistic allocatedMemory; - private final ValueStatistic occupiedMemory; - - public DefaultTierStatistics(Cache cache, String tierName) { - this.tierName = tierName; - - get = findOperationStatistic(cache, tierName, "tier", "get"); - put = findOperationStatistic(cache, tierName, "put"); - putIfAbsent = findOperationStatistic(cache, tierName, "putIfAbsent"); - replace = findOperationStatistic(cache, tierName, "replace"); - conditionalReplace = findOperationStatistic(cache, tierName, "conditionalReplace"); - remove = findOperationStatistic(cache, tierName, "remove"); - conditionalRemove = findOperationStatistic(cache, tierName, "conditionalRemove"); - eviction = findOperationStatistic(cache, tierName, "tier", "eviction"); - expiration = findOperationStatistic(cache, tierName, "expiration"); - compute = findOperationStatistic(cache, tierName, "compute"); - computeIfAbsent = findOperationStatistic(cache, tierName, "computeIfAbsent"); - - mapping = findValueStatistics(cache, tierName, "mappings"); - maxMapping = findValueStatistics(cache, tierName, "maxMappings"); - allocatedMemory = findValueStatistics(cache, tierName, "allocatedMemory"); - occupiedMemory = findValueStatistics(cache, tierName, "occupiedMemory"); - - Map knownStatistics = createKnownStatistics(tierName); - this.knownStatistics = Collections.unmodifiableMap(knownStatistics); - } - - private Map createKnownStatistics(String tierName) { - Map knownStatistics = new HashMap<>(7); - addKnownStatistic(knownStatistics, tierName, "HitCount", get, new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getHits(); - } - }); - addKnownStatistic(knownStatistics, tierName, "MissCount", get, new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getMisses(); - } - }); - addKnownStatistic(knownStatistics, tierName, "PutCount", get, new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getPuts(); - } - }); - addKnownStatistic(knownStatistics, tierName, "RemovalCount", get, new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getRemovals(); - } - }); - addKnownStatistic(knownStatistics, tierName, "EvictionCount", get, new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getEvictions(); - } - }); - addKnownStatistic(knownStatistics, tierName, "ExpirationCount", get, new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getExpirations(); - } - }); - addKnownStatistic(knownStatistics, tierName, "MappingCount", mapping, new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getMappings(); - } - }); - addKnownStatistic(knownStatistics, tierName, "MaxMappingCount", maxMapping, new TypedValueStatistic(StatisticType.COUNTER) { - @Override - public Number value() { - return getMaxMappings(); - } - }); - addKnownStatistic(knownStatistics, tierName, "AllocatedByteSize", allocatedMemory, new TypedValueStatistic(StatisticType.SIZE) { - @Override - public Number value() { - return getAllocatedByteSize(); - } - }); - addKnownStatistic(knownStatistics, tierName, "OccupiedByteSize", occupiedMemory, new TypedValueStatistic(StatisticType.SIZE) { - @Override - public Number value() { - return getOccupiedByteSize(); - } - }); - return knownStatistics; - } - - public Map getKnownStatistics() { - return knownStatistics; - } - - private static void addKnownStatistic(Map knownStatistics, String tierName, String name, Object stat, TypedValueStatistic statistic) { - if (stat != NOT_AVAILABLE) { - knownStatistics.put(tierName + ":" + name, statistic); - } - } - - private > OperationStatistic findOperationStatistic(Cache cache, String tierName, String tag, String stat) { - OperationStatistic s = findStatisticOnDescendants(cache, tierName, tag, stat); - if(s == null) { - return ZeroOperationStatistic.get(); - } - return s; - } - - private > OperationStatistic findOperationStatistic(Cache cache, String tierName, String stat) { - OperationStatistic s = findStatisticOnDescendants(cache, tierName, stat); - if(s == null) { - return ZeroOperationStatistic.get(); - } - return s; - } - - private ValueStatistic findValueStatistics(Cache cache, String tierName, String statName) { - ValueStatistic stat = findStatisticOnDescendants(cache, tierName, statName); - if (stat == null) { - return NOT_AVAILABLE; - } - return stat; - } - - /** - * Reset the values for this tier. However, note that {@code mapping, maxMappings, allocatedMemory, occupiedMemory} - * but be reset since it doesn't make sense. - */ - public void clear() { - compensatingCounters = compensatingCounters.snapshot(this); - } - - public long getHits() { - return get.sum(EnumSet.of(TierOperationOutcomes.GetOutcome.HIT)) + - putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.HIT)) + - replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.REPLACED)) + - compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.HIT)) + - computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.HIT)) + - conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED)) + - conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED)) - - compensatingCounters.hits; - } - - public long getMisses() { - return get.sum(EnumSet.of(TierOperationOutcomes.GetOutcome.MISS)) + - putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)) + - replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.MISS)) + - computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.NOOP)) + - conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS)) + - conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS)) - - compensatingCounters.misses; - } - - public long getPuts() { - return put.sum(EnumSet.of(StoreOperationOutcomes.PutOutcome.PUT)) + - putIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.PutIfAbsentOutcome.PUT)) + - compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.PUT)) + - computeIfAbsent.sum(EnumSet.of(StoreOperationOutcomes.ComputeIfAbsentOutcome.PUT)) + - replace.sum(EnumSet.of(StoreOperationOutcomes.ReplaceOutcome.REPLACED)) + - conditionalReplace.sum(EnumSet.of(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED)) - - compensatingCounters.puts; - } - - public long getRemovals() { - return remove.sum(EnumSet.of(StoreOperationOutcomes.RemoveOutcome.REMOVED)) + - compute.sum(EnumSet.of(StoreOperationOutcomes.ComputeOutcome.REMOVED)) + - conditionalRemove.sum(EnumSet.of(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED)) - - compensatingCounters.removals; - } - - public long getEvictions() { - return eviction.sum(EnumSet.of(TierOperationOutcomes.EvictionOutcome.SUCCESS)) - - compensatingCounters.evictions; - } - - public long getExpirations() { - return expiration.sum() - compensatingCounters.expirations; - } - - public long getMappings() { - return mapping.value(); - } - - public long getMaxMappings() { - return maxMapping.value(); - } - - public long getAllocatedByteSize() { - return allocatedMemory.value(); - } - - public long getOccupiedByteSize() { - return occupiedMemory.value(); - } - - private static class CompensatingCounters { - final long hits; - final long misses; - final long puts; - final long removals; - final long evictions; - final long expirations; - - private CompensatingCounters(long hits, long misses, long puts, long removals, long evictions, long expirations) { - this.hits = hits; - this.misses = misses; - this.puts = puts; - this.removals = removals; - this.evictions = evictions; - this.expirations = expirations; - } - - static CompensatingCounters empty() { - return new CompensatingCounters(0, 0, 0, 0, 0, 0); - } - - CompensatingCounters snapshot(DefaultTierStatistics statistics) { - return new CompensatingCounters( - statistics.getHits() + hits, - statistics.getMisses() + misses, - statistics.getPuts() + puts, - statistics.getRemovals() + removals, - statistics.getEvictions() + evictions, - statistics.getExpirations() + expirations - ); - } - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/statistics/StatsUtils.java b/impl/src/main/java/org/ehcache/impl/internal/statistics/StatsUtils.java deleted file mode 100644 index f6927709b1..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/statistics/StatsUtils.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.statistics; - -import java.util.Collections; -import java.util.Map; -import java.util.Set; - -import org.ehcache.Cache; -import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.query.Matcher; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; -import org.terracotta.statistics.OperationStatistic; - -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.hasAttribute; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; - -/** - * Class allowing to query cache and tier statistics - */ -final class StatsUtils { - - private StatsUtils() {} - - static Matcher> hasTag(final String tag) { - return hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.contains(tag); - } - }); - } - - static Matcher> hasProperty(final String key, final String value) { - return hasAttribute("properties", new Matcher>() { - @Override - protected boolean matchesSafely(Map properties) { - Object val = properties.get(key); - return val == null ? false : value.equals(val); - } - }); - } - - /** - * Search for a statistic on the descendant of the context that matches the tag and statistic name. - * - * @param context the context of the query - * @param discriminator a filter on the discriminator property - * @param tag the tag we are looking for - * @param statName statistic name - * @param type of the statistic that will be returned - * @return the wanted statistic or null if no such statistic is found - * @throws RuntimeException when more than one matching statistic is found - */ - static T findStatisticOnDescendants(Object context, String discriminator, String tag, String statName) { - - @SuppressWarnings("unchecked") - Set statResult = queryBuilder() - .descendants() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("name", statName), - hasProperty("discriminator", discriminator), - hasTag(tag))))) - .build().execute(Collections.singleton(ContextManager.nodeFor(context))); - - if (statResult.size() > 1) { - throw new RuntimeException("One stat expected for " + statName + " but found " + statResult.size()); - } - - if (statResult.size() == 1) { - @SuppressWarnings("unchecked") - T result = (T) statResult.iterator().next().getContext().attributes().get("this"); - return result; - } - - // No such stat in this context - return null; - } - - /** - * Search for a statistic on the descendant of the context that matches the tag and statistic name. - * - * @param context the context of the query - * @param tag the tag we are looking for - * @param statName statistic name - * @param type of the statistic that will be returned - * @return the wanted statistic or null if no such statistic is found - * @throws RuntimeException when more than one matching statistic is found - */ - static T findStatisticOnDescendants(Object context, String tag, String statName) { - - @SuppressWarnings("unchecked") - Set statResult = queryBuilder() - .descendants() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("name", statName), - hasTag(tag))))) - .build().execute(Collections.singleton(ContextManager.nodeFor(context))); - - if (statResult.size() > 1) { - throw new RuntimeException("One stat expected for " + statName + " but found " + statResult.size()); - } - - if (statResult.size() == 1) { - @SuppressWarnings("unchecked") - T result = (T) statResult.iterator().next().getContext().attributes().get("this"); - return result; - } - - // No such stat in this context - return null; - } - - /** - * Find an operation statistic attached (as a children) to this context that matches the statistic name and type - * - * @param context the context of the query - * @param type type of the operation statistic - * @param statName statistic name - * @param type of the operation statistic content - * @return the operation statistic searched for - * @throws RuntimeException if 0 or more than 1 result is found - */ - static > OperationStatistic findOperationStatisticOnChildren(Object context, Class type, String statName) { - @SuppressWarnings("unchecked") - Query query = queryBuilder() - .children() - .filter(context(attributes(Matchers.>allOf(hasAttribute("name", statName), hasAttribute("type", type))))) - .build(); - - Set result = query.execute(Collections.singleton(ContextManager.nodeFor(context))); - if (result.size() > 1) { - throw new RuntimeException("result must be unique"); - } - if (result.isEmpty()) { - throw new RuntimeException("result must not be null"); - } - @SuppressWarnings("unchecked") - OperationStatistic statistic = (OperationStatistic) result.iterator().next().getContext().attributes().get("this"); - return statistic; - } - - /** - * Find the list of tiers of a cache. We assume a lot of things here. - *
                                        - *
                                      • The "eviction" statistic is available on the tier
                                      • - *
                                      • That the tiers have only one tag attribute
                                      • - *
                                      • That this tag contains the tier name
                                      • - *
                                      • That the only descendants having an "eviction" statistic are the tiers
                                      • - *
                                      - * - * @param cache the context for looking for tiers - * @return an array of tier names - * @throws RuntimeException if not tiers are found or if tiers have multiple tags - */ - static String[] findTiers(Cache cache) { - // Here I'm randomly taking the eviction observer because it exists on all tiers - @SuppressWarnings("unchecked") - Query statQuery = queryBuilder() - .descendants() - .filter(context(attributes(Matchers.>allOf(hasAttribute("name", "eviction"), hasAttribute("type", StoreOperationOutcomes.EvictionOutcome.class))))) - .build(); - - Set statResult = statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); - - if (statResult.isEmpty()) { - throw new RuntimeException("Failed to find tiers using the eviction observer, valid result Set sizes must 1 or more"); - } - - String[] tiers = new String[statResult.size()]; - - int i = 0; - for (TreeNode treeNode : statResult) { - Set tags = (Set) treeNode.getContext().attributes().get("tags"); - if (tags.size() != 1) { - throw new RuntimeException("We expect tiers to have only one tag"); - } - - String storeType = tags.iterator().next().toString(); - tiers[i++] = storeType; - } - return tiers; - } - - /** - * Find the lowest tier from a list of tier. We assume a lot of things here that the tiers depth - * magically matches the alphabetical order. - * - * @param tiers all tiers - * @return the lowest tier - */ - static String findLowestTier(String[] tiers) { - //if only 1 store then you don't need to find the lowest tier - if (tiers.length == 1) { - return tiers[0]; - } - - //we expect at least one tier - if (tiers.length == 0) { - throw new RuntimeException("No existing tier"); - } - - // We rely here on the alphabetical order matching the depth order so from highest to lowest we have - // OnHeap, OffHeap, Disk, Clustered - String lowestTier = tiers[0]; - for (int i = 1; i < tiers.length; i++) { - if (tiers[i].compareTo(lowestTier) < 0) { - lowestTier = tiers[i]; - } - } - - return lowestTier; - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/statistics/ZeroOperationStatistic.java b/impl/src/main/java/org/ehcache/impl/internal/statistics/ZeroOperationStatistic.java deleted file mode 100644 index 7067025f9b..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/statistics/ZeroOperationStatistic.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.statistics; - -import java.util.Set; - -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.ValueStatistic; -import org.terracotta.statistics.observer.ChainedOperationObserver; - -/** - * An operation statistic that always return 0 for everything. Used as a null object. - */ -class ZeroOperationStatistic> implements OperationStatistic { - - @SuppressWarnings("rawtypes") - private static final OperationStatistic INSTANCE = new ZeroOperationStatistic(); - - @SuppressWarnings("unchecked") - public static > OperationStatistic get() { - return (OperationStatistic) INSTANCE; - } - - @Override - public Class type() { - return null; - } - - @Override - public ValueStatistic statistic(T result) { - return null; - } - - @Override - public ValueStatistic statistic(Set results) { - return null; - } - - @Override - public long count(T type) { - return 0; - } - - @Override - public long sum(Set types) { - return 0; - } - - @Override - public long sum() { - return 0; - } - - @Override - public void addDerivedStatistic(ChainedOperationObserver derived) { - - } - - @Override - public void removeDerivedStatistic(ChainedOperationObserver derived) { - - } - - @Override - public void begin() { - - } - - @Override - public void end(T result) { - - } - - @Override - public void end(T result, long... parameters) { - - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java b/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java deleted file mode 100644 index 4566dffc04..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.store.basic; - -import org.ehcache.core.spi.store.Store; - -import java.util.concurrent.TimeUnit; - -/** - * A value holder that always contains null - * - * @author Henri Tremblay - */ -public class EmptyValueHolder implements Store.ValueHolder { - - private static final Store.ValueHolder EMPTY = new EmptyValueHolder<>(); - - @SuppressWarnings("unchecked") - public static Store.ValueHolder empty() { - return (Store.ValueHolder) EMPTY; - } - - @Override - public V value() { - return null; - } - - @Override - public long creationTime(TimeUnit unit) { - return 0; - } - - @Override - public long expirationTime(TimeUnit unit) { - return 0; - } - - @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { - return false; - } - - @Override - public long lastAccessTime(TimeUnit unit) { - return 0; - } - - @Override - public float hitRate(long now, TimeUnit unit) { - return 0; - } - - @Override - public long hits() { - return 0; - } - - @Override - public long getId() { - return 0; - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java deleted file mode 100644 index 479c5b1756..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ /dev/null @@ -1,1764 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap; - -import org.ehcache.Cache; -import org.ehcache.config.SizedResourcePool; -import org.ehcache.core.CacheConfigurationChangeEvent; -import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.core.CacheConfigurationProperty; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.ResourceType; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.events.StoreEventDispatcher; -import org.ehcache.core.events.StoreEventSink; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.spi.store.heap.LimitExceededException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; -import org.ehcache.impl.copy.SerializingCopier; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.events.ScopedStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; -import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; -import org.ehcache.impl.internal.store.heap.holders.OnHeapValueHolder; -import org.ehcache.impl.internal.store.heap.holders.SerializedOnHeapValueHolder; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.core.spi.time.TimeSourceService; -import org.ehcache.impl.store.HashUtils; -import org.ehcache.impl.serialization.TransientStateRepository; -import org.ehcache.sizeof.annotations.IgnoreSizeOf; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.serialization.StatefulSerializer; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.events.StoreEventSource; -import org.ehcache.core.spi.store.tiering.CachingTier; -import org.ehcache.core.spi.store.tiering.HigherCachingTier; -import org.ehcache.impl.internal.store.BinaryValueHolder; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.copy.CopyProvider; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.core.spi.store.heap.SizeOfEngine; -import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; -import org.ehcache.core.statistics.CachingTierOperationOutcomes; -import org.ehcache.core.statistics.HigherCachingTierOperationOutcomes; -import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; -import org.ehcache.core.statistics.TierOperationOutcomes; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; -import org.terracotta.statistics.MappedOperationStatistic; -import org.terracotta.statistics.StatisticsManager; -import org.terracotta.statistics.observer.OperationObserver; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Random; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import static org.ehcache.config.Eviction.noAdvice; -import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import static org.terracotta.statistics.StatisticBuilder.operation; - -/** - * {@link Store} and {@link HigherCachingTier} implementation for on heap. - * - *

                                      - * It currently carries the following responsibilities: - *

                                        - *
                                      • Expiry
                                      • - *
                                      • Eviction
                                      • - *
                                      • Events
                                      • - *
                                      • Statistics
                                      • - *
                                      - * - * The storage of mappings is handled by a {@link ConcurrentHashMap} accessed through {@link Backend}. - */ -public class OnHeapStore implements Store, HigherCachingTier { - - private static final Logger LOG = LoggerFactory.getLogger(OnHeapStore.class); - - private static final String STATISTICS_TAG = "OnHeap"; - - private static final int ATTEMPT_RATIO = 4; - private static final int EVICTION_RATIO = 2; - - private static final EvictionAdvisor> EVICTION_ADVISOR = (key, value) -> value.evictionAdvice(); - - /** - * Comparator for eviction candidates: - * The highest priority is the ValueHolder having the smallest lastAccessTime. - */ - private static final Comparator> EVICTION_PRIORITIZER = (t, u) -> { - if (t instanceof Fault) { - return -1; - } else if (u instanceof Fault) { - return 1; - } else { - return Long.signum(u.lastAccessTime(TimeUnit.NANOSECONDS) - t.lastAccessTime(TimeUnit.NANOSECONDS)); - } - }; - - private static final InvalidationListener NULL_INVALIDATION_LISTENER = (InvalidationListener) (key, valueHolder) -> { - // Do nothing - }; - - static final int SAMPLE_SIZE = 8; - private volatile Backend map; - - private final Class keyType; - private final Class valueType; - private final Copier valueCopier; - - private final SizeOfEngine sizeOfEngine; - private final boolean byteSized; - - private volatile long capacity; - private final EvictionAdvisor evictionAdvisor; - private final Expiry expiry; - private final TimeSource timeSource; - private final StoreEventDispatcher storeEventDispatcher; - @SuppressWarnings("unchecked") - private volatile InvalidationListener invalidationListener = (InvalidationListener) NULL_INVALIDATION_LISTENER; - - private CacheConfigurationChangeListener cacheConfigurationChangeListener = new CacheConfigurationChangeListener() { - @Override - public void cacheConfigurationChange(CacheConfigurationChangeEvent event) { - if(event.getProperty().equals(CacheConfigurationProperty.UPDATE_SIZE)) { - ResourcePools updatedPools = (ResourcePools)event.getNewValue(); - ResourcePools configuredPools = (ResourcePools)event.getOldValue(); - if(updatedPools.getPoolForResource(ResourceType.Core.HEAP).getSize() != - configuredPools.getPoolForResource(ResourceType.Core.HEAP).getSize()) { - LOG.info("Updating size to: {}", updatedPools.getPoolForResource(ResourceType.Core.HEAP).getSize()); - SizedResourcePool pool = updatedPools.getPoolForResource(ResourceType.Core.HEAP); - if (pool.getUnit() instanceof MemoryUnit) { - capacity = ((MemoryUnit)pool.getUnit()).toBytes(pool.getSize()); - } else { - capacity = pool.getSize(); - } - } - } - } - }; - - private final OperationObserver getObserver; - private final OperationObserver putObserver; - private final OperationObserver removeObserver; - private final OperationObserver putIfAbsentObserver; - private final OperationObserver conditionalRemoveObserver; - private final OperationObserver replaceObserver; - private final OperationObserver conditionalReplaceObserver; - private final OperationObserver computeObserver; - private final OperationObserver computeIfAbsentObserver; - private final OperationObserver evictionObserver; - private final OperationObserver expirationObserver; - - private final OperationObserver getOrComputeIfAbsentObserver; - private final OperationObserver invalidateObserver; - private final OperationObserver invalidateAllObserver; - private final OperationObserver invalidateAllWithHashObserver; - private final OperationObserver silentInvalidateObserver; - private final OperationObserver silentInvalidateAllObserver; - private final OperationObserver silentInvalidateAllWithHashObserver; - - private static final Supplier REPLACE_EQUALS_TRUE = () -> Boolean.TRUE; - - public OnHeapStore(final Configuration config, final TimeSource timeSource, Copier keyCopier, Copier valueCopier, SizeOfEngine sizeOfEngine, StoreEventDispatcher eventDispatcher) { - if (keyCopier == null) { - throw new NullPointerException("keyCopier must not be null"); - } - if (valueCopier == null) { - throw new NullPointerException("valueCopier must not be null"); - } - SizedResourcePool heapPool = config.getResourcePools().getPoolForResource(ResourceType.Core.HEAP); - if (heapPool == null) { - throw new IllegalArgumentException("OnHeap store must be configured with a resource of type 'heap'"); - } - if (timeSource == null) { - throw new NullPointerException("timeSource must not be null"); - } - if (sizeOfEngine == null) { - throw new NullPointerException("sizeOfEngine must not be null"); - } - this.sizeOfEngine = sizeOfEngine; - this.byteSized = this.sizeOfEngine instanceof NoopSizeOfEngine ? false : true; - this.capacity = byteSized ? ((MemoryUnit) heapPool.getUnit()).toBytes(heapPool.getSize()) : heapPool.getSize(); - this.timeSource = timeSource; - if (config.getEvictionAdvisor() == null) { - this.evictionAdvisor = noAdvice(); - } else { - this.evictionAdvisor = config.getEvictionAdvisor(); - } - this.keyType = config.getKeyType(); - this.valueType = config.getValueType(); - this.expiry = config.getExpiry(); - this.valueCopier = valueCopier; - this.storeEventDispatcher = eventDispatcher; - if (keyCopier instanceof IdentityCopier) { - this.map = new SimpleBackend<>(byteSized); - } else { - this.map = new KeyCopyBackend<>(byteSized, keyCopier); - } - - getObserver = operation(StoreOperationOutcomes.GetOutcome.class).named("get").of(this).tag(STATISTICS_TAG).build(); - putObserver = operation(StoreOperationOutcomes.PutOutcome.class).named("put").of(this).tag(STATISTICS_TAG).build(); - removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).named("remove").of(this).tag(STATISTICS_TAG).build(); - putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag(STATISTICS_TAG).build(); - conditionalRemoveObserver = operation(StoreOperationOutcomes.ConditionalRemoveOutcome.class).named("conditionalRemove").of(this).tag(STATISTICS_TAG).build(); - replaceObserver = operation(StoreOperationOutcomes.ReplaceOutcome.class).named("replace").of(this).tag(STATISTICS_TAG).build(); - conditionalReplaceObserver = operation(StoreOperationOutcomes.ConditionalReplaceOutcome.class).named("conditionalReplace").of(this).tag(STATISTICS_TAG).build(); - computeObserver = operation(StoreOperationOutcomes.ComputeOutcome.class).named("compute").of(this).tag(STATISTICS_TAG).build(); - computeIfAbsentObserver = operation(StoreOperationOutcomes.ComputeIfAbsentOutcome.class).named("computeIfAbsent").of(this).tag(STATISTICS_TAG).build(); - evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).named("eviction").of(this).tag(STATISTICS_TAG).build(); - expirationObserver = operation(StoreOperationOutcomes.ExpirationOutcome.class).named("expiration").of(this).tag(STATISTICS_TAG).build(); - - getOrComputeIfAbsentObserver = operation(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class).named("getOrComputeIfAbsent").of(this).tag(STATISTICS_TAG).build(); - invalidateObserver = operation(CachingTierOperationOutcomes.InvalidateOutcome.class).named("invalidate").of(this).tag(STATISTICS_TAG).build(); - invalidateAllObserver = operation(CachingTierOperationOutcomes.InvalidateAllOutcome.class).named("invalidateAll").of(this).tag(STATISTICS_TAG).build(); - invalidateAllWithHashObserver = operation(CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.class).named("invalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); - - silentInvalidateObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.class).named("silentInvalidate").of(this).tag(STATISTICS_TAG).build(); - silentInvalidateAllObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.class).named("silentInvalidateAll").of(this).tag(STATISTICS_TAG).build(); - silentInvalidateAllWithHashObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.class).named("silentInvalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); - - Set tags = new HashSet<>(Arrays.asList(STATISTICS_TAG, "tier")); - StatisticsManager.createPassThroughStatistic(this, "mappings", tags, () -> map.mappingCount()); - StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, () -> { - if (byteSized) { - return map.byteSize(); - } else { - return -1L; - } - }); - } - - @Override - public ValueHolder get(final K key) throws StoreAccessException { - checkKey(key); - return internalGet(key, true); - } - - private OnHeapValueHolder internalGet(final K key, final boolean updateAccess) throws StoreAccessException { - getObserver.begin(); - try { - OnHeapValueHolder mapping = getQuiet(key); - - if (mapping == null) { - getObserver.end(StoreOperationOutcomes.GetOutcome.MISS); - return null; - } - - if (updateAccess) { - setAccessTimeAndExpiryThenReturnMappingOutsideLock(key, mapping, timeSource.getTimeMillis()); - } - getObserver.end(StoreOperationOutcomes.GetOutcome.HIT); - return mapping; - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } - } - - private OnHeapValueHolder getQuiet(final K key) throws StoreAccessException { - try { - OnHeapValueHolder mapping = map.get(key); - if (mapping == null) { - return null; - } - - if (mapping.isExpired(timeSource.getTimeMillis(), TimeUnit.MILLISECONDS)) { - expireMappingUnderLock(key, mapping); - return null; - } - return mapping; - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } - } - - @Override - public boolean containsKey(final K key) throws StoreAccessException { - checkKey(key); - return getQuiet(key) != null; - } - - @Override - public PutStatus put(final K key, final V value) throws StoreAccessException { - putObserver.begin(); - checkKey(key); - checkValue(value); - - final long now = timeSource.getTimeMillis(); - final AtomicReference statOutcome = new AtomicReference<>(StoreOperationOutcomes.PutOutcome.NOOP); - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - - try { - map.compute(key, (mappedKey, mappedValue) -> { - - if (mappedValue != null && mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - updateUsageInBytesIfRequired(- mappedValue.size()); - mappedValue = null; - } - - if (mappedValue == null) { - OnHeapValueHolder newValue = newCreateValueHolder(key, value, now, eventSink); - if (newValue != null) { - updateUsageInBytesIfRequired(newValue.size()); - statOutcome.set(StoreOperationOutcomes.PutOutcome.PUT); - } - return newValue; - } else { - OnHeapValueHolder newValue = newUpdateValueHolder(key, mappedValue, value, now, eventSink); - if (newValue != null) { - updateUsageInBytesIfRequired(newValue.size() - mappedValue.size()); - } else { - updateUsageInBytesIfRequired(- mappedValue.size()); - } - statOutcome.set(StoreOperationOutcomes.PutOutcome.PUT); - return newValue; - } - }); - storeEventDispatcher.releaseEventSink(eventSink); - - enforceCapacity(); - - StoreOperationOutcomes.PutOutcome outcome = statOutcome.get(); - putObserver.end(outcome); - switch (outcome) { - case PUT: - return PutStatus.PUT; - case NOOP: - return PutStatus.NOOP; - default: - throw new AssertionError("Unknown enum value " + outcome); - } - } catch (RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - putObserver.end(StoreOperationOutcomes.PutOutcome.FAILURE); - throw handleRuntimeException(re); - } - } - - @Override - public boolean remove(final K key) throws StoreAccessException { - removeObserver.begin(); - checkKey(key); - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - final long now = timeSource.getTimeMillis(); - - try { - final AtomicReference statisticOutcome = new AtomicReference<>(StoreOperationOutcomes.RemoveOutcome.MISS); - - map.computeIfPresent(key, (mappedKey, mappedValue) -> { - updateUsageInBytesIfRequired(- mappedValue.size()); - if (mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - fireOnExpirationEvent(mappedKey, mappedValue, eventSink); - return null; - } - - statisticOutcome.set(StoreOperationOutcomes.RemoveOutcome.REMOVED); - eventSink.removed(mappedKey, mappedValue); - return null; - }); - storeEventDispatcher.releaseEventSink(eventSink); - StoreOperationOutcomes.RemoveOutcome outcome = statisticOutcome.get(); - removeObserver.end(outcome); - switch (outcome) { - case REMOVED: - return true; - case MISS: - return false; - default: - throw new AssertionError("Unknown enum value " + outcome); - } - } catch (RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); - } - } - - @Override - public ValueHolder putIfAbsent(final K key, final V value) throws StoreAccessException { - return putIfAbsent(key, value, false); - } - - private OnHeapValueHolder putIfAbsent(final K key, final V value, boolean returnCurrentMapping) throws StoreAccessException { - putIfAbsentObserver.begin(); - checkKey(key); - checkValue(value); - - final AtomicReference> returnValue = new AtomicReference<>(null); - final AtomicBoolean entryActuallyAdded = new AtomicBoolean(); - final long now = timeSource.getTimeMillis(); - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - - try { - OnHeapValueHolder inCache = map.compute(key, (mappedKey, mappedValue) -> { - if (mappedValue == null || mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - if (mappedValue != null) { - updateUsageInBytesIfRequired(- mappedValue.size()); - fireOnExpirationEvent(mappedKey, mappedValue, eventSink); - } - - OnHeapValueHolder holder = newCreateValueHolder(key, value, now, eventSink); - if (holder != null) { - updateUsageInBytesIfRequired(holder.size()); - } - entryActuallyAdded.set(holder != null); - return holder; - } - - returnValue.set(mappedValue); - OnHeapValueHolder holder = setAccessTimeAndExpiryThenReturnMappingUnderLock(key, mappedValue, now, eventSink); - if (holder == null) { - updateUsageInBytesIfRequired(- mappedValue.size()); - } - return holder; - }); - - storeEventDispatcher.releaseEventSink(eventSink); - - if (entryActuallyAdded.get()) { - enforceCapacity(); - putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.PUT); - } else { - putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.HIT); - } - - if (returnCurrentMapping) { - return inCache; - } - } catch (RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); - } - - return returnValue.get(); - } - - @Override - public RemoveStatus remove(final K key, final V value) throws StoreAccessException { - conditionalRemoveObserver.begin(); - checkKey(key); - checkValue(value); - - final AtomicReference outcome = new AtomicReference<>(RemoveStatus.KEY_MISSING); - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - - try { - map.computeIfPresent(key, (mappedKey, mappedValue) -> { - final long now = timeSource.getTimeMillis(); - - if (mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - updateUsageInBytesIfRequired(- mappedValue.size()); - fireOnExpirationEvent(mappedKey, mappedValue, eventSink); - return null; - } else if (value.equals(mappedValue.value())) { - updateUsageInBytesIfRequired(- mappedValue.size()); - eventSink.removed(mappedKey, mappedValue); - outcome.set(RemoveStatus.REMOVED); - return null; - } else { - outcome.set(RemoveStatus.KEY_PRESENT); - OnHeapValueHolder holder = setAccessTimeAndExpiryThenReturnMappingUnderLock(key, mappedValue, now, eventSink); - if (holder == null) { - updateUsageInBytesIfRequired(- mappedValue.size()); - } - return holder; - } - }); - storeEventDispatcher.releaseEventSink(eventSink); - RemoveStatus removeStatus = outcome.get(); - switch (removeStatus) { - case REMOVED: - conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED); - break; - case KEY_MISSING: - case KEY_PRESENT: - conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS); - break; - default: - - } - return removeStatus; - } catch (RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); - } - - } - - @Override - public ValueHolder replace(final K key, final V value) throws StoreAccessException { - replaceObserver.begin(); - checkKey(key); - checkValue(value); - - final AtomicReference> returnValue = new AtomicReference<>(null); - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - - try { - map.computeIfPresent(key, (mappedKey, mappedValue) -> { - final long now = timeSource.getTimeMillis(); - - if (mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - updateUsageInBytesIfRequired(- mappedValue.size()); - fireOnExpirationEvent(mappedKey, mappedValue, eventSink); - return null; - } else { - returnValue.set(mappedValue); - OnHeapValueHolder holder = newUpdateValueHolder(key, mappedValue, value, now, eventSink); - if (holder != null) { - updateUsageInBytesIfRequired(holder.size() - mappedValue.size()); - } else { - updateUsageInBytesIfRequired(- mappedValue.size()); - } - return holder; - } - }); - OnHeapValueHolder valueHolder = returnValue.get(); - storeEventDispatcher.releaseEventSink(eventSink); - enforceCapacity(); - if (valueHolder != null) { - replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.REPLACED); - } else { - replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.MISS); - } - } catch (RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); - } - - return returnValue.get(); - } - - @Override - public ReplaceStatus replace(final K key, final V oldValue, final V newValue) throws StoreAccessException { - conditionalReplaceObserver.begin(); - checkKey(key); - checkValue(oldValue); - checkValue(newValue); - - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - final AtomicReference outcome = new AtomicReference<>(ReplaceStatus.MISS_NOT_PRESENT); - - try { - map.computeIfPresent(key, (mappedKey, mappedValue) -> { - final long now = timeSource.getTimeMillis(); - - V existingValue = mappedValue.value(); - if (mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - fireOnExpirationEvent(mappedKey, mappedValue, eventSink); - updateUsageInBytesIfRequired(- mappedValue.size()); - return null; - } else if (oldValue.equals(existingValue)) { - outcome.set(ReplaceStatus.HIT); - OnHeapValueHolder holder = newUpdateValueHolder(key, mappedValue, newValue, now, eventSink); - if (holder != null) { - updateUsageInBytesIfRequired(holder.size() - mappedValue.size()); - } else { - updateUsageInBytesIfRequired(- mappedValue.size()); - } - return holder; - } else { - outcome.set(ReplaceStatus.MISS_PRESENT); - OnHeapValueHolder holder = setAccessTimeAndExpiryThenReturnMappingUnderLock(key, mappedValue, now, eventSink); - if (holder == null) { - updateUsageInBytesIfRequired(- mappedValue.size()); - } - return holder; - } - }); - storeEventDispatcher.releaseEventSink(eventSink); - enforceCapacity(); - ReplaceStatus replaceStatus = outcome.get(); - switch (replaceStatus) { - case HIT: - conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED); - break; - case MISS_PRESENT: - case MISS_NOT_PRESENT: - conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS); - break; - default: - throw new AssertionError("Unknown enum value " + replaceStatus); - } - return replaceStatus; - } catch (RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); - } - } - - @Override - public void clear() { - this.map = map.clear(); - } - - @Override - public Iterator>> iterator() { - return new Iterator>>() { - private final java.util.Iterator>> it = map.entrySetIterator(); - - @Override - public boolean hasNext() { - return it.hasNext(); - } - - @Override - public Cache.Entry> next() throws StoreAccessException { - Entry> next = it.next(); - final K key = next.getKey(); - final OnHeapValueHolder value = next.getValue(); - return new Cache.Entry>() { - @Override - public K getKey() { - return key; - } - @Override - public ValueHolder getValue() { - return value; - } - }; - } - }; - } - - @Override - public ValueHolder getOrComputeIfAbsent(final K key, final Function> source) throws StoreAccessException { - try { - getOrComputeIfAbsentObserver.begin(); - Backend backEnd = map; - - // First try to find the value from heap - OnHeapValueHolder cachedValue = backEnd.get(key); - - final long now = timeSource.getTimeMillis(); - if (cachedValue == null) { - final Fault fault = new Fault<>(() -> source.apply(key)); - cachedValue = backEnd.putIfAbsent(key, fault); - - if (cachedValue == null) { - return resolveFault(key, backEnd, now, fault); - } - } - - // If we have a real value (not a fault), we make sure it is not expired - // If yes, we remove it and ask the source just in case. If no, we return it (below) - if (!(cachedValue instanceof Fault)) { - if (cachedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - expireMappingUnderLock(key, cachedValue); - - // On expiration, we might still be able to get a value from the fault. For instance, when a load-writer is used - final Fault fault = new Fault<>(() -> source.apply(key)); - cachedValue = backEnd.putIfAbsent(key, fault); - - if (cachedValue == null) { - return resolveFault(key, backEnd, now, fault); - } - } - else { - setAccessTimeAndExpiryThenReturnMappingOutsideLock(key, cachedValue, now); - } - } - - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT); - - // Return the value that we found in the cache (by getting the fault or just returning the plain value depending on what we found) - return getValue(cachedValue); - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } - } - - private ValueHolder resolveFault(final K key, Backend backEnd, long now, Fault fault) throws StoreAccessException { - try { - final ValueHolder value = fault.get(); - final OnHeapValueHolder newValue; - if(value != null) { - newValue = importValueFromLowerTier(key, value, now, backEnd, fault); - if (newValue == null) { - // Inline expiry or sizing failure - backEnd.remove(key, fault); - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return value; - } - } else { - backEnd.remove(key, fault); - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); - return null; - } - - if (backEnd.replace(key, fault, newValue)) { - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED); - updateUsageInBytesIfRequired(newValue.size()); - enforceCapacity(); - return newValue; - } - - final AtomicReference> invalidatedValue = new AtomicReference<>(); - backEnd.computeIfPresent(key, (mappedKey, mappedValue) -> { - notifyInvalidation(key, mappedValue); - invalidatedValue.set(mappedValue); - updateUsageInBytesIfRequired(mappedValue.size()); - return null; - }); - - ValueHolder p = getValue(invalidatedValue.get()); - if (p != null) { - if (p.isExpired(now, TimeUnit.MILLISECONDS)) { - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS); - return null; - } - - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return p; - } - - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return newValue; - - } catch (Throwable e) { - backEnd.remove(key, fault); - throw new StoreAccessException(e); - } - } - - private void invalidateInGetOrComputeIfAbsent(Backend map, final K key, final ValueHolder value, final Fault fault, final long now, final Duration expiration) { - map.computeIfPresent(key, (mappedKey, mappedValue) -> { - if(mappedValue.equals(fault)) { - try { - invalidationListener.onInvalidation(key, cloneValueHolder(key, value, now, expiration, false)); - } catch (LimitExceededException ex) { - throw new AssertionError("Sizing is not expected to happen."); - } - return null; - } - return mappedValue; - }); - } - - @Override - public void invalidate(final K key) throws StoreAccessException { - invalidateObserver.begin(); - checkKey(key); - try { - final AtomicReference outcome = new AtomicReference<>(CachingTierOperationOutcomes.InvalidateOutcome.MISS); - - map.computeIfPresent(key, (k, present) -> { - if (!(present instanceof Fault)) { - notifyInvalidation(key, present); - outcome.set(CachingTierOperationOutcomes.InvalidateOutcome.REMOVED); - } - updateUsageInBytesIfRequired(- present.size()); - return null; - }); - invalidateObserver.end(outcome.get()); - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } - } - - @Override - public void silentInvalidate(K key, final Function, Void> function) throws StoreAccessException { - silentInvalidateObserver.begin(); - checkKey(key); - try { - final AtomicReference outcome = - new AtomicReference<>(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.MISS); - - map.compute(key, (mappedKey, mappedValue) -> { - long size = 0L; - OnHeapValueHolder holderToPass = null; - if (mappedValue != null) { - size = mappedValue.size(); - if (!(mappedValue instanceof Fault)) { - holderToPass = mappedValue; - outcome.set(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.REMOVED); - } - } - function.apply(holderToPass); - updateUsageInBytesIfRequired(- size); - return null; - }); - silentInvalidateObserver.end(outcome.get()); - } catch (RuntimeException re) { - throw handleRuntimeException(re); - } - } - - @Override - public void invalidateAll() throws StoreAccessException { - invalidateAllObserver.begin(); - long errorCount = 0; - StoreAccessException firstException = null; - for(K key : map.keySet()) { - try { - invalidate(key); - } catch (StoreAccessException cae) { - errorCount++; - if (firstException == null) { - firstException = cae; - } - } - } - if (firstException != null) { - invalidateAllObserver.end(CachingTierOperationOutcomes.InvalidateAllOutcome.FAILURE); - throw new StoreAccessException("Error(s) during invalidation - count is " + errorCount, firstException); - } - clear(); - invalidateAllObserver.end(CachingTierOperationOutcomes.InvalidateAllOutcome.SUCCESS); - } - - @Override - public void silentInvalidateAll(final BiFunction, Void> biFunction) throws StoreAccessException { - silentInvalidateAllObserver.begin(); - StoreAccessException exception = null; - long errorCount = 0; - - for (final K k : map.keySet()) { - try { - silentInvalidate(k, mappedValue -> { - biFunction.apply(k, mappedValue); - return null; - }); - } catch (StoreAccessException e) { - errorCount++; - if (exception == null) { - exception = e; - } - } - } - - if (exception != null) { - silentInvalidateAllObserver.end(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.FAILURE); - throw new StoreAccessException("silentInvalidateAll failed - error count: " + errorCount, exception); - } - silentInvalidateAllObserver.end(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.SUCCESS); - } - - @Override - public void silentInvalidateAllWithHash(long hash, BiFunction, Void> biFunction) throws StoreAccessException { - silentInvalidateAllWithHashObserver.begin(); - int intHash = HashUtils.longHashToInt(hash); - Collection>> removed = map.removeAllWithHash(intHash); - for (Entry> entry : removed) { - biFunction.apply(entry.getKey(), entry.getValue()); - } - silentInvalidateAllWithHashObserver.end(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.SUCCESS); - } - - private void notifyInvalidation(final K key, final ValueHolder p) { - final InvalidationListener invalidationListener = this.invalidationListener; - if(invalidationListener != null) { - invalidationListener.onInvalidation(key, p); - } - } - - @Override - public void setInvalidationListener(final InvalidationListener providedInvalidationListener) { - this.invalidationListener = (key, valueHolder) -> { - if (!(valueHolder instanceof Fault)) { - providedInvalidationListener.onInvalidation(key, valueHolder); - } - }; - } - - @Override - public void invalidateAllWithHash(long hash) throws StoreAccessException { - invalidateAllWithHashObserver.begin(); - int intHash = HashUtils.longHashToInt(hash); - Collection>> removed = map.removeAllWithHash(intHash); - for (Entry> entry : removed) { - notifyInvalidation(entry.getKey(), entry.getValue()); - } - LOG.debug("CLIENT: onheap store removed all with hash {}", intHash); - invalidateAllWithHashObserver.end(CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.SUCCESS); - } - - private ValueHolder getValue(final ValueHolder cachedValue) { - if (cachedValue instanceof Fault) { - return ((Fault)cachedValue).get(); - } else { - return cachedValue; - } - } - - private long getSizeOfKeyValuePairs(K key, OnHeapValueHolder holder) throws LimitExceededException { - return sizeOfEngine.sizeof(key, holder); - } - - /** - * Place holder used when loading an entry from the authority into this caching tier - * - * @param the value type of the caching tier - */ - private static class Fault extends OnHeapValueHolder { - - private static final int FAULT_ID = -1; - - @IgnoreSizeOf - private final Supplier> source; - private ValueHolder value; - private Throwable throwable; - private boolean complete; - - public Fault(final Supplier> source) { - super(FAULT_ID, 0, true); - this.source = source; - } - - private void complete(ValueHolder value) { - synchronized (this) { - this.value = value; - this.complete = true; - notifyAll(); - } - } - - private ValueHolder get() { - synchronized (this) { - if (!complete) { - try { - complete(source.get()); - } catch (Throwable e) { - fail(e); - } - } - } - - return throwOrReturn(); - } - - @Override - public long getId() { - throw new UnsupportedOperationException("You should NOT call that?!"); - } - - private ValueHolder throwOrReturn() { - if (throwable != null) { - if (throwable instanceof RuntimeException) { - throw (RuntimeException) throwable; - } - throw new RuntimeException("Faulting from repository failed", throwable); - } - return value; - } - - private void fail(final Throwable t) { - synchronized (this) { - this.throwable = t; - this.complete = true; - notifyAll(); - } - throwOrReturn(); - } - - @Override - public V value() { - throw new UnsupportedOperationException(); - } - - @Override - public long creationTime(TimeUnit unit) { - throw new UnsupportedOperationException(); - } - - @Override - public void setExpirationTime(long expirationTime, TimeUnit unit) { - throw new UnsupportedOperationException(); - } - - @Override - public long expirationTime(TimeUnit unit) { - throw new UnsupportedOperationException(); - } - - @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { - throw new UnsupportedOperationException(); - } - - @Override - public long lastAccessTime(TimeUnit unit) { - return Long.MAX_VALUE; - } - - @Override - public void setLastAccessTime(long lastAccessTime, TimeUnit unit) { - throw new UnsupportedOperationException(); - } - - @Override - public void setSize(long size) { - throw new UnsupportedOperationException("Faults should not be sized"); - } - - /** - * Faults always have a size of 0 - * - * @return {@code 0} - */ - @Override - public long size() { - return 0L; - } - - @Override - public String toString() { - return "[Fault : " + (complete ? (throwable == null ? String.valueOf(value) : throwable.getMessage()) : "???") + "]"; - } - - @Override - public boolean equals(Object obj) { - return obj == this; - } - } - - @Override - public ValueHolder compute(final K key, final BiFunction mappingFunction) throws StoreAccessException { - return compute(key, mappingFunction, REPLACE_EQUALS_TRUE); - } - - @Override - public ValueHolder compute(final K key, final BiFunction mappingFunction, final Supplier replaceEqual) throws StoreAccessException { - computeObserver.begin(); - checkKey(key); - - final long now = timeSource.getTimeMillis(); - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - try { - final AtomicReference> valueHeld = new AtomicReference<>(); - final AtomicReference outcome = - new AtomicReference<>(StoreOperationOutcomes.ComputeOutcome.MISS); - - OnHeapValueHolder computeResult = map.compute(key, (mappedKey, mappedValue) -> { - long sizeDelta = 0L; - if (mappedValue != null && mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - fireOnExpirationEvent(mappedKey, mappedValue, eventSink); - sizeDelta -= mappedValue.size(); - mappedValue = null; - } - - V existingValue = mappedValue == null ? null : mappedValue.value(); - V computedValue = mappingFunction.apply(mappedKey, existingValue); - if (computedValue == null) { - if (existingValue != null) { - eventSink.removed(mappedKey, mappedValue); - outcome.set(StoreOperationOutcomes.ComputeOutcome.REMOVED); - updateUsageInBytesIfRequired(- mappedValue.size()); - } - return null; - } else if ((eq(existingValue, computedValue)) && (!replaceEqual.get())) { - if (mappedValue != null) { - OnHeapValueHolder holder = setAccessTimeAndExpiryThenReturnMappingUnderLock(key, mappedValue, now, eventSink); - outcome.set(StoreOperationOutcomes.ComputeOutcome.HIT); - if (holder == null) { - valueHeld.set(mappedValue); - updateUsageInBytesIfRequired(- mappedValue.size()); - } - return holder; - } - } - - checkValue(computedValue); - if (mappedValue != null) { - outcome.set(StoreOperationOutcomes.ComputeOutcome.PUT); - long expirationTime = mappedValue.expirationTime(OnHeapValueHolder.TIME_UNIT); - OnHeapValueHolder valueHolder = newUpdateValueHolder(key, mappedValue, computedValue, now, eventSink); - sizeDelta -= mappedValue.size(); - if (valueHolder == null) { - try { - valueHeld.set(makeValue(key, computedValue, now, expirationTime, valueCopier, false)); - } catch (LimitExceededException e) { - // Not happening - } - } else { - sizeDelta += valueHolder.size(); - } - updateUsageInBytesIfRequired(sizeDelta); - return valueHolder; - } else { - OnHeapValueHolder holder = newCreateValueHolder(key, computedValue, now, eventSink); - if (holder != null) { - outcome.set(StoreOperationOutcomes.ComputeOutcome.PUT); - sizeDelta += holder.size(); - } - updateUsageInBytesIfRequired(sizeDelta); - return holder; - } - }); - if (computeResult == null && valueHeld.get() != null) { - computeResult = valueHeld.get(); - } - storeEventDispatcher.releaseEventSink(eventSink); - enforceCapacity(); - computeObserver.end(outcome.get()); - return computeResult; - } catch (RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); - } - } - - @Override - public ValueHolder computeIfAbsent(final K key, final Function mappingFunction) throws StoreAccessException { - computeIfAbsentObserver.begin(); - checkKey(key); - - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - try { - final long now = timeSource.getTimeMillis(); - - final AtomicReference> previousValue = new AtomicReference<>(); - final AtomicReference outcome = - new AtomicReference<>(StoreOperationOutcomes.ComputeIfAbsentOutcome.NOOP); - OnHeapValueHolder computeResult = map.compute(key, (mappedKey, mappedValue) -> { - if (mappedValue == null || mappedValue.isExpired(now, TimeUnit.MILLISECONDS)) { - if (mappedValue != null) { - updateUsageInBytesIfRequired(- mappedValue.size()); - fireOnExpirationEvent(mappedKey, mappedValue, eventSink); - } - V computedValue = mappingFunction.apply(mappedKey); - if (computedValue == null) { - return null; - } - - checkValue(computedValue); - OnHeapValueHolder holder = newCreateValueHolder(key, computedValue, now, eventSink); - if (holder != null) { - outcome.set(StoreOperationOutcomes.ComputeIfAbsentOutcome.PUT); - updateUsageInBytesIfRequired(holder.size()); - } - return holder; - } else { - previousValue.set(mappedValue); - outcome.set(StoreOperationOutcomes.ComputeIfAbsentOutcome.HIT); - OnHeapValueHolder holder = setAccessTimeAndExpiryThenReturnMappingUnderLock(key, mappedValue, now, eventSink); - if (holder == null) { - updateUsageInBytesIfRequired(- mappedValue.size()); - } - return holder; - } - }); - OnHeapValueHolder previousValueHolder = previousValue.get(); - - storeEventDispatcher.releaseEventSink(eventSink); - if (computeResult != null) { - enforceCapacity(); - } - computeIfAbsentObserver.end(outcome.get()); - if (computeResult == null && previousValueHolder != null) { - // There was a value - it expired on access - return previousValueHolder; - } - return computeResult; - } catch (RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw handleRuntimeException(re); - } - } - - @Override - public Map> bulkComputeIfAbsent(Set keys, final Function, Iterable>> mappingFunction) throws StoreAccessException { - Map> result = new HashMap<>(); - - for (final K key : keys) { - final ValueHolder newValue = computeIfAbsent(key, k -> { - final Iterable keySet = Collections.singleton(k); - final Iterable> entries = mappingFunction.apply(keySet); - final java.util.Iterator> iterator = entries.iterator(); - final Entry next = iterator.next(); - - K computedKey = next.getKey(); - checkKey(computedKey); - - V computedValue = next.getValue(); - if (computedValue == null) { - return null; - } - - checkValue(computedValue); - return computedValue; - }); - result.put(key, newValue); - } - return result; - } - - @Override - public List getConfigurationChangeListeners() { - List configurationChangeListenerList - = new ArrayList<>(); - configurationChangeListenerList.add(this.cacheConfigurationChangeListener); - return configurationChangeListenerList; - } - - @Override - public Map> bulkCompute(Set keys, final Function>, Iterable>> remappingFunction) throws StoreAccessException { - return bulkCompute(keys, remappingFunction, REPLACE_EQUALS_TRUE); - } - - @Override - public Map> bulkCompute(Set keys, final Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { - - // The Store here is free to slice & dice the keys as it sees fit - // As this OnHeapStore doesn't operate in segments, the best it can do is do a "bulk" write in batches of... one! - - Map> result = new HashMap<>(); - for (K key : keys) { - checkKey(key); - - final ValueHolder newValue = compute(key, (k, oldValue) -> { - final Set> entrySet = Collections.singletonMap(k, oldValue).entrySet(); - final Iterable> entries = remappingFunction.apply(entrySet); - final java.util.Iterator> iterator = entries.iterator(); - final Entry next = iterator.next(); - - K key1 = next.getKey(); - V value = next.getValue(); - checkKey(key1); - if (value != null) { - checkValue(value); - } - return value; - }, replaceEqual); - result.put(key, newValue); - } - return result; - } - - @Override - public StoreEventSource getStoreEventSource() { - return storeEventDispatcher; - } - - private OnHeapValueHolder setAccessTimeAndExpiryThenReturnMappingOutsideLock(K key, OnHeapValueHolder valueHolder, long now) { - Duration duration; - try { - duration = expiry.getExpiryForAccess(key, valueHolder); - } catch (RuntimeException re) { - LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - duration = Duration.ZERO; - } - valueHolder.accessed(now, duration); - if (Duration.ZERO.equals(duration)) { - // Expires mapping through computeIfPresent - expireMappingUnderLock(key, valueHolder); - return null; - } - return valueHolder; - } - - private OnHeapValueHolder setAccessTimeAndExpiryThenReturnMappingUnderLock(K key, OnHeapValueHolder valueHolder, long now, - StoreEventSink eventSink) { - Duration duration = Duration.ZERO; - try { - duration = expiry.getExpiryForAccess(key, valueHolder); - } catch (RuntimeException re) { - LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - } - valueHolder.accessed(now, duration); - if (Duration.ZERO.equals(duration)) { - // Fires event, must happen under lock - fireOnExpirationEvent(key, valueHolder, eventSink); - return null; - } - return valueHolder; - } - - private void expireMappingUnderLock(final K key, final ValueHolder value) { - - final StoreEventSink eventSink = storeEventDispatcher.eventSink(); - try { - map.computeIfPresent(key, (mappedKey, mappedValue) -> { - if(mappedValue.equals(value)) { - fireOnExpirationEvent(key, value, eventSink); - updateUsageInBytesIfRequired(- mappedValue.size()); - return null; - } - return mappedValue; - }); - storeEventDispatcher.releaseEventSink(eventSink); - } catch(RuntimeException re) { - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw re; - } - } - - private OnHeapValueHolder newUpdateValueHolder(K key, OnHeapValueHolder oldValue, V newValue, long now, StoreEventSink eventSink) { - if (oldValue == null) { - throw new NullPointerException(); - } - if (newValue == null) { - throw new NullPointerException(); - } - - Duration duration = Duration.ZERO; - try { - duration = expiry.getExpiryForUpdate(key, oldValue, newValue); - } catch (RuntimeException re) { - LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - } - if (Duration.ZERO.equals(duration)) { - eventSink.updated(key, oldValue, newValue); - eventSink.expired(key, supplierOf(newValue)); - return null; - } - - long expirationTime; - if (duration == null) { - expirationTime = oldValue.expirationTime(OnHeapValueHolder.TIME_UNIT); - } else { - if (duration.isInfinite()) { - expirationTime = ValueHolder.NO_EXPIRE; - } else { - expirationTime = safeExpireTime(now, duration); - } - } - - OnHeapValueHolder holder = null; - try { - holder = makeValue(key, newValue, now, expirationTime, this.valueCopier); - eventSink.updated(key, oldValue, newValue); - } catch (LimitExceededException e) { - LOG.warn(e.getMessage()); - eventSink.removed(key, oldValue); - } - return holder; - } - - private OnHeapValueHolder newCreateValueHolder(K key, V value, long now, StoreEventSink eventSink) { - if (value == null) { - throw new NullPointerException(); - } - - Duration duration; - try { - duration = expiry.getExpiryForCreation(key, value); - } catch (RuntimeException re) { - LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - return null; - } - if (Duration.ZERO.equals(duration)) { - return null; - } - - long expirationTime = duration.isInfinite() ? ValueHolder.NO_EXPIRE : safeExpireTime(now, duration); - - OnHeapValueHolder holder = null; - try { - holder = makeValue(key, value, now, expirationTime, this.valueCopier); - eventSink.created(key, value); - } catch (LimitExceededException e) { - LOG.warn(e.getMessage()); - } - return holder; - } - - private OnHeapValueHolder importValueFromLowerTier(K key, ValueHolder valueHolder, long now, Backend backEnd, Fault fault) { - Duration expiration = Duration.ZERO; - try { - expiration = expiry.getExpiryForAccess(key, valueHolder); - } catch (RuntimeException re) { - LOG.error("Expiry computation caused an exception - Expiry duration will be 0 ", re); - } - - if (Duration.ZERO.equals(expiration)) { - invalidateInGetOrComputeIfAbsent(backEnd, key, valueHolder, fault, now, Duration.ZERO); - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return null; - } - - try{ - return cloneValueHolder(key, valueHolder, now, expiration, true); - } catch (LimitExceededException e) { - LOG.warn(e.getMessage()); - invalidateInGetOrComputeIfAbsent(backEnd, key, valueHolder, fault, now, expiration); - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return null; - } - } - - private OnHeapValueHolder cloneValueHolder(K key, ValueHolder valueHolder, long now, Duration expiration, boolean sizingEnabled) throws LimitExceededException { - V realValue = valueHolder.value(); - boolean evictionAdvice = checkEvictionAdvice(key, realValue); - OnHeapValueHolder clonedValueHolder = null; - if(valueCopier instanceof SerializingCopier) { - if (valueHolder instanceof BinaryValueHolder && ((BinaryValueHolder) valueHolder).isBinaryValueAvailable()) { - clonedValueHolder = new SerializedOnHeapValueHolder<>(valueHolder, ((BinaryValueHolder) valueHolder).getBinaryValue(), - evictionAdvice, ((SerializingCopier) valueCopier).getSerializer(), now, expiration); - } else { - clonedValueHolder = new SerializedOnHeapValueHolder<>(valueHolder, realValue, evictionAdvice, - ((SerializingCopier) valueCopier).getSerializer(), now, expiration); - } - } else { - clonedValueHolder = new CopiedOnHeapValueHolder<>(valueHolder, realValue, evictionAdvice, valueCopier, now, expiration); - } - if (sizingEnabled) { - clonedValueHolder.setSize(getSizeOfKeyValuePairs(key, clonedValueHolder)); - } - return clonedValueHolder; - } - - private OnHeapValueHolder makeValue(K key, V value, long creationTime, long expirationTime, Copier valueCopier) throws LimitExceededException { - return makeValue(key, value, creationTime, expirationTime, valueCopier, true); - } - - private OnHeapValueHolder makeValue(K key, V value, long creationTime, long expirationTime, Copier valueCopier, boolean size) throws LimitExceededException { - boolean evictionAdvice = checkEvictionAdvice(key, value); - OnHeapValueHolder valueHolder; - if (valueCopier instanceof SerializingCopier) { - valueHolder = new SerializedOnHeapValueHolder<>(value, creationTime, expirationTime, evictionAdvice, ((SerializingCopier) valueCopier) - .getSerializer()); - } else { - valueHolder = new CopiedOnHeapValueHolder<>(value, creationTime, expirationTime, evictionAdvice, valueCopier); - } - if (size) { - valueHolder.setSize(getSizeOfKeyValuePairs(key, valueHolder)); - } - return valueHolder; - } - - private boolean checkEvictionAdvice(K key, V value) { - try { - return evictionAdvisor.adviseAgainstEviction(key, value); - } catch (Exception e) { - LOG.error("Exception raised while running eviction advisor " + - "- Eviction will assume entry is NOT advised against eviction", e); - return false; - } - } - - private static long safeExpireTime(long now, Duration duration) { - long millis = OnHeapValueHolder.TIME_UNIT.convert(duration.getLength(), duration.getTimeUnit()); - - if (millis == Long.MAX_VALUE) { - return Long.MAX_VALUE; - } - - long result = now + millis; - if (result < 0) { - return Long.MAX_VALUE; - } - return result; - } - - private void updateUsageInBytesIfRequired(long delta) { - map.updateUsageInBytesIfRequired(delta); - } - - protected long byteSized() { - return map.byteSize(); - } - - @FindbugsSuppressWarnings("QF_QUESTIONABLE_FOR_LOOP") - protected void enforceCapacity() { - StoreEventSink eventSink = storeEventDispatcher.eventSink(); - try { - for (int attempts = 0, evicted = 0; attempts < ATTEMPT_RATIO && evicted < EVICTION_RATIO - && capacity < map.naturalSize(); attempts++) { - if (evict(eventSink)) { - evicted++; - } - } - storeEventDispatcher.releaseEventSink(eventSink); - } catch (RuntimeException re){ - storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); - throw re; - } - } - - /** - * Try to evict a mapping. - * @return true if a mapping was evicted, false otherwise. - * @param eventSink target of eviction event - */ - boolean evict(final StoreEventSink eventSink) { - evictionObserver.begin(); - final Random random = new Random(); - - @SuppressWarnings("unchecked") - Map.Entry> candidate = map.getEvictionCandidate(random, SAMPLE_SIZE, EVICTION_PRIORITIZER, EVICTION_ADVISOR); - - if (candidate == null) { - // 2nd attempt without any advisor - candidate = map.getEvictionCandidate(random, SAMPLE_SIZE, EVICTION_PRIORITIZER, noAdvice()); - } - - if (candidate == null) { - return false; - } else { - final Map.Entry> evictionCandidate = candidate; - final AtomicBoolean removed = new AtomicBoolean(false); - map.computeIfPresent(evictionCandidate.getKey(), (mappedKey, mappedValue) -> { - if (mappedValue.equals(evictionCandidate.getValue())) { - removed.set(true); - if (!(evictionCandidate.getValue() instanceof Fault)) { - eventSink.evicted(evictionCandidate.getKey(), evictionCandidate.getValue()); - invalidationListener.onInvalidation(mappedKey, evictionCandidate.getValue()); - } - updateUsageInBytesIfRequired(-mappedValue.size()); - return null; - } - return mappedValue; - }); - if (removed.get()) { - evictionObserver.end(StoreOperationOutcomes.EvictionOutcome.SUCCESS); - return true; - } else { - evictionObserver.end(StoreOperationOutcomes.EvictionOutcome.FAILURE); - return false; - } - } - } - - private void checkKey(K keyObject) { - if (keyObject == null) { - throw new NullPointerException(); - } - if (!keyType.isAssignableFrom(keyObject.getClass())) { - throw new ClassCastException("Invalid key type, expected : " + keyType.getName() + " but was : " + keyObject.getClass().getName()); - } - } - - private void checkValue(V valueObject) { - if (valueObject == null) { - throw new NullPointerException(); - } - if (!valueType.isAssignableFrom(valueObject.getClass())) { - throw new ClassCastException("Invalid value type, expected : " + valueType.getName() + " but was : " + valueObject.getClass().getName()); - } - } - - private void fireOnExpirationEvent(K mappedKey, ValueHolder mappedValue, StoreEventSink eventSink) { - expirationObserver.begin(); - expirationObserver.end(StoreOperationOutcomes.ExpirationOutcome.SUCCESS); - eventSink.expired(mappedKey, mappedValue); - invalidationListener.onInvalidation(mappedKey, mappedValue); - } - - private static boolean eq(Object o1, Object o2) { - return (o1 == o2) || (o1 != null && o1.equals(o2)); - } - - @ServiceDependencies({TimeSourceService.class, CopyProvider.class, SizeOfEngineProvider.class}) - public static class Provider implements Store.Provider, CachingTier.Provider, HigherCachingTier.Provider { - - private volatile ServiceProvider serviceProvider; - private final Map, List> createdStores = new ConcurrentWeakIdentityHashMap<>(); - private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>(); - - @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { - return resourceTypes.equals(Collections.singleton(ResourceType.Core.HEAP)) ? 1 : 0; - } - - @Override - public int rankCachingTier(Set> resourceTypes, Collection> serviceConfigs) { - return rank(resourceTypes, serviceConfigs); - } - - @Override - public OnHeapStore createStore(final Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { - OnHeapStore store = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - - MappedOperationStatistic get = - new MappedOperationStatistic<>( - store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "get", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(store); - tieredOps.add(get); - - MappedOperationStatistic evict = - new MappedOperationStatistic<>( - store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(store); - tieredOps.add(evict); - - tierOperationStatistics.put(store, tieredOps); - return store; - } - - public OnHeapStore createStoreInternal(final Configuration storeConfig, final StoreEventDispatcher eventDispatcher, - final ServiceConfiguration... serviceConfigs) { - TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource(); - CopyProvider copyProvider = serviceProvider.getService(CopyProvider.class); - Copier keyCopier = copyProvider.createKeyCopier(storeConfig.getKeyType(), storeConfig.getKeySerializer(), serviceConfigs); - Copier valueCopier = copyProvider.createValueCopier(storeConfig.getValueType(), storeConfig.getValueSerializer(), serviceConfigs); - - List copiers = new ArrayList<>(); - copiers.add(keyCopier); - copiers.add(valueCopier); - - SizeOfEngineProvider sizeOfEngineProvider = serviceProvider.getService(SizeOfEngineProvider.class); - SizeOfEngine sizeOfEngine = sizeOfEngineProvider.createSizeOfEngine( - storeConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getUnit(), serviceConfigs); - OnHeapStore onHeapStore = new OnHeapStore<>(storeConfig, timeSource, keyCopier, valueCopier, sizeOfEngine, eventDispatcher); - createdStores.put(onHeapStore, copiers); - return onHeapStore; - } - - @Override - public void releaseStore(Store resource) { - List copiers = createdStores.remove(resource); - if (copiers == null) { - throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); - } - final OnHeapStore onHeapStore = (OnHeapStore)resource; - close(onHeapStore); - StatisticsManager.nodeFor(onHeapStore).clean(); - tierOperationStatistics.remove(onHeapStore); - - CopyProvider copyProvider = serviceProvider.getService(CopyProvider.class); - for (Copier copier: copiers) { - try { - copyProvider.releaseCopier(copier); - } catch (Exception e) { - throw new IllegalStateException("Exception while releasing Copier instance.", e); - } - } - } - - static void close(final OnHeapStore onHeapStore) { - onHeapStore.clear(); - } - - @Override - public void initStore(Store resource) { - checkResource(resource); - - List copiers = createdStores.get(resource); - for (Copier copier : copiers) { - if(copier instanceof SerializingCopier) { - Serializer serializer = ((SerializingCopier)copier).getSerializer(); - if(serializer instanceof StatefulSerializer) { - ((StatefulSerializer)serializer).init(new TransientStateRepository()); - } - } - } - } - - private void checkResource(Object resource) { - if (!createdStores.containsKey(resource)) { - throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); - } - } - - @Override - public void start(final ServiceProvider serviceProvider) { - this.serviceProvider = serviceProvider; - } - - @Override - public void stop() { - this.serviceProvider = null; - createdStores.clear(); - } - - @Override - public CachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - OnHeapStore cachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - - MappedOperationStatistic get = - new MappedOperationStatistic<>( - cachingTier, TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(cachingTier); - tieredOps.add(get); - - MappedOperationStatistic evict - = new MappedOperationStatistic<>( - cachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(cachingTier); - tieredOps.add(evict); - - this.tierOperationStatistics.put(cachingTier, tieredOps); - return cachingTier; - } - - @Override - public void releaseCachingTier(CachingTier resource) { - checkResource(resource); - try { - resource.invalidateAll(); - } catch (StoreAccessException e) { - LOG.warn("Invalidation failure while releasing caching tier", e); - } - releaseStore((Store) resource); - } - - @Override - public void initCachingTier(CachingTier resource) { - checkResource(resource); - } - - @Override - public HigherCachingTier createHigherCachingTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - OnHeapStore higherCachingTier = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher<>(storeConfig - .getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - - MappedOperationStatistic get = - new MappedOperationStatistic<>( - higherCachingTier, TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(higherCachingTier); - tieredOps.add(get); - - MappedOperationStatistic evict = - new MappedOperationStatistic<>( - higherCachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(higherCachingTier); - tieredOps.add(evict); - - tierOperationStatistics.put(higherCachingTier, tieredOps); - return higherCachingTier; - } - - @Override - public void releaseHigherCachingTier(HigherCachingTier resource) { - releaseCachingTier(resource); - } - - @Override - public void initHigherCachingTier(HigherCachingTier resource) { - checkResource(resource); - } - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolder.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolder.java deleted file mode 100644 index f65a9bcbe9..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/LazyOffHeapValueHolder.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.offheap; - -import org.ehcache.core.spi.store.Store; -import org.ehcache.impl.internal.store.BinaryValueHolder; -import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; -import org.ehcache.spi.serialization.Serializer; -import org.terracotta.offheapstore.storage.portability.WriteContext; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.concurrent.TimeUnit; - -/** -* OffHeapValueHolder variant that supports lazy deserialization and also serving the binary value if detached. -*/ -public final class LazyOffHeapValueHolder extends OffHeapValueHolder implements BinaryValueHolder { - - private final Serializer valueSerializer; - private final WriteContext writeContext; - private Mode mode; - private ByteBuffer binaryValue; - private V value; - - public LazyOffHeapValueHolder(long id, ByteBuffer binaryValue, Serializer serializer, long creationTime, long expireTime, long lastAccessTime, long hits, WriteContext writeContext) { - super(id, creationTime, expireTime); - setLastAccessTime(lastAccessTime, TIME_UNIT); - this.binaryValue = binaryValue; - this.valueSerializer = serializer; - this.setHits(hits); - this.writeContext = writeContext; - this.mode = Mode.ATTACHED; - } - - @Override - public V value() { - forceDeserialization(); - return value; - } - - @Override - public ByteBuffer getBinaryValue() throws IllegalStateException { - if (isBinaryValueAvailable()) { - return binaryValue.duplicate(); - } else { - throw new IllegalStateException("This OffHeapValueHolder has not been prepared to hand off its binary form"); - } - } - - @Override - public boolean isBinaryValueAvailable() { - return mode == Mode.DETACHED; - } - - @Override - void updateMetadata(final Store.ValueHolder valueFlushed) { - if(getId() != valueFlushed.getId()) { - throw new IllegalArgumentException("Wrong id passed in [this.id != id] : " + getId() + " != " + valueFlushed.getId()); - } - this.setLastAccessTime(valueFlushed.lastAccessTime(LazyOffHeapValueHolder.TIME_UNIT), LazyOffHeapValueHolder.TIME_UNIT); - this.setExpirationTime(valueFlushed.expirationTime(LazyOffHeapValueHolder.TIME_UNIT), LazyOffHeapValueHolder.TIME_UNIT); - this.setHits(valueFlushed.hits()); - } - - /** - * Must be called under offheap lock, may corrupt memory otherwise - */ - @Override - void writeBack() { - writeContext.setLong(OffHeapValueHolderPortability.ACCESS_TIME_OFFSET, lastAccessTime(TimeUnit.MILLISECONDS)); - writeContext.setLong(OffHeapValueHolderPortability.EXPIRE_TIME_OFFSET, expirationTime(TimeUnit.MILLISECONDS)); - writeContext.setLong(OffHeapValueHolderPortability.HITS_OFFSET, hits()); - writeContext.flush(); - } - - /** - * Must be called under offheap lock (when it actually does something) - */ - @Override - void forceDeserialization() { - if (value == null) { - try { - value = valueSerializer.read(binaryValue.duplicate()); - } catch (ClassNotFoundException e) { - throw new SerializerException(e); - } catch (SerializerException e) { - throw new SerializerException("Seeing this exception and having no other " + - "serialization related issues is a red flag!", e); - } - } - } - - /** - * Must be called under offheap lock, may read invalid memory content otherwise - */ - @Override - void detach() { - if (mode == Mode.ATTACHED) { - byte[] bytes = new byte[binaryValue.remaining()]; - binaryValue.get(bytes); - binaryValue = ByteBuffer.wrap(bytes); - mode = Mode.DETACHED; - } else { - throw new IllegalStateException("OffHeapValueHolder in mode " + mode + " cannot be prepared for delayed deserialization"); - } - } - - private enum Mode { - ATTACHED, DETACHED - } - - private void writeObject(java.io.ObjectOutputStream out) throws IOException { - throw new UnsupportedOperationException("This subclass of AbstractValueHolder is NOT serializable"); - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java deleted file mode 100644 index e62ce0984b..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ /dev/null @@ -1,334 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.offheap; - -import org.ehcache.config.SizedResourcePool; -import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourceType; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.events.StoreEventDispatcher; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; -import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome; -import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.events.ThreadLocalStoreEventDispatcher; -import org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory; -import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; -import org.ehcache.impl.internal.store.offheap.portability.SerializerPortability; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.core.spi.time.TimeSourceService; -import org.ehcache.impl.serialization.TransientStateRepository; -import org.ehcache.spi.serialization.StatefulSerializer; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.tiering.AuthoritativeTier; -import org.ehcache.core.spi.store.tiering.LowerCachingTier; -import org.ehcache.spi.serialization.SerializationProvider; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; -import org.ehcache.core.statistics.TierOperationOutcomes; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.offheapstore.paging.PageSource; -import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; -import org.terracotta.offheapstore.pinning.PinnableSegment; -import org.terracotta.offheapstore.storage.OffHeapBufferStorageEngine; -import org.terracotta.offheapstore.storage.PointerSize; -import org.terracotta.offheapstore.storage.portability.Portability; -import org.terracotta.offheapstore.util.Factory; -import org.terracotta.statistics.MappedOperationStatistic; -import org.terracotta.statistics.StatisticsManager; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.ehcache.config.Eviction.noAdvice; -import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; - -/** - * OffHeapStore - */ -public class OffHeapStore extends AbstractOffHeapStore { - - private static final String STATISTICS_TAG = "OffHeap"; - - private final SwitchableEvictionAdvisor> evictionAdvisor; - private final Serializer keySerializer; - private final Serializer valueSerializer; - private final long sizeInBytes; - - private volatile EhcacheConcurrentOffHeapClockCache> map; - - public OffHeapStore(final Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, long sizeInBytes) { - super(STATISTICS_TAG, config, timeSource, eventDispatcher); - EvictionAdvisor evictionAdvisor = config.getEvictionAdvisor(); - if (evictionAdvisor != null) { - this.evictionAdvisor = wrap(evictionAdvisor); - } else { - this.evictionAdvisor = wrap(noAdvice()); - } - this.keySerializer = config.getKeySerializer(); - this.valueSerializer = config.getValueSerializer(); - this.sizeInBytes = sizeInBytes; - } - - @Override - public List getConfigurationChangeListeners() { - return Collections.emptyList(); - } - - private EhcacheConcurrentOffHeapClockCache> createBackingMap(long size, Serializer keySerializer, Serializer valueSerializer, SwitchableEvictionAdvisor> evictionAdvisor) { - HeuristicConfiguration config = new HeuristicConfiguration(size); - PageSource source = new UpfrontAllocatingPageSource(getBufferSource(), config.getMaximumSize(), config.getMaximumChunkSize(), config.getMinimumChunkSize()); - Portability keyPortability = new SerializerPortability<>(keySerializer); - Portability> elementPortability = new OffHeapValueHolderPortability<>(valueSerializer); - Factory>> storageEngineFactory = OffHeapBufferStorageEngine.createFactory(PointerSize.INT, source, config - .getSegmentDataPageSize(), keyPortability, elementPortability, false, true); - - Factory>> segmentFactory = new EhcacheSegmentFactory<>( - source, - storageEngineFactory, - config.getInitialSegmentTableSize(), - evictionAdvisor, - mapEvictionListener); - return new EhcacheConcurrentOffHeapClockCache<>(evictionAdvisor, segmentFactory, config.getConcurrency()); - - } - - @Override - protected EhcacheOffHeapBackingMap> backingMap() { - return map; - } - - @Override - protected SwitchableEvictionAdvisor> evictionAdvisor() { - return evictionAdvisor; - } - - @ServiceDependencies({TimeSourceService.class, SerializationProvider.class}) - public static class Provider implements Store.Provider, AuthoritativeTier.Provider, LowerCachingTier.Provider { - - private static final Logger LOGGER = LoggerFactory.getLogger(Provider.class); - - private volatile ServiceProvider serviceProvider; - private final Set> createdStores = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap, Boolean>()); - private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>(); - - @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { - return resourceTypes.equals(Collections.singleton(ResourceType.Core.OFFHEAP)) ? 1 : 0; - } - - @Override - public int rankAuthority(ResourceType authorityResource, Collection> serviceConfigs) { - return authorityResource.equals(ResourceType.Core.OFFHEAP) ? 1 : 0; - } - - @Override - public OffHeapStore createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - OffHeapStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - - MappedOperationStatistic get = - new MappedOperationStatistic<>( - store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "get", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(store); - tieredOps.add(get); - - MappedOperationStatistic evict = - new MappedOperationStatistic<>( - store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(store); - tieredOps.add(evict); - - tierOperationStatistics.put(store, tieredOps); - return store; - } - - private OffHeapStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, ServiceConfiguration... serviceConfigs) { - if (serviceProvider == null) { - throw new NullPointerException("ServiceProvider is null in OffHeapStore.Provider."); - } - TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource(); - - SizedResourcePool offHeapPool = storeConfig.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP); - if (!(offHeapPool.getUnit() instanceof MemoryUnit)) { - throw new IllegalArgumentException("OffHeapStore only supports resources with memory unit"); - } - MemoryUnit unit = (MemoryUnit)offHeapPool.getUnit(); - - - OffHeapStore offHeapStore = new OffHeapStore<>(storeConfig, timeSource, eventDispatcher, unit.toBytes(offHeapPool - .getSize())); - createdStores.add(offHeapStore); - return offHeapStore; - } - - @Override - public void releaseStore(Store resource) { - if (!createdStores.contains(resource)) { - throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); - } - OffHeapStore offHeapStore = (OffHeapStore)resource; - close(offHeapStore); - StatisticsManager.nodeFor(offHeapStore).clean(); - tierOperationStatistics.remove(offHeapStore); - } - - static void close(final OffHeapStore resource) { - EhcacheConcurrentOffHeapClockCache localMap = resource.map; - if (localMap != null) { - resource.map = null; - localMap.destroy(); - } - } - - @Override - public void initStore(Store resource) { - if (!createdStores.contains(resource)) { - throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); - } - - OffHeapStore offHeapStore = (OffHeapStore) resource; - Serializer keySerializer = offHeapStore.keySerializer; - if (keySerializer instanceof StatefulSerializer) { - ((StatefulSerializer)keySerializer).init(new TransientStateRepository()); - } - Serializer valueSerializer = offHeapStore.valueSerializer; - if (valueSerializer instanceof StatefulSerializer) { - ((StatefulSerializer)valueSerializer).init(new TransientStateRepository()); - } - - init(offHeapStore); - } - - static void init(final OffHeapStore resource) { - resource.map = resource.createBackingMap(resource.sizeInBytes, resource.keySerializer, resource.valueSerializer, resource.evictionAdvisor); - } - - @Override - public void start(ServiceProvider serviceProvider) { - this.serviceProvider = serviceProvider; - } - - @Override - public void stop() { - this.serviceProvider = null; - createdStores.clear(); - } - - @Override - public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - OffHeapStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig - .getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - - MappedOperationStatistic get = - new MappedOperationStatistic<>( - authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndFault", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(authoritativeTier); - tieredOps.add(get); - - MappedOperationStatistic evict - = new MappedOperationStatistic<>( - authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(authoritativeTier); - tieredOps.add(evict); - - tierOperationStatistics.put(authoritativeTier, tieredOps); - return authoritativeTier; - } - - @Override - public void releaseAuthoritativeTier(AuthoritativeTier resource) { - releaseStore(resource); - } - - @Override - public void initAuthoritativeTier(AuthoritativeTier resource) { - initStore(resource); - } - - @Override - public LowerCachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - OffHeapStore lowerCachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); - Collection> tieredOps = new ArrayList<>(); - - MappedOperationStatistic get - = new MappedOperationStatistic<>( - lowerCachingTier, TierOperationOutcomes.GET_AND_REMOVE_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndRemove", STATISTICS_TAG); - StatisticsManager.associate(get).withParent(lowerCachingTier); - tieredOps.add(get); - - MappedOperationStatistic evict = - new MappedOperationStatistic<>( - lowerCachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); - StatisticsManager.associate(evict).withParent(lowerCachingTier); - tieredOps.add(evict); - - tierOperationStatistics.put(lowerCachingTier, tieredOps); - return lowerCachingTier; - } - - @Override - @SuppressWarnings("unchecked") - public void releaseCachingTier(LowerCachingTier resource) { - if (!createdStores.contains(resource)) { - throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); - } - flushToLowerTier((OffHeapStore) resource); - releaseStore((Store) resource); - } - - private void flushToLowerTier(OffHeapStore resource) { - StoreAccessException lastFailure = null; - int failureCount = 0; - OffHeapStore offheapStore = resource; - Set keys = offheapStore.backingMap().keySet(); - for (Object key : keys) { - try { - offheapStore.invalidate(key); - } catch (StoreAccessException cae) { - lastFailure = cae; - failureCount++; - LOGGER.warn("Error flushing '{}' to lower tier", key, cae); - } - } - if (lastFailure != null) { - throw new RuntimeException("Failed to flush some mappings to lower tier, " + - failureCount + " could not be flushed. This error represents the last failure.", lastFailure); - } - } - - @Override - public void initCachingTier(LowerCachingTier resource) { - if (!createdStores.contains(resource)) { - throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); - } - init((OffHeapStore) resource); - } - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/OffHeapValueHolderPortability.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/OffHeapValueHolderPortability.java deleted file mode 100644 index 20f1d1c1fa..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/OffHeapValueHolderPortability.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.offheap.portability; - -import org.ehcache.impl.internal.store.BinaryValueHolder; -import org.ehcache.impl.internal.store.offheap.LazyOffHeapValueHolder; -import org.ehcache.impl.internal.store.offheap.OffHeapValueHolder; -import org.ehcache.spi.serialization.Serializer; -import org.terracotta.offheapstore.storage.portability.WriteBackPortability; -import org.terracotta.offheapstore.storage.portability.WriteContext; - -import java.nio.ByteBuffer; - -/** - * OffHeapValueHolderPortability - */ -public class OffHeapValueHolderPortability implements WriteBackPortability> { - - public static final int ACCESS_TIME_OFFSET = 16; - public static final int EXPIRE_TIME_OFFSET = 24; - public static final int HITS_OFFSET = 32; - - // 5 longs: id, access, expire, creation time, hits - private static final int FIELDS_OVERHEAD = 40; - - private final Serializer serializer; - - public OffHeapValueHolderPortability(Serializer serializer) { - this.serializer = serializer; - } - - @Override - public ByteBuffer encode(OffHeapValueHolder valueHolder) { - ByteBuffer serialized; - if (valueHolder instanceof BinaryValueHolder && ((BinaryValueHolder)valueHolder).isBinaryValueAvailable()) { - serialized = ((BinaryValueHolder)valueHolder).getBinaryValue(); - } else { - serialized = serializer.serialize(valueHolder.value()); - } - ByteBuffer byteBuffer = ByteBuffer.allocate(serialized.remaining() + FIELDS_OVERHEAD); - byteBuffer.putLong(valueHolder.getId()); - byteBuffer.putLong(valueHolder.creationTime(OffHeapValueHolder.TIME_UNIT)); - byteBuffer.putLong(valueHolder.lastAccessTime(OffHeapValueHolder.TIME_UNIT)); - byteBuffer.putLong(valueHolder.expirationTime(OffHeapValueHolder.TIME_UNIT)); - byteBuffer.putLong(valueHolder.hits()); - byteBuffer.put(serialized); - byteBuffer.flip(); - return byteBuffer; - } - - @Override - public OffHeapValueHolder decode(ByteBuffer byteBuffer) { - return decode(byteBuffer, null); - } - - @Override - public boolean equals(Object o, ByteBuffer byteBuffer) { - return o.equals(decode(byteBuffer)); - } - - @Override - public OffHeapValueHolder decode(ByteBuffer byteBuffer, WriteContext writeContext) { - long id = byteBuffer.getLong(); - long creationTime = byteBuffer.getLong(); - long lastAccessTime = byteBuffer.getLong(); - long expireTime = byteBuffer.getLong(); - long hits = byteBuffer.getLong(); - return new LazyOffHeapValueHolder<>(id, byteBuffer.slice(), serializer, - creationTime, expireTime, lastAccessTime, hits, writeContext); - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java deleted file mode 100644 index 0715da14d9..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java +++ /dev/null @@ -1,521 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.store.tiering; - -import org.ehcache.Cache; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.ResourceType; -import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.spi.store.events.StoreEventSource; -import org.ehcache.core.spi.store.tiering.AuthoritativeTier; -import org.ehcache.core.spi.store.tiering.CachingTier; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.spi.service.ServiceProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.statistics.StatisticsManager; - -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -/** - * A {@link Store} implementation supporting a tiered caching model. - */ -public class TieredStore implements Store { - - private static final Logger LOG = LoggerFactory.getLogger(TieredStore.class); - - private final AtomicReference> cachingTierRef; - private final CachingTier noopCachingTier; - private final CachingTier realCachingTier; - private final AuthoritativeTier authoritativeTier; - - public TieredStore(CachingTier cachingTier, AuthoritativeTier authoritativeTier) { - this.cachingTierRef = new AtomicReference<>(cachingTier); - this.authoritativeTier = authoritativeTier; - this.realCachingTier = cachingTier; - this.noopCachingTier = new NoopCachingTier<>(authoritativeTier); - - - this.realCachingTier.setInvalidationListener(TieredStore.this.authoritativeTier::flush); - - this.authoritativeTier.setInvalidationValve(new AuthoritativeTier.InvalidationValve() { - @Override - public void invalidateAll() throws StoreAccessException { - invalidateAllInternal(); - } - - @Override - public void invalidateAllWithHash(long hash) throws StoreAccessException { - cachingTier().invalidateAllWithHash(hash); - } - }); - - StatisticsManager.associate(cachingTier).withParent(this); - StatisticsManager.associate(authoritativeTier).withParent(this); - } - - - @Override - public ValueHolder get(final K key) throws StoreAccessException { - try { - return cachingTier().getOrComputeIfAbsent(key, keyParam -> { - try { - return authoritativeTier.getAndFault(keyParam); - } catch (StoreAccessException cae) { - throw new ComputationException(cae); - } - }); - } catch (ComputationException ce) { - throw ce.getStoreAccessException(); - } - } - - static class ComputationException extends RuntimeException { - - public ComputationException(StoreAccessException cause) { - super(cause); - } - - public StoreAccessException getStoreAccessException() { - return (StoreAccessException) getCause(); - } - - @Override - public synchronized Throwable fillInStackTrace() { - return this; - } - } - - @Override - public boolean containsKey(K key) throws StoreAccessException { - return authoritativeTier.containsKey(key); - } - - @Override - public PutStatus put(final K key, final V value) throws StoreAccessException { - try { - return authoritativeTier.put(key, value); - } finally { - cachingTier().invalidate(key); - } - } - - @Override - public ValueHolder putIfAbsent(K key, V value) throws StoreAccessException { - try { - return authoritativeTier.putIfAbsent(key, value); - } finally { - cachingTier().invalidate(key); - } - } - - @Override - public boolean remove(K key) throws StoreAccessException { - try { - return authoritativeTier.remove(key); - } finally { - cachingTier().invalidate(key); - } - } - - @Override - public RemoveStatus remove(K key, V value) throws StoreAccessException { - try { - return authoritativeTier.remove(key, value); - } finally { - cachingTier().invalidate(key); - } - } - - @Override - public ValueHolder replace(K key, V value) throws StoreAccessException { - try { - return authoritativeTier.replace(key, value); - } finally { - cachingTier().invalidate(key); - } - } - - @Override - public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { - try { - return authoritativeTier.replace(key, oldValue, newValue); - } finally { - cachingTier().invalidate(key); - } - } - - @Override - public void clear() throws StoreAccessException { - swapCachingTiers(); - try { - authoritativeTier.clear(); - } finally { - try { - realCachingTier.clear(); - } finally { - swapBackCachingTiers(); - } - } - } - - private void invalidateAllInternal() throws StoreAccessException { - swapCachingTiers(); - try { - realCachingTier.invalidateAll(); - } finally { - swapBackCachingTiers(); - } - } - - private void swapCachingTiers() { - boolean interrupted = false; - while(!cachingTierRef.compareAndSet(realCachingTier, noopCachingTier)) { - synchronized (noopCachingTier) { - if(cachingTierRef.get() == noopCachingTier) { - try { - noopCachingTier.wait(); - } catch (InterruptedException e) { - interrupted = true; - } - } - } - } - if(interrupted) { - Thread.currentThread().interrupt(); - } - } - - private void swapBackCachingTiers() { - if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { - throw new AssertionError("Something bad happened"); - } - synchronized (noopCachingTier) { - noopCachingTier.notify(); - } - } - - @Override - public StoreEventSource getStoreEventSource() { - return authoritativeTier.getStoreEventSource(); - } - - @Override - public Iterator>> iterator() { - return authoritativeTier.iterator(); - } - - @Override - public ValueHolder compute(final K key, final BiFunction mappingFunction) throws StoreAccessException { - try { - return authoritativeTier.compute(key, mappingFunction); - } finally { - cachingTier().invalidate(key); - } - } - - @Override - public ValueHolder compute(final K key, final BiFunction mappingFunction, final Supplier replaceEqual) throws StoreAccessException { - try { - return authoritativeTier.compute(key, mappingFunction, replaceEqual); - } finally { - cachingTier().invalidate(key); - } - } - - public ValueHolder computeIfAbsent(final K key, final Function mappingFunction) throws StoreAccessException { - try { - return cachingTier().getOrComputeIfAbsent(key, keyParam -> { - try { - return authoritativeTier.computeIfAbsentAndFault(keyParam, mappingFunction); - } catch (StoreAccessException cae) { - throw new ComputationException(cae); - } - }); - } catch (ComputationException ce) { - throw ce.getStoreAccessException(); - } - } - - @Override - public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { - try { - return authoritativeTier.bulkCompute(keys, remappingFunction); - } finally { - for (K key : keys) { - cachingTier().invalidate(key); - } - } - } - - @Override - public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, Supplier replaceEqual) throws StoreAccessException { - try { - return authoritativeTier.bulkCompute(keys, remappingFunction, replaceEqual); - } finally { - for (K key : keys) { - cachingTier().invalidate(key); - } - } - } - - @Override - public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { - try { - return authoritativeTier.bulkComputeIfAbsent(keys, mappingFunction); - } finally { - for (K key : keys) { - cachingTier().invalidate(key); - } - } - } - - @Override - public List getConfigurationChangeListeners() { - List configurationChangeListenerList - = new ArrayList<>(); - configurationChangeListenerList.addAll(realCachingTier.getConfigurationChangeListeners()); - configurationChangeListenerList.addAll(authoritativeTier.getConfigurationChangeListeners()); - return configurationChangeListenerList; - } - - private CachingTier cachingTier() { - return cachingTierRef.get(); - } - - @ServiceDependencies({CachingTier.Provider.class, AuthoritativeTier.Provider.class}) - public static class Provider implements Store.Provider { - - private volatile ServiceProvider serviceProvider; - private final ConcurrentMap, Map.Entry> providersMap = new ConcurrentWeakIdentityHashMap<>(); - - @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { - if (resourceTypes.size() == 1) { - return 0; - } - ResourceType authorityResource = getAuthorityResource(resourceTypes); - int authorityRank = 0; - Collection authorityProviders = serviceProvider.getServicesOfType(AuthoritativeTier.Provider.class); - for (AuthoritativeTier.Provider authorityProvider : authorityProviders) { - int newRank = authorityProvider.rankAuthority(authorityResource, serviceConfigs); - if (newRank > authorityRank) { - authorityRank = newRank; - } - } - if (authorityRank == 0) { - return 0; - } - Set> cachingResources = new HashSet<>(); - cachingResources.addAll(resourceTypes); - cachingResources.remove(authorityResource); - int cachingTierRank = 0; - Collection cachingTierProviders = serviceProvider.getServicesOfType(CachingTier.Provider.class); - for (CachingTier.Provider cachingTierProvider : cachingTierProviders) { - int newRank = cachingTierProvider.rankCachingTier(cachingResources, serviceConfigs); - if (newRank > cachingTierRank) { - cachingTierRank = newRank; - } - } - if (cachingTierRank == 0) { - return 0; - } - return authorityRank + cachingTierRank; - } - - private ResourceType getAuthorityResource(Set> resourceTypes) { - ResourceType authorityResource = null; - for (ResourceType resourceType : resourceTypes) { - if (authorityResource == null || authorityResource.getTierHeight() > resourceType.getTierHeight()) { - authorityResource = resourceType; - } - } - return authorityResource; - } - - @Override - public Store createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - final List> enhancedServiceConfigs = new ArrayList<>(Arrays.asList(serviceConfigs)); - - final ResourcePools resourcePools = storeConfig.getResourcePools(); - if (rank(resourcePools.getResourceTypeSet(), enhancedServiceConfigs) == 0) { - throw new IllegalArgumentException("TieredStore.Provider does not support configured resource types " - + resourcePools.getResourceTypeSet()); - } - - ResourceType authorityResource = getAuthorityResource(resourcePools.getResourceTypeSet()); - AuthoritativeTier.Provider authoritativeTierProvider = getAuthoritativeTierProvider(authorityResource, enhancedServiceConfigs); - - Set> cachingResources = new HashSet<>(); - cachingResources.addAll(resourcePools.getResourceTypeSet()); - cachingResources.remove(authorityResource); - - CachingTier.Provider cachingTierProvider = getCachingTierProvider(cachingResources, enhancedServiceConfigs); - - final ServiceConfiguration[] configurations = - enhancedServiceConfigs.toArray(new ServiceConfiguration[enhancedServiceConfigs.size()]); - CachingTier cachingTier = cachingTierProvider.createCachingTier(storeConfig, configurations); - AuthoritativeTier authoritativeTier = authoritativeTierProvider.createAuthoritativeTier(storeConfig, configurations); - - TieredStore store = new TieredStore<>(cachingTier, authoritativeTier); - registerStore(store, cachingTierProvider, authoritativeTierProvider); - return store; - } - - private CachingTier.Provider getCachingTierProvider(Set> cachingResources, List> enhancedServiceConfigs) { - CachingTier.Provider cachingTierProvider = null; - Collection cachingTierProviders = serviceProvider.getServicesOfType(CachingTier.Provider.class); - for (CachingTier.Provider provider : cachingTierProviders) { - if (provider.rankCachingTier(cachingResources, enhancedServiceConfigs) != 0) { - cachingTierProvider = provider; - break; - } - } - if (cachingTierProvider == null) { - throw new AssertionError("No CachingTier.Provider found although ranking found one for " + cachingResources); - } - return cachingTierProvider; - } - - private AuthoritativeTier.Provider getAuthoritativeTierProvider(ResourceType authorityResource, List> enhancedServiceConfigs) { - AuthoritativeTier.Provider authoritativeTierProvider = null; - Collection authorityProviders = serviceProvider.getServicesOfType(AuthoritativeTier.Provider.class); - for (AuthoritativeTier.Provider provider : authorityProviders) { - if (provider.rankAuthority(authorityResource, enhancedServiceConfigs) != 0) { - authoritativeTierProvider = provider; - break; - } - } - if (authoritativeTierProvider == null) { - throw new AssertionError("No AuthoritativeTier.Provider found although ranking found one for " + authorityResource); - } - return authoritativeTierProvider; - } - - void registerStore(final TieredStore store, final CachingTier.Provider cachingTierProvider, final AuthoritativeTier.Provider authoritativeTierProvider) { - if(providersMap.putIfAbsent(store, new AbstractMap.SimpleEntry<>(cachingTierProvider, authoritativeTierProvider)) != null) { - throw new IllegalStateException("Instance of the Store already registered!"); - } - } - - @Override - public void releaseStore(Store resource) { - Map.Entry entry = providersMap.get(resource); - if (entry == null) { - throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); - } - TieredStore tieredStore = (TieredStore) resource; - // Stop propagating invalidation to higher tier since they will be released before the authoritative tier - // and thus not be in a state when they can invalidate anymore - tieredStore.authoritativeTier.setInvalidationValve(new AuthoritativeTier.InvalidationValve() { - @Override - public void invalidateAll() throws StoreAccessException { - } - - @Override - public void invalidateAllWithHash(long hash) throws StoreAccessException { - } - }); - entry.getKey().releaseCachingTier(tieredStore.realCachingTier); - entry.getValue().releaseAuthoritativeTier(tieredStore.authoritativeTier); - } - - @Override - public void initStore(Store resource) { - Map.Entry entry = providersMap.get(resource); - if (entry == null) { - throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); - } - TieredStore tieredStore = (TieredStore) resource; - entry.getKey().initCachingTier(tieredStore.realCachingTier); - entry.getValue().initAuthoritativeTier(tieredStore.authoritativeTier); - } - - @Override - public void start(ServiceProvider serviceProvider) { - this.serviceProvider = serviceProvider; - } - - @Override - public void stop() { - this.serviceProvider = null; - providersMap.clear(); - } - } - - private static class NoopCachingTier implements CachingTier { - - private final AuthoritativeTier authoritativeTier; - - public NoopCachingTier(final AuthoritativeTier authoritativeTier) { - this.authoritativeTier = authoritativeTier; - } - - @Override - public ValueHolder getOrComputeIfAbsent(final K key, final Function> source) throws StoreAccessException { - final ValueHolder apply = source.apply(key); - authoritativeTier.flush(key, apply); - return apply; - } - - @Override - public void invalidate(final K key) throws StoreAccessException { - // noop - } - - @Override - public void invalidateAll() { - // noop - } - - @Override - public void clear() throws StoreAccessException { - // noop - } - - @Override - public void setInvalidationListener(final InvalidationListener invalidationListener) { - // noop - } - - @Override - public void invalidateAllWithHash(long hash) throws StoreAccessException { - // noop - } - - @Override - public List getConfigurationChangeListeners() { - return null; - } - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/util/ByteBufferInputStream.java b/impl/src/main/java/org/ehcache/impl/internal/util/ByteBufferInputStream.java deleted file mode 100644 index 369e57f86e..0000000000 --- a/impl/src/main/java/org/ehcache/impl/internal/util/ByteBufferInputStream.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.util; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.ByteBuffer; - -import static java.lang.Math.max; -import static java.lang.Math.min; - -public class ByteBufferInputStream extends InputStream { - - private final ByteBuffer buffer; - - public ByteBufferInputStream(ByteBuffer buffer) { - this.buffer = buffer.slice(); - } - - @Override - public int read() throws IOException { - if (buffer.hasRemaining()) { - return 0xff & buffer.get(); - } else { - return -1; - } - } - - @Override - public int read(byte b[], int off, int len) { - len = min(len, buffer.remaining()); - buffer.get(b, off, len); - return len; - } - - @Override - public long skip(long n) { - n = min(buffer.remaining(), max(n, 0)); - buffer.position((int) (buffer.position() + n)); - return n; - } - - @Override - public synchronized int available() { - return buffer.remaining(); - } -} diff --git a/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java b/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java deleted file mode 100644 index 33ce48bbb7..0000000000 --- a/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.persistence; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - -import org.ehcache.CachePersistenceException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.nio.charset.Charset; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayDeque; -import java.util.Deque; -import java.util.HashSet; -import java.util.Locale; -import java.util.Set; - -import static java.lang.Integer.toHexString; -import static java.nio.charset.Charset.forName; - -/** - * A bunch of utility functions, mainly used by {@link DefaultLocalPersistenceService} and - * {@link FileBasedStateRepository} within this class. - */ -final class FileUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(FileUtils.class); - private static final Charset UTF8 = forName("UTF8"); - private static final int DEL = 0x7F; - private static final char ESCAPE = '%'; - - private static final Set ILLEGALS = new HashSet<>(); - static { - ILLEGALS.add('/'); - ILLEGALS.add('\\'); - ILLEGALS.add('<'); - ILLEGALS.add('>'); - ILLEGALS.add(':'); - ILLEGALS.add('"'); - ILLEGALS.add('|'); - ILLEGALS.add('?'); - ILLEGALS.add('*'); - ILLEGALS.add('.'); - } - - static void createLocationIfRequiredAndVerify(final File rootDirectory) { - if(!rootDirectory.exists()) { - if(!rootDirectory.mkdirs()) { - throw new IllegalArgumentException("Directory couldn't be created: " + rootDirectory.getAbsolutePath()); - } - } else if(!rootDirectory.isDirectory()) { - throw new IllegalArgumentException("Location is not a directory: " + rootDirectory.getAbsolutePath()); - } - - if(!rootDirectory.canWrite()) { - throw new IllegalArgumentException("Location isn't writable: " + rootDirectory.getAbsolutePath()); - } - } - - static File createSubDirectory(File mainDirectory, String name) throws CachePersistenceException { - validateName(name); - File subDirectory = new File(mainDirectory, name); - create(subDirectory); - return subDirectory; - } - - static void validateName(String name) { - if (!name.matches("[a-zA-Z0-9\\-_]+")) { - throw new IllegalArgumentException("Name is invalid for persistence context: " + name); - } - } - - static void create(File directory) throws CachePersistenceException { - if (directory.isDirectory()) { - LOGGER.debug("Reusing {}", directory.getAbsolutePath()); - } else if (directory.mkdir()) { - LOGGER.debug("Created {}", directory.getAbsolutePath()); - } else if (directory.isDirectory()) { - // if create directory fails, check once more if it is due to concurrent creation. - LOGGER.debug("Reusing {}", directory.getAbsolutePath()); - } else { - throw new CachePersistenceException("Unable to create or reuse directory: " + directory.getAbsolutePath()); - } - } - - static boolean recursiveDeleteDirectoryContent(File file) { - File[] contents = file.listFiles(); - if (contents == null) { - throw new IllegalArgumentException("File " + file.getAbsolutePath() + " is not a directory"); - } else { - boolean deleteSuccessful = true; - for (File f : contents) { - deleteSuccessful &= tryRecursiveDelete(f); - } - return deleteSuccessful; - } - } - - private static boolean recursiveDelete(File file) { - Deque toDelete = new ArrayDeque<>(); - toDelete.push(file); - while (!toDelete.isEmpty()) { - File target = toDelete.pop(); - File[] contents = target.listFiles(); - if (contents == null || contents.length == 0) { - if (target.exists() && !target.delete()) { - return false; - } - } else { - toDelete.push(target); - for (File f : contents) { - toDelete.push(f); - } - } - } - return true; - } - - @SuppressFBWarnings("DM_GC") - static boolean tryRecursiveDelete(File file) { - boolean interrupted = false; - try { - for (int i = 0; i < 5; i++) { - if (recursiveDelete(file) || !isWindows()) { - return true; - } else { - System.gc(); - System.runFinalization(); - - try { - Thread.sleep(50); - } catch (InterruptedException e) { - interrupted = true; - } - } - } - } finally { - if (interrupted) { - Thread.currentThread().interrupt(); - } - } - return false; - } - - private static boolean isWindows() { - return System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows"); - } - - /** - * sanitize a name for valid file or directory name - * - * @param name the name to sanitize - * @return sanitized version of name - */ - static String safeIdentifier(String name) { - return safeIdentifier(name, true); - } - - static String safeIdentifier(String name, boolean withSha1) { - int len = name.length(); - StringBuilder sb = new StringBuilder(len); - for (int i = 0; i < len; i++) { - char c = name.charAt(i); - if (c <= ' ' || c >= DEL || ILLEGALS.contains(c) || c == ESCAPE) { - sb.append(ESCAPE); - sb.append(String.format("%04x", (int) c)); - } else { - sb.append(c); - } - } - if (withSha1) { - sb.append("_").append(sha1(name)); - } - return sb.toString(); - } - - private static String sha1(String input) { - StringBuilder sb = new StringBuilder(); - for (byte b : getSha1Digest().digest(input.getBytes(UTF8))) { - sb.append(toHexString((b & 0xf0) >>> 4)); - sb.append(toHexString((b & 0xf))); - } - return sb.toString(); - } - - private static MessageDigest getSha1Digest() { - try { - return MessageDigest.getInstance("SHA-1"); - } catch (NoSuchAlgorithmException e) { - throw new AssertionError("All JDKs must have SHA-1"); - } - } -} diff --git a/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory deleted file mode 100644 index 6eb820c274..0000000000 --- a/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory +++ /dev/null @@ -1,18 +0,0 @@ -org.ehcache.impl.internal.store.heap.OnHeapStoreProviderFactory -org.ehcache.impl.internal.store.offheap.OffHeapStoreProviderFactory -org.ehcache.impl.internal.store.disk.OffHeapDiskStoreProviderFactory -org.ehcache.impl.internal.store.tiering.TieredStoreProviderFactory -org.ehcache.impl.internal.store.tiering.CompoundCachingTierProviderFactory - -org.ehcache.impl.internal.TimeSourceServiceFactory -org.ehcache.impl.internal.spi.serialization.DefaultSerializationProviderFactory -org.ehcache.impl.internal.spi.loaderwriter.DefaultCacheLoaderWriterProviderFactory -org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProviderFactory -org.ehcache.impl.internal.executor.DefaultExecutionServiceFactory -org.ehcache.impl.internal.persistence.DefaultLocalPersistenceServiceFactory -org.ehcache.impl.internal.persistence.DefaultDiskResourceServiceFactory -org.ehcache.impl.internal.loaderwriter.writebehind.WriteBehindProviderFactory -org.ehcache.impl.internal.events.CacheEventNotificationListenerServiceProviderFactory -org.ehcache.impl.internal.spi.copy.DefaultCopyProviderFactory -org.ehcache.impl.internal.sizeof.DefaultSizeOfEngineProviderFactory -org.ehcache.impl.internal.statistics.DefaultStatisticsServiceFactory diff --git a/impl/src/test/java/org/ehcache/EhcacheRuntimeConfigurationTest.java b/impl/src/test/java/org/ehcache/EhcacheRuntimeConfigurationTest.java deleted file mode 100644 index ab4eaf5f03..0000000000 --- a/impl/src/test/java/org/ehcache/EhcacheRuntimeConfigurationTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.ResourceType; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; -import org.ehcache.config.units.EntryUnit; -import org.junit.Test; - -import java.io.File; -import org.ehcache.config.units.MemoryUnit; - -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -/** - * @author rism - */ -public class EhcacheRuntimeConfigurationTest { - - @Test - public void testUpdateResources() { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10L, EntryUnit.ENTRIES).disk(10, MemoryUnit.MB).build()).build(); - - final CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .with(new CacheManagerPersistenceConfiguration(new File(System.getProperty("java.io.tmpdir") + "/myData"))) - .withCache("cache", cacheConfiguration).build(true); - - Cache cache = cacheManager.getCache("cache", Long.class, String.class); - - ResourcePoolsBuilder poolsBuilder = ResourcePoolsBuilder.newResourcePoolsBuilder(); - poolsBuilder = poolsBuilder.heap(20L, EntryUnit.ENTRIES); - ResourcePools pools = poolsBuilder.build(); - cache.getRuntimeConfiguration().updateResourcePools(pools); - assertThat(cache.getRuntimeConfiguration().getResourcePools() - .getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L)); - pools = poolsBuilder.build(); - cache.getRuntimeConfiguration().updateResourcePools(pools); - assertThat(cache.getRuntimeConfiguration().getResourcePools() - .getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L)); - cacheManager.close(); - } - - @Test - public void testUpdateFailureDoesNotUpdate() { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10L, EntryUnit.ENTRIES).build()).build(); - - final CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("cache", cacheConfiguration).build(true); - - Cache cache = cacheManager.getCache("cache", Long.class, String.class); - - ResourcePoolsBuilder poolsBuilder = ResourcePoolsBuilder.newResourcePoolsBuilder(); - poolsBuilder = poolsBuilder.heap(20L, EntryUnit.ENTRIES).disk(10, MemoryUnit.MB); - ResourcePools pools = poolsBuilder.build(); - try { - cache.getRuntimeConfiguration().updateResourcePools(pools); - } catch (IllegalArgumentException iae) { -// expected - assertThat(iae.getMessage(), is("Pools to be updated cannot contain previously undefined resources pools")); - } - assertThat(cache.getRuntimeConfiguration().getResourcePools() - .getPoolForResource(ResourceType.Core.HEAP).getSize(), is(10L)); - cacheManager.close(); - } -} diff --git a/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java deleted file mode 100644 index a5843409fc..0000000000 --- a/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.config.builders; - -import org.ehcache.config.*; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.spi.service.ServiceUtils; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.config.copy.DefaultCopierConfiguration; -import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; -import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; -import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.spi.service.ServiceConfiguration; -import org.hamcrest.Matcher; -import org.hamcrest.Matchers; -import org.hamcrest.core.IsSame; -import org.junit.Test; - -import java.nio.ByteBuffer; -import java.util.Map; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; - -public class CacheConfigurationBuilderTest { - - @Test - public void testEvictionAdvisor() throws Exception { - EvictionAdvisor evictionAdvisor = (key, value) -> false; - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .withEvictionAdvisor(evictionAdvisor) - .build(); - - @SuppressWarnings("unchecked") - Matcher> evictionAdvisorMatcher = (Matcher) sameInstance(cacheConfiguration - .getEvictionAdvisor()); - assertThat(evictionAdvisor, evictionAdvisorMatcher); - } - - @Test - public void testLoaderWriter() throws Exception { - CacheLoaderWriter loaderWriter = new CacheLoaderWriter() { - @Override - public Object load(Object key) throws Exception { - return null; - } - - @Override - public Map loadAll(Iterable keys) throws Exception { - return null; - } - - @Override - public void write(Object key, Object value) throws Exception { - - } - - @Override - public void writeAll(Iterable iterable) throws BulkCacheWritingException, Exception { - - } - - @Override - public void delete(Object key) throws Exception { - - } - - @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { - - } - }; - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .withLoaderWriter(loaderWriter) - .build(); - - DefaultCacheLoaderWriterConfiguration cacheLoaderWriterConfiguration = ServiceUtils.findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, cacheConfiguration.getServiceConfigurations()); - Object instance = ((ClassInstanceConfiguration) cacheLoaderWriterConfiguration).getInstance(); - assertThat(instance, Matchers.sameInstance(loaderWriter)); - } - - @Test - public void testKeySerializer() throws Exception { - Serializer keySerializer = new Serializer() { - @Override - public ByteBuffer serialize(Object object) throws SerializerException { - return null; - } - - @Override - public Object read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return null; - } - - @Override - public boolean equals(Object object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return false; - } - }; - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .withKeySerializer(keySerializer) - .build(); - - - DefaultSerializerConfiguration serializerConfiguration = ServiceUtils.findSingletonAmongst(DefaultSerializerConfiguration.class, cacheConfiguration.getServiceConfigurations()); - assertThat(serializerConfiguration.getType(), is(DefaultSerializerConfiguration.Type.KEY)); - Object instance = serializerConfiguration.getInstance(); - assertThat(instance, Matchers.sameInstance(keySerializer)); - } - - @Test - public void testValueSerializer() throws Exception { - Serializer valueSerializer = new Serializer() { - @Override - public ByteBuffer serialize(Object object) throws SerializerException { - return null; - } - - @Override - public Object read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return null; - } - - @Override - public boolean equals(Object object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return false; - } - }; - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .withValueSerializer(valueSerializer) - .build(); - - - DefaultSerializerConfiguration serializerConfiguration = ServiceUtils.findSingletonAmongst(DefaultSerializerConfiguration.class, cacheConfiguration.getServiceConfigurations()); - assertThat(serializerConfiguration.getType(), is(DefaultSerializerConfiguration.Type.VALUE)); - Object instance = ((ClassInstanceConfiguration) serializerConfiguration).getInstance(); - assertThat(instance, Matchers.sameInstance(valueSerializer)); - } - - @Test - public void testKeyCopier() throws Exception { - Copier keyCopier = new Copier() { - @Override - public Long copyForRead(Object obj) { - return null; - } - - @Override - public Long copyForWrite(Object obj) { - return null; - } - }; - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .withKeyCopier(keyCopier) - .build(); - - - DefaultCopierConfiguration copierConfiguration = ServiceUtils.findSingletonAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); - assertThat(copierConfiguration.getType(), is(DefaultCopierConfiguration.Type.KEY)); - Object instance = copierConfiguration.getInstance(); - assertThat(instance, Matchers.sameInstance(keyCopier)); - } - - @Test - public void testValueCopier() throws Exception { - Copier valueCopier = new Copier() { - @Override - public Long copyForRead(Object obj) { - return null; - } - - @Override - public Long copyForWrite(Object obj) { - return null; - } - }; - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Object.class, Object.class, heap(10)) - .withValueCopier(valueCopier) - .build(); - - - DefaultCopierConfiguration copierConfiguration = ServiceUtils.findSingletonAmongst(DefaultCopierConfiguration.class, cacheConfiguration.getServiceConfigurations()); - assertThat(copierConfiguration.getType(), is(DefaultCopierConfiguration.Type.VALUE)); - Object instance = copierConfiguration.getInstance(); - assertThat(instance, Matchers.sameInstance(valueCopier)); - } - - @Test - public void testNothing() { - final CacheConfigurationBuilder builder = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, CharSequence.class, heap(10)); - - final Expiry expiry = Expirations.timeToIdleExpiration(Duration.INFINITE); - - builder - .withEvictionAdvisor((key, value) -> value.charAt(0) == 'A') - .withExpiry(expiry) - .build(); - } - - @Test - public void testOffheapGetsAddedToCacheConfiguration() { - CacheConfigurationBuilder builder = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, CharSequence.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES) - .offheap(10, MemoryUnit.MB)); - - final Expiry expiry = Expirations.timeToIdleExpiration(Duration.INFINITE); - - CacheConfiguration config = builder - .withEvictionAdvisor((key, value) -> value.charAt(0) == 'A') - .withExpiry(expiry) - .build(); - assertThat(config.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getType(), Matchers.is(ResourceType.Core.OFFHEAP)); - assertThat(config.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getUnit(), Matchers.is(MemoryUnit.MB)); - } - - @Test - public void testSizeOf() { - CacheConfigurationBuilder builder = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(10)); - - builder = builder.withSizeOfMaxObjectSize(10, MemoryUnit.B).withSizeOfMaxObjectGraph(100); - CacheConfiguration configuration = builder.build(); - - DefaultSizeOfEngineConfiguration sizeOfEngineConfiguration = ServiceUtils.findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, configuration.getServiceConfigurations()); - assertThat(sizeOfEngineConfiguration, notNullValue()); - assertEquals(sizeOfEngineConfiguration.getMaxObjectSize(), 10); - assertEquals(sizeOfEngineConfiguration.getUnit(), MemoryUnit.B); - assertEquals(sizeOfEngineConfiguration.getMaxObjectGraphSize(), 100); - - builder = builder.withSizeOfMaxObjectGraph(1000); - configuration = builder.build(); - - sizeOfEngineConfiguration = ServiceUtils.findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, configuration.getServiceConfigurations()); - assertEquals(sizeOfEngineConfiguration.getMaxObjectGraphSize(), 1000); - - } - - @Test - public void testCopyingOfExistingConfiguration() { - Class keyClass = Integer.class; - Class valueClass = String.class; - ClassLoader loader = mock(ClassLoader.class); - @SuppressWarnings("unchecked") - EvictionAdvisor eviction = mock(EvictionAdvisor.class); - @SuppressWarnings("unchecked") - Expiry expiry = mock(Expiry.class); - ServiceConfiguration service = mock(ServiceConfiguration.class); - - CacheConfiguration configuration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, heap(10)) - .withClassLoader(loader) - .withEvictionAdvisor(eviction) - .withExpiry(expiry) - .add(service) - .build(); - - CacheConfiguration copy = CacheConfigurationBuilder.newCacheConfigurationBuilder(configuration).build(); - - assertThat(copy.getKeyType(), equalTo(keyClass)); - assertThat(copy.getValueType(), equalTo(valueClass)); - assertThat(copy.getClassLoader(), equalTo(loader)); - - assertThat(copy.getEvictionAdvisor(), IsSame.>sameInstance(eviction)); - assertThat(copy.getExpiry(), IsSame.>sameInstance(expiry)); - assertThat(copy.getServiceConfigurations(), contains(IsSame.>sameInstance(service))); - } -} diff --git a/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java deleted file mode 100644 index a735f7ac07..0000000000 --- a/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.config.builders; - -import org.ehcache.CacheManager; -import org.ehcache.PersistentCacheManager; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.impl.copy.SerializingCopier; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.impl.serialization.JavaSerializer; -import org.ehcache.spi.serialization.Serializer; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.util.concurrent.atomic.AtomicInteger; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.mockito.Mockito.mock; - -public class CacheManagerBuilderTest { - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testIsExtensible() { - - final AtomicInteger counter = new AtomicInteger(0); - - final PersistentCacheManager cacheManager = newCacheManagerBuilder().with((CacheManagerConfiguration) other -> { - counter.getAndIncrement(); - return mock(CacheManagerBuilder.class); - }).build(true); - - assertThat(cacheManager).isNull(); - assertThat(counter.get()).isEqualTo(1); - } - - @Test - public void testCanOverrideCopierInConfig() { - @SuppressWarnings("unchecked") - CacheManagerBuilder managerBuilder = newCacheManagerBuilder() - .withCopier(Long.class, (Class) IdentityCopier.class); - assertThat(managerBuilder.withCopier(Long.class, SerializingCopier.asCopierClass())).isNotNull(); - } - - @Test - public void testCanOverrideSerializerConfig() { - @SuppressWarnings("unchecked") - Class> serializer1 = (Class) JavaSerializer.class; - CacheManagerBuilder managerBuilder = newCacheManagerBuilder() - .withSerializer(String.class, serializer1); - @SuppressWarnings("unchecked") - Class> serializer2 = (Class) CompactJavaSerializer.class; - assertThat(managerBuilder.withSerializer(String.class, serializer2)).isNotNull(); - } - - @Test(expected = IllegalArgumentException.class) - public void testDuplicateServiceCreationConfigurationFails() { - newCacheManagerBuilder().using(new DefaultCopyProviderConfiguration()) - .using(new DefaultCopyProviderConfiguration()); - } - - @Test - public void testDuplicateServiceCreationConfigurationOkWhenExplicit() { - assertThat(newCacheManagerBuilder().using(new DefaultCopyProviderConfiguration()) - .replacing(new DefaultCopyProviderConfiguration())).isNotNull(); - } - - @Test - public void testShouldNotBeAllowedToRegisterTwoCachesWithSameAlias() { - String cacheAlias = "cacheAliasSameName"; - - CacheConfiguration cacheConfig = CacheConfigurationBuilder - .newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10)) - .build(); - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Cache alias 'cacheAliasSameName' already exists"); - - CacheManagerBuilder.newCacheManagerBuilder() - .withCache(cacheAlias, cacheConfig) - .withCache(cacheAlias, cacheConfig); - } -} diff --git a/impl/src/test/java/org/ehcache/docs/Ehcache2.java b/impl/src/test/java/org/ehcache/docs/Ehcache2.java deleted file mode 100644 index f64c78c40b..0000000000 --- a/impl/src/test/java/org/ehcache/docs/Ehcache2.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.docs; - -import net.sf.ehcache.CacheManager; -import net.sf.ehcache.Cache; -import net.sf.ehcache.Element; -import net.sf.ehcache.config.CacheConfiguration; -import org.junit.Test; - - -public class Ehcache2 { - - @Test - public void ehcache2Expiry() throws Exception { - // tag::CustomExpiryEhcache2[] - int defaultCacheTTLInSeconds = 20; - - CacheManager cacheManager = initCacheManager(); - CacheConfiguration cacheConfiguration = new CacheConfiguration().name("cache") - .maxEntriesLocalHeap(100) - .timeToLiveSeconds(defaultCacheTTLInSeconds); // <1> - cacheManager.addCache(new Cache(cacheConfiguration)); - - Element element = new Element(10L, "Hello"); - - int ttlInSeconds = getTimeToLiveInSeconds((Long)element.getObjectKey(), (String)element.getObjectValue()); // <2> - - if (ttlInSeconds != defaultCacheTTLInSeconds) { // <3> - element.setTimeToLive(ttlInSeconds); - } - - cacheManager.getCache("cache").put(element); - - System.out.println(cacheManager.getCache("cache").get(10L).getObjectValue()); - - sleep(2100); // <4> - - // Now the returned element should be null, as the mapping is expired. - System.out.println(cacheManager.getCache("cache").get(10L)); - // end::CustomExpiryEhcache2[] - } - - /** - * Returns the expiry in Seconds for the given key/value pair, based on some complex logic. - * @param key Cache Key - * @param value Cache Value - * @return - */ - private int getTimeToLiveInSeconds(Long key, String value) { - // Returns TTL of 10 seconds for keys less than 1000 - if (key < 1000) { - return 2; - } - - // Otherwise return 5 seconds TTL - return 1; - } - - - /** - * Initialize and return the cache manager. - * @return CacheManager - */ - private CacheManager initCacheManager() { - CacheManager cacheManager = new CacheManager(); - return cacheManager; - } - - private void sleep(int millisecondsToSleep) throws Exception { - Thread.sleep(millisecondsToSleep); - } -} diff --git a/impl/src/test/java/org/ehcache/docs/GettingStarted.java b/impl/src/test/java/org/ehcache/docs/GettingStarted.java deleted file mode 100644 index 5e45fd3093..0000000000 --- a/impl/src/test/java/org/ehcache/docs/GettingStarted.java +++ /dev/null @@ -1,375 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.docs; - -import org.ehcache.Cache; -import org.ehcache.CacheManager; -import org.ehcache.PersistentCacheManager; -import org.ehcache.ValueSupplier; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.builders.WriteBehindConfigurationBuilder; -import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.docs.plugs.ListenerObject; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.serialization.JavaSerializer; -import org.ehcache.docs.plugs.OddKeysEvictionAdvisor; -import org.ehcache.docs.plugs.SampleLoaderWriter; -import org.ehcache.event.EventFiring; -import org.ehcache.event.EventOrdering; -import org.ehcache.event.EventType; -import org.ehcache.impl.copy.ReadWriteCopier; -import org.junit.Test; - -import java.io.File; -import java.io.Serializable; -import java.net.URISyntaxException; -import java.util.EnumSet; -import java.util.concurrent.TimeUnit; - -import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -/** - * Samples to get started with Ehcache 3 - * - * If you add new examples, you should use tags to have them included in the README.adoc - * You need to edit the README.adoc too to add your new content. - * The callouts are also used in docs/user/index.adoc - */ -@SuppressWarnings("unused") -public class GettingStarted { - - @Test - public void cachemanagerExample() { - // tag::cachemanagerExample[] - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() // <1> - .withCache("preConfigured", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))) // <2> - .build(); // <3> - cacheManager.init(); // <4> - - Cache preConfigured = - cacheManager.getCache("preConfigured", Long.class, String.class); // <5> - - Cache myCache = cacheManager.createCache("myCache", // <6> - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10))); - - myCache.put(1L, "da one!"); // <7> - String value = myCache.get(1L); // <8> - - cacheManager.removeCache("preConfigured"); // <9> - - cacheManager.close(); // <10> - // end::cachemanagerExample[] - } - - @Test - public void threeTiersCacheManager() throws Exception { - // tag::threeTiersCacheManager[] - PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .with(CacheManagerBuilder.persistence(new File(getStoragePath(), "myData"))) // <1> - .withCache("threeTieredCache", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) // <2> - .offheap(1, MemoryUnit.MB) // <3> - .disk(20, MemoryUnit.MB, true) // <4> - ) - ).build(true); - - Cache threeTieredCache = persistentCacheManager.getCache("threeTieredCache", Long.class, String.class); - threeTieredCache.put(1L, "stillAvailableAfterRestart"); // <5> - - persistentCacheManager.close(); - // end::threeTiersCacheManager[] - } - - @Test - public void testCacheEventListener() { - // tag::cacheEventListener[] - CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder - .newEventListenerConfiguration(new ListenerObject(), EventType.CREATED, EventType.UPDATED) // <1> - .unordered().asynchronous(); // <2> - - final CacheManager manager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("foo", - CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, ResourcePoolsBuilder.heap(10)) - .add(cacheEventListenerConfiguration) // <3> - ).build(true); - - final Cache cache = manager.getCache("foo", String.class, String.class); - cache.put("Hello", "World"); // <4> - cache.put("Hello", "Everyone"); // <5> - cache.remove("Hello"); // <6> - // end::cacheEventListener[] - - manager.close(); - } - - @Test - public void writeThroughCache() throws ClassNotFoundException { - // tag::writeThroughCache[] - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true); - - Cache writeThroughCache = cacheManager.createCache("writeThroughCache", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) - .withLoaderWriter(new SampleLoaderWriter<>(singletonMap(41L, "zero"))) // <1> - .build()); - - assertThat(writeThroughCache.get(41L), is("zero")); // <2> - writeThroughCache.put(42L, "one"); // <3> - assertThat(writeThroughCache.get(42L), equalTo("one")); - - cacheManager.close(); - // end::writeThroughCache[] - } - - @Test - public void writeBehindCache() throws ClassNotFoundException { - // tag::writeBehindCache[] - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true); - - Cache writeBehindCache = cacheManager.createCache("writeBehindCache", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) - .withLoaderWriter(new SampleLoaderWriter<>(singletonMap(41L, "zero"))) // <1> - .add(WriteBehindConfigurationBuilder // <2> - .newBatchedWriteBehindConfiguration(1, TimeUnit.SECONDS, 3)// <3> - .queueSize(3)// <4> - .concurrencyLevel(1) // <5> - .enableCoalescing()) // <6> - .build()); - - assertThat(writeBehindCache.get(41L), is("zero")); - writeBehindCache.put(42L, "one"); - writeBehindCache.put(43L, "two"); - writeBehindCache.put(42L, "This goes for the record"); - assertThat(writeBehindCache.get(42L), equalTo("This goes for the record")); - - cacheManager.close(); - // end::writeBehindCache[] - } - - @Test - public void registerListenerAtRuntime() throws InterruptedException { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.heap(10L))) - .build(true); - - Cache cache = cacheManager.getCache("cache", Long.class, String.class); - - // tag::registerListenerAtRuntime[] - ListenerObject listener = new ListenerObject(); // <1> - cache.getRuntimeConfiguration().registerCacheEventListener(listener, EventOrdering.ORDERED, - EventFiring.ASYNCHRONOUS, EnumSet.of(EventType.CREATED, EventType.REMOVED)); // <2> - - cache.put(1L, "one"); - cache.put(2L, "two"); - cache.remove(1L); - cache.remove(2L); - - cache.getRuntimeConfiguration().deregisterCacheEventListener(listener); // <3> - - cache.put(1L, "one again"); - cache.remove(1L); - // end::registerListenerAtRuntime[] - - cacheManager.close(); - } - - @Test - public void configuringEventProcessing() { - CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder - .newEventListenerConfiguration(ListenerObject.class, EventType.EVICTED).ordered().synchronous(); - // tag::configuringEventProcessingQueues[] - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.heap(5L)) - .withDispatcherConcurrency(10) // <1> - .withEventListenersThreadPool("listeners-pool") - .build(); - // end::configuringEventProcessingQueues[] - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration) - .build(true); - cacheManager.close(); - } - - @Test - public void cacheEvictionAdvisor() throws Exception { - // tag::cacheEvictionAdvisor[] - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.heap(2L)) // <1> - .withEvictionAdvisor(new OddKeysEvictionAdvisor<>()) // <2> - .build(); - - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("cache", cacheConfiguration) - .build(true); - - Cache cache = cacheManager.getCache("cache", Long.class, String.class); - - // Work with the cache - cache.put(42L, "The Answer!"); - cache.put(41L, "The wrong Answer!"); - cache.put(39L, "The other wrong Answer!"); - - cacheManager.close(); - // end::cacheEvictionAdvisor[] - } - - @Test - public void expiry() throws Exception { - // tag::expiry[] - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.heap(100)) // <1> - .withExpiry(Expirations.timeToLiveExpiration(Duration.of(20, TimeUnit.SECONDS))) // <2> - .build(); - // end::expiry[] - } - - @Test - public void customExpiry() throws Exception { - // tag::customExpiry[] - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.heap(100)) - .withExpiry(new CustomExpiry()) // <1> - .build(); - // end::customExpiry[] - } - - private static class Description { - int id; - String alias; - - Description(Description other) { - this.id = other.id; - this.alias = other.alias; - } - - Description(int id, String alias) { - this.id = id; - this.alias = alias; - } - - @Override - public boolean equals(final Object other) { - if(this == other) return true; - if(other == null || this.getClass() != other.getClass()) return false; - - Description that = (Description)other; - if(id != that.id) return false; - if ((alias == null) ? (alias != null) : !alias.equals(that.alias)) return false; - return true; - } - - @Override - public int hashCode() { - int result = 1; - result = 31 * result + id; - result = 31 * result + (alias == null ? 0 : alias.hashCode()); - return result; - } - } - - private static class Person implements Serializable { - String name; - int age; - - Person(Person other) { - this.name = other.name; - this.age = other.age; - } - - Person(String name, int age) { - this.name = name; - this.age = age; - } - - @Override - public boolean equals(final Object other) { - if(this == other) return true; - if(other == null || this.getClass() != other.getClass()) return false; - - Person that = (Person)other; - if(age != that.age) return false; - if((name == null) ? (that.name != null) : !name.equals(that.name)) return false; - - return true; - } - - @Override - public int hashCode() { - int result = 1; - result = 31 * result + age; - result = 31 * result + (name == null ? 0 : name.hashCode()); - return result; - } - } - - public static class DescriptionCopier extends ReadWriteCopier { - - @Override - public Description copy(final Description obj) { - return new Description(obj); - } - } - - public static class PersonCopier extends ReadWriteCopier { - - @Override - public Person copy(final Person obj) { - return new Person(obj); - } - } - - static class PersonSerializer extends JavaSerializer { - public PersonSerializer() { - super(ClassLoader.getSystemClassLoader()); - } - } - - private String getStoragePath() throws URISyntaxException { - return getClass().getClassLoader().getResource(".").toURI().getPath(); - } - - public static class CustomExpiry implements Expiry { - - @Override - public Duration getExpiryForCreation(Long key, String value) { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public Duration getExpiryForAccess(Long key, ValueSupplier value) { - throw new UnsupportedOperationException("TODO Implement me!"); - } - - @Override - public Duration getExpiryForUpdate(Long key, ValueSupplier oldValue, String newValue) { - throw new UnsupportedOperationException("TODO Implement me!"); - } - } - -} diff --git a/impl/src/test/java/org/ehcache/docs/Tiering.java b/impl/src/test/java/org/ehcache/docs/Tiering.java deleted file mode 100644 index 2d07b4c0f7..0000000000 --- a/impl/src/test/java/org/ehcache/docs/Tiering.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.docs; - -import java.io.File; -import java.io.IOException; - -import org.ehcache.Cache; -import org.ehcache.CacheManager; -import org.ehcache.PersistentCacheManager; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.ResourceType; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.docs.plugs.ListenerObject; -import org.ehcache.event.EventType; -import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -/** - * Tiering - */ -public class Tiering { - - @Rule - public TemporaryFolder folder = new TemporaryFolder(); - - @Test - public void tierSizing() { - // tag::heap[] - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES); // <1> - // or - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10); // <2> - // or - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, MemoryUnit.MB); // <3> - // end::heap[] - // tag::offheap[] - ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB); // <1> - // end::offheap[] - } - - @Test - public void testSingleTier() { - // tag::offheapOnly[] - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, // <1> - ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(2, MemoryUnit.GB)).build(); // <2> - // end::offheapOnly[] - } - - @Test - public void threeTiersCacheManager() throws Exception { - // tag::threeTiersCacheManager[] - PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .with(CacheManagerBuilder.persistence(new File(getStoragePath(), "myData"))) - .withCache("threeTieredCache", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .offheap(1, MemoryUnit.MB) - .disk(20, MemoryUnit.MB, true) - ) - ).build(true); - // end::threeTiersCacheManager[] - - persistentCacheManager.close(); - } - - @Test - public void persistentCacheManager() throws Exception { - // tag::persistentCacheManager[] - PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() // <1> - .with(CacheManagerBuilder.persistence(new File(getStoragePath(), "myData"))) // <2> - .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true)) // <3> - ) - .build(true); - - persistentCacheManager.close(); - // end::persistentCacheManager[] - } - - @Test - public void diskSegments() throws Exception { - // tag::diskSegments[] - String storagePath = getStoragePath(); - PersistentCacheManager persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .with(CacheManagerBuilder.persistence(new File(storagePath, "myData"))) - .withCache("less-segments", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB)) - .add(new OffHeapDiskStoreConfiguration(2)) // <1> - ) - .build(true); - - persistentCacheManager.close(); - // end::diskSegments[] - } - - @Test - public void updateResourcesAtRuntime() throws InterruptedException { - ListenerObject listener = new ListenerObject(); - CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration = CacheEventListenerConfigurationBuilder - .newEventListenerConfiguration(listener, EventType.EVICTED).unordered().synchronous(); - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10L, EntryUnit.ENTRIES)) - .add(cacheEventListenerConfiguration) - .build(); - - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration) - .build(true); - - Cache cache = cacheManager.getCache("cache", Long.class, String.class); - for(long i = 0; i < 20; i++ ){ - cache.put(i, "Hello World"); - } - assertThat(listener.evicted(), is(10)); - - cache.clear(); - listener.resetEvictionCount(); - - // tag::updateResourcesAtRuntime[] - ResourcePools pools = ResourcePoolsBuilder.newResourcePoolsBuilder().heap(20L, EntryUnit.ENTRIES).build(); // <1> - cache.getRuntimeConfiguration().updateResourcePools(pools); // <2> - assertThat(cache.getRuntimeConfiguration().getResourcePools() - .getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L)); - // end::updateResourcesAtRuntime[] - - for(long i = 0; i < 20; i++ ){ - cache.put(i, "Hello World"); - } - assertThat(listener.evicted(), is(0)); - - cacheManager.close(); - } - - @Test - public void testPersistentDiskTier() throws Exception { - // tag::diskPersistent[] - CacheManagerBuilder.newCacheManagerBuilder() - .with(CacheManagerBuilder.persistence(getStoragePath())) // <1> - .withCache("myCache", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(1, MemoryUnit.GB, true))); // <2> - // end::diskPersistent[]f - } - - @Test - public void testNotShared() { - // tag::notShared[] - ResourcePools pool = ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10).build(); - - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("test-cache1", CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, pool)) - .withCache("test-cache2", CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, pool)) - .build(true); - // end::notShared[] - } - - @Test - public void byteSizedTieredCache() { - // tag::byteSizedTieredCache[] - CacheConfiguration usesConfiguredInCacheConfig = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, MemoryUnit.KB) // <1> - .offheap(10, MemoryUnit.MB)) // <2> - .withSizeOfMaxObjectGraph(1000) - .withSizeOfMaxObjectSize(1000, MemoryUnit.B) // <3> - .build(); - - CacheConfiguration usesDefaultSizeOfEngineConfig = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, MemoryUnit.KB)) - .build(); - - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withDefaultSizeOfMaxObjectSize(500, MemoryUnit.B) - .withDefaultSizeOfMaxObjectGraph(2000) // <4> - .withCache("usesConfiguredInCache", usesConfiguredInCacheConfig) - .withCache("usesDefaultSizeOfEngine", usesDefaultSizeOfEngineConfig) - .build(true); - // end::byteSizedTieredCache[] - } - - private String getStoragePath() throws IOException { - return folder.newFolder().getAbsolutePath(); - } -} diff --git a/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java b/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java deleted file mode 100644 index c4cbb3ae98..0000000000 --- a/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.config.serializer; - -import org.ehcache.spi.persistence.StateRepository; -import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.spi.serialization.StatefulSerializer; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.nio.ByteBuffer; - -import static org.junit.Assert.*; - -public class DefaultSerializationProviderConfigurationTest { - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testAddSerializerFor() throws Exception { - DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, MinimalSerializer.class); - - assertSame(MinimalSerializer.class, config.getDefaultSerializers().get(Long.class)); - } - - @Test - public void testAddSerializerForDuplicateThrows() throws Exception { - DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, MinimalSerializer.class); - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Duplicate serializer for class"); - config.addSerializerFor(Long.class, MinimalSerializer.class); - } - - @Test - public void testAddSerializerForConstructorless() throws Exception { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); - DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, UnusableSerializer.class); - } - - @Test - public void testAddSerializerForStatefulSerializer() throws Exception { - DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, MinimalStatefulSerializer.class); - assertSame(MinimalStatefulSerializer.class, config.getDefaultSerializers().get(Long.class)); - } - - @Test - public void testAddSerializerForStatefulConstructorless() throws Exception { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); - DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, UnusableStatefulSerializer.class); - } - - @Test - public void testAddSerializerForLegacySerializer() throws Exception { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); - DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, LegacySerializer.class); - } - - private static class MinimalSerializer implements Serializer { - - public MinimalSerializer(ClassLoader loader) { - } - - @Override - public ByteBuffer serialize(final Long object) throws SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - } - - private static class LegacySerializer implements Serializer { - - public LegacySerializer(ClassLoader loader, FileBasedPersistenceContext context) { - } - - @Override - public ByteBuffer serialize(final Long object) throws SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - } - - private static class UnusableSerializer implements Serializer { - - @Override - public ByteBuffer serialize(final Long object) throws SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - } - - private static class MinimalStatefulSerializer implements StatefulSerializer { - - public MinimalStatefulSerializer(ClassLoader loader) { - } - - @Override - public void init(final StateRepository stateRepository) { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public ByteBuffer serialize(final Long object) throws SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - } - - private static class UnusableStatefulSerializer implements StatefulSerializer { - - @Override - public void init(final StateRepository stateRepository) { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public ByteBuffer serialize(final Long object) throws SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java deleted file mode 100644 index f529cedf61..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.classes; - -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Test; - -import java.io.Closeable; -import java.io.IOException; -import java.util.concurrent.atomic.AtomicInteger; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.sameInstance; -import static org.hamcrest.core.Is.is; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -/** - * @author Ludovic Orban - */ -public class ClassInstanceProviderTest { - - @SuppressWarnings("unchecked") - private Class> configClass = (Class)ClassInstanceConfiguration.class; - - @Test - public void testNewInstanceUsingAliasAndNoArgs() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, configClass); - - classInstanceProvider.preconfigured.put("test stuff", new ClassInstanceConfiguration(TestService.class)); - TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); - - assertThat(obj.theString, is(nullValue())); - } - - @Test - public void testNewInstanceUsingAliasAndArg() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, configClass); - - classInstanceProvider.preconfigured.put("test stuff", new ClassInstanceConfiguration<>(TestService.class, "test string")); - TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); - - assertThat(obj.theString, equalTo("test string")); - } - - @Test - public void testNewInstanceUsingServiceConfig() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, configClass); - - TestServiceConfiguration config = new TestServiceConfiguration(); - TestService obj = classInstanceProvider.newInstance("test stuff", config); - - assertThat(obj.theString, is(nullValue())); - } - - @Test - public void testNewInstanceUsingServiceConfigFactory() throws Exception { - TestServiceProviderConfiguration factoryConfig = new TestServiceProviderConfiguration(); - factoryConfig.getDefaults().put("test stuff", new ClassInstanceConfiguration(TestService.class)); - - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(factoryConfig, configClass); - classInstanceProvider.start(null); - - TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); - assertThat(obj.theString, is(nullValue())); - } - - @Test(expected = IllegalArgumentException.class) - public void testReleaseInstanceByAnotherProvider() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, null); - - classInstanceProvider.releaseInstance("foo"); - } - - @Test(expected = IllegalArgumentException.class) - public void testReleaseSameInstanceMultipleTimesThrows() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, null); - classInstanceProvider.providedVsCount.put("foo", new AtomicInteger(1)); - - classInstanceProvider.releaseInstance("foo"); - classInstanceProvider.releaseInstance("foo"); - } - - @Test - public void testReleaseCloseableInstance() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, null); - Closeable closeable = mock(Closeable.class); - classInstanceProvider.providedVsCount.put(closeable, new AtomicInteger(1)); - classInstanceProvider.instantiated.add(closeable); - - classInstanceProvider.releaseInstance(closeable); - verify(closeable).close(); - } - - @Test(expected = IOException.class) - public void testReleaseCloseableInstanceThrows() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, null); - Closeable closeable = mock(Closeable.class); - doThrow(IOException.class).when(closeable).close(); - classInstanceProvider.providedVsCount.put(closeable, new AtomicInteger(1)); - classInstanceProvider.instantiated.add(closeable); - - classInstanceProvider.releaseInstance(closeable); - } - - @Test - public void testNewInstanceWithActualInstanceInServiceConfig() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, configClass); - - TestService service = new TestService(); - TestServiceConfiguration config = new TestServiceConfiguration(service); - - TestService newService = classInstanceProvider.newInstance("test stuff", config); - - assertThat(newService, sameInstance(service)); - } - - @Test - public void testSameInstanceRetrievedMultipleTimesUpdatesTheProvidedCount() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, configClass); - - TestService service = new TestService(); - TestServiceConfiguration config = new TestServiceConfiguration(service); - - TestService newService = classInstanceProvider.newInstance("test stuff", config); - assertThat(newService, sameInstance(service)); - assertThat(classInstanceProvider.providedVsCount.get(service).get(), is(1)); - newService = classInstanceProvider.newInstance("test stuff", config); - assertThat(newService, sameInstance(service)); - assertThat(classInstanceProvider.providedVsCount.get(service).get(), is(2)); - } - - @Test - public void testInstancesNotCreatedByProviderDoesNotClose() throws IOException { - @SuppressWarnings("unchecked") - Class> configClass = (Class) ClassInstanceConfiguration.class; - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider<>(null, configClass); - - TestCloaseableService service = mock(TestCloaseableService.class); - TestCloaseableServiceConfig config = new TestCloaseableServiceConfig(service); - - TestCloaseableService newService = classInstanceProvider.newInstance("testClose", config); - assertThat(newService, sameInstance(service)); - classInstanceProvider.releaseInstance(newService); - verify(service, times(0)).close(); - - } - - - public static abstract class TestCloaseableService implements Service, Closeable { - - } - - public static class TestCloaseableServiceConfig extends ClassInstanceConfiguration implements ServiceConfiguration { - - public TestCloaseableServiceConfig() { - super(TestCloaseableService.class); - } - - public TestCloaseableServiceConfig(TestCloaseableService testCloaseableService) { - super(testCloaseableService); - } - - @Override - public Class getServiceType() { - return TestCloaseableService.class; - } - } - - public static class TestService implements Service { - public final String theString; - - public TestService() { - this(null); - } - - public TestService(String theString) { - this.theString = theString; - } - - @Override - public void start(ServiceProvider serviceProvider) { - } - - @Override - public void stop() { - } - } - - public static class TestServiceConfiguration extends ClassInstanceConfiguration implements ServiceConfiguration { - public TestServiceConfiguration() { - super(TestService.class); - } - - public TestServiceConfiguration(TestService service) { - super(service); - } - - @Override - public Class getServiceType() { - return TestService.class; - } - } - - public static class TestServiceProviderConfiguration extends ClassInstanceProviderConfiguration implements ServiceConfiguration { - @Override - public Class getServiceType() { - return TestService.class; - } - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java deleted file mode 100644 index e83ec9371c..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.events; - -import org.ehcache.core.spi.store.events.StoreEvent; -import org.ehcache.core.spi.store.events.StoreEventFilter; -import org.ehcache.core.spi.store.events.StoreEventListener; -import org.ehcache.event.EventType; -import org.hamcrest.Matcher; -import org.junit.Before; -import org.junit.Test; -import org.mockito.InOrder; - -import java.util.HashSet; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; - -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import static org.ehcache.impl.internal.store.offheap.AbstractOffHeapStoreTest.eventType; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -/** - * InvocationScopedEventSinkTest - */ -public class InvocationScopedEventSinkTest { - - private StoreEventListener listener; - private InvocationScopedEventSink eventSink; - - @Before - @SuppressWarnings("unchecked") - public void setUp() { - HashSet> storeEventListeners = new HashSet<>(); - listener = mock(StoreEventListener.class); - storeEventListeners.add(listener); - eventSink = new InvocationScopedEventSink(new HashSet<>(), - false, new BlockingQueue[] { new ArrayBlockingQueue>(10) }, storeEventListeners); - - } - - @Test - public void testReset() { - eventSink.created("k1", "v1"); - eventSink.evicted("k1", supplierOf("v2")); - eventSink.reset(); - eventSink.created("k1", "v1"); - eventSink.updated("k1", supplierOf("v1"), "v2"); - eventSink.evicted("k1", supplierOf("v2")); - eventSink.close(); - - InOrder inOrder = inOrder(listener); - Matcher> createdMatcher = eventType(EventType.CREATED); - inOrder.verify(listener).onEvent(argThat(createdMatcher)); - Matcher> updatedMatcher = eventType(EventType.UPDATED); - inOrder.verify(listener).onEvent(argThat(updatedMatcher)); - Matcher> evictedMatcher = eventType(EventType.EVICTED); - inOrder.verify(listener).onEvent(argThat(evictedMatcher)); - verifyNoMoreInteractions(listener); - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java deleted file mode 100644 index bb9066a316..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.events; - -import org.ehcache.event.EventType; -import org.ehcache.core.events.StoreEventSink; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; -import org.ehcache.core.spi.store.events.StoreEvent; -import org.ehcache.core.spi.store.events.StoreEventFilter; -import org.ehcache.core.spi.store.events.StoreEventListener; -import org.hamcrest.Matcher; -import org.junit.Test; -import org.mockito.ArgumentMatchers; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Random; -import java.util.concurrent.CountDownLatch; -import java.util.function.BiFunction; - -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import static org.ehcache.impl.internal.util.Matchers.eventOfType; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.withSettings; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -/** - * ScopedStoreEventDispatcherTest - */ -public class ScopedStoreEventDispatcherTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(ScopedStoreEventDispatcherTest.class); - - @Test - public void testRegistersOrderingChange() { - ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher<>(1); - - assertThat(dispatcher.isEventOrdering(), is(false)); - dispatcher.setEventOrdering(true); - assertThat(dispatcher.isEventOrdering(), is(true)); - dispatcher.setEventOrdering(false); - assertThat(dispatcher.isEventOrdering(), is(false)); - } - - @Test - @SuppressWarnings("unchecked") - public void testListenerNotifiedUnordered() { - ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher<>(1); - @SuppressWarnings("unchecked") - StoreEventListener listener = mock(StoreEventListener.class); - dispatcher.addEventListener(listener); - - StoreEventSink sink = dispatcher.eventSink(); - sink.created("test", "test"); - dispatcher.releaseEventSink(sink); - - verify(listener).onEvent(any(StoreEvent.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testListenerNotifiedOrdered() { - ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher<>(1); - @SuppressWarnings("unchecked") - StoreEventListener listener = mock(StoreEventListener.class); - dispatcher.addEventListener(listener); - dispatcher.setEventOrdering(true); - - StoreEventSink sink = dispatcher.eventSink(); - sink.created("test", "test"); - dispatcher.releaseEventSink(sink); - - verify(listener).onEvent(any(StoreEvent.class)); - } - - @Test - public void testEventFiltering() { - ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher<>(1); - @SuppressWarnings("unchecked") - StoreEventListener listener = mock(StoreEventListener.class, withSettings().verboseLogging()); - dispatcher.addEventListener(listener); - - @SuppressWarnings("unchecked") - StoreEventFilter filter = mock(StoreEventFilter.class); - when(filter.acceptEvent(eq(EventType.CREATED), anyString(), ArgumentMatchers.isNull(), anyString())).thenReturn(true); - when(filter.acceptEvent(eq(EventType.REMOVED), anyString(), anyString(), anyString())).thenReturn(false); - dispatcher.addEventFilter(filter); - - StoreEventSink sink = dispatcher.eventSink(); - sink.removed("gone", supplierOf("really gone")); - sink.created("new", "and shiny"); - dispatcher.releaseEventSink(sink); - - Matcher> matcher = eventOfType(EventType.CREATED); - verify(listener).onEvent(argThat(matcher)); - verifyNoMoreInteractions(listener); - } - - @Test - public void testOrderedEventDelivery() throws Exception { - final ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher<>(4); - dispatcher.setEventOrdering(true); - final ConcurrentHashMap map = new ConcurrentHashMap<>(); - final long[] keys = new long[] { 1L, 42L, 256L }; - map.put(keys[0], 125L); - map.put(keys[1], 42 * 125L); - map.put(keys[2], 256 * 125L); - - final ConcurrentHashMap resultMap = new ConcurrentHashMap<>(map); - dispatcher.addEventListener(event -> { - if (event.getNewValue()) { - resultMap.compute(event.getKey(), (key, value) -> value + 10L); - } else { - resultMap.compute(event.getKey(), (key, value) -> 7L - value); - } - }); - - final long seed = new Random().nextLong(); - LOGGER.info("Starting test with seed {}", seed); - - int workers = Runtime.getRuntime().availableProcessors() + 2; - final CountDownLatch latch = new CountDownLatch(workers); - for (int i = 0; i < workers; i++) { - final int index =i; - new Thread(() -> { - Random random = new Random(seed * index); - for (int j = 0; j < 10000; j++) { - int keyIndex = random.nextInt(3); - final StoreEventSink sink = dispatcher.eventSink(); - if (random.nextBoolean()) { - map.compute(keys[keyIndex], (key, value) -> { - long newValue = value + 10L; - sink.created(key, true); - return newValue; - }); - } else { - map.compute(keys[keyIndex], (key, value) -> { - long newValue = 7L - value; - sink.created(key, false); - return newValue; - }); - } - dispatcher.releaseEventSink(sink); - } - latch.countDown(); - }).start(); - } - - latch.await(); - - LOGGER.info("\n\tResult map {} \n\tWork map {}", resultMap, map); - - assertThat(resultMap, is(map)); - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/executor/PooledExecutionServiceTest.java b/impl/src/test/java/org/ehcache/impl/internal/executor/PooledExecutionServiceTest.java deleted file mode 100644 index 3c18d4758f..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/executor/PooledExecutionServiceTest.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.executor; - -import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; -import org.ehcache.impl.internal.util.ThreadFactoryUtil; -import org.junit.After; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.io.PrintWriter; -import java.io.StringWriter; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.LinkedBlockingDeque; -import java.util.concurrent.TimeUnit; - -import static org.assertj.core.api.Assertions.assertThat; - -/** - * @author Ludovic Orban - */ -public class PooledExecutionServiceTest { - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - PooledExecutionService pooledExecutionService; - - @After - public void after() { - if(pooledExecutionService != null) { - pooledExecutionService.stop(); - } - } - - @Test - public void testEmptyConfigThrowsAtStart() throws Exception { - PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); - pooledExecutionService = new PooledExecutionService(configuration); - - expectedException.expectMessage("Pool configuration is empty"); - pooledExecutionService.start(null); - } - - @Test - public void testGetOrderedExecutorFailsOnNonExistentPool() throws Exception { - PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); - configuration.addPool("getOrderedExecutorFailsOnNonExistentPool", 0, 1); - pooledExecutionService = new PooledExecutionService(configuration); - - pooledExecutionService.start(null); - - expectedException.expectMessage("Pool 'abc' is not in the set of available pools [getOrderedExecutorFailsOnNonExistentPool]"); - pooledExecutionService.getOrderedExecutor("abc", new LinkedBlockingDeque<>()); - } - - @Test - public void testGetOrderedExecutorFailsOnNonExistentDefaultPool() throws Exception { - PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); - configuration.addPool("getOrderedExecutorFailsOnNonExistentDefaultPool", 0, 1); - pooledExecutionService = new PooledExecutionService(configuration); - - pooledExecutionService.start(null); - - expectedException.expectMessage("Null pool alias provided and no default pool configured"); - pooledExecutionService.getOrderedExecutor(null, new LinkedBlockingDeque<>()); - } - - @Test - public void testGetOrderedExecutorSucceedsOnExistingPool() throws Exception { - PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); - configuration.addPool("getOrderedExecutorSucceedsOnExistingPool", 0, 1); - pooledExecutionService = new PooledExecutionService(configuration); - - pooledExecutionService.start(null); - - ExecutorService aaa = pooledExecutionService.getOrderedExecutor("getOrderedExecutorSucceedsOnExistingPool", new LinkedBlockingDeque<>()); - aaa.shutdown(); - } - - @Test - public void testGetOrderedExecutorSucceedsOnExistingDefaultPool() throws Exception { - PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); - configuration.addDefaultPool("getOrderedExecutorSucceedsOnExistingDefaultPool", 0, 1); - pooledExecutionService = new PooledExecutionService(configuration); - - pooledExecutionService.start(null); - - ExecutorService dflt = pooledExecutionService.getOrderedExecutor(null, new LinkedBlockingDeque<>()); - dflt.shutdown(); - } - - @Test - public void testAllThreadsAreStopped() throws Exception { - PooledExecutionServiceConfiguration configuration = new PooledExecutionServiceConfiguration(); - configuration.addDefaultPool("allThreadsAreStopped", 0, 1); - pooledExecutionService = new PooledExecutionService(configuration); - pooledExecutionService.start(null); - - final CountDownLatch latch = new CountDownLatch(1); - - pooledExecutionService.getScheduledExecutor("allThreadsAreStopped") - .execute(latch::countDown); - - assertThat(latch.await(30, TimeUnit.SECONDS)).isTrue(); - - pooledExecutionService.stop(); - - assertThat(Thread.currentThread().isInterrupted()).isFalse(); - - assertThat(pooledExecutionService.isStopped()).isTrue(); - } - - /** - * This method can be used to debug a failure in {@link #testAllThreadsAreStopped()} but also any other king of thread - * leaking. You can enable thread tracking in {@link ThreadFactoryUtil}. Note that on a slow machine, the detector might "lie". Because - * even if a thread pool is stopped, it doesn't mean all the underlying threads had the time to die. It only means that they are not - * processing any tasks anymore. - */ - public static void detectLeakingThreads() { - Set threadSet = Thread.getAllStackTraces().keySet(); - Set leakedThreads = new HashSet<>(); - - Map createdThreads = ThreadFactoryUtil.getCreatedThreads(); - - for(Thread thread : threadSet) { - if(thread.isAlive() && thread.getName().startsWith("Ehcache [")) { - int hash = System.identityHashCode(thread); - String stackTrace = null; - if(createdThreads != null) { - Exception exception = createdThreads.get(hash); - StringWriter errors = new StringWriter(); - exception.printStackTrace(new PrintWriter(errors)); - stackTrace = errors.toString(); - } - leakedThreads.add(thread + "(" + hash + ")" + stackTrace); - } - } - - assertThat(leakedThreads).isEmpty(); - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactoryTest.java b/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactoryTest.java deleted file mode 100644 index 91c6d3a5f5..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/WriteBehindProviderFactoryTest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.loaderwriter.writebehind; - -import org.ehcache.Cache; -import org.ehcache.CacheManager; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.config.builders.WriteBehindConfigurationBuilder; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; -import org.ehcache.spi.service.ServiceConfiguration; -import org.hamcrest.core.IsCollectionContaining; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.util.Collection; -import java.util.Map; - -import static java.util.concurrent.TimeUnit.SECONDS; -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - -/** - * @author rism - */ -public class WriteBehindProviderFactoryTest { - - @Rule - public ExpectedException expectedEx = ExpectedException.none(); - - @SuppressWarnings("unchecked") - @Test - public void testAddingWriteBehindConfigurationAtCacheLevel() { - CacheManagerBuilder cacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder(); - WriteBehindConfiguration writeBehindConfiguration = WriteBehindConfigurationBuilder.newBatchedWriteBehindConfiguration(Long.MAX_VALUE, SECONDS, 1) - .concurrencyLevel(3) - .queueSize(10) - .build(); - Class> klazz = (Class>) (Class) (SampleLoaderWriter.class); - CacheManager cacheManager = cacheManagerBuilder.build(true); - final Cache cache = cacheManager.createCache("cache", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(100)) - .add(writeBehindConfiguration) - .add(new DefaultCacheLoaderWriterConfiguration(klazz)) - .build()); - Collection> serviceConfiguration = cache.getRuntimeConfiguration() - .getServiceConfigurations(); - assertThat(serviceConfiguration, IsCollectionContaining.>hasItem(instanceOf(WriteBehindConfiguration.class))); - cacheManager.close(); - } - - @Test - public void testWriteBehindWithoutCacheLoaderWriter() { - expectedEx.expect(NullPointerException.class); - expectedEx.expectMessage("WriteBehind requires a non null CacheLoaderWriter"); - - WriteBehindProviderFactory factory = new WriteBehindProviderFactory(); - factory.create(null).createWriteBehindLoaderWriter(null, null); - } - - public static class SampleLoaderWriter implements CacheLoaderWriter { - - @Override - public V load(K key) throws Exception { - throw new UnsupportedOperationException("Implement Me"); - } - - @Override - public Map loadAll(Iterable keys) throws Exception { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public void write(K key, V value) throws Exception { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public void writeAll(Iterable> entries) throws Exception { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public void delete(K key) throws Exception { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public void deleteAll(Iterable keys) throws Exception { - throw new UnsupportedOperationException("Implement me!"); - } - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultCacheStatisticsTest.java b/impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultCacheStatisticsTest.java deleted file mode 100644 index e90d3f3112..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/statistics/DefaultCacheStatisticsTest.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.statistics; - -import java.util.concurrent.TimeUnit; - -import org.assertj.core.api.AbstractObjectAssert; -import org.ehcache.CacheManager; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.core.InternalCache; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.internal.TimeSourceConfiguration; -import org.ehcache.internal.TestTimeSource; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; - -public class DefaultCacheStatisticsTest { - - private static final int TIME_TO_EXPIRATION = 100; - - private DefaultCacheStatistics cacheStatistics; - private CacheManager cacheManager; - private InternalCache cache; - private TestTimeSource timeSource = new TestTimeSource(System.currentTimeMillis()); - - @Before - public void before() { - CacheConfiguration cacheConfiguration = - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - newResourcePoolsBuilder().heap(10)) - .withExpiry(Expirations.timeToLiveExpiration(Duration.of(TIME_TO_EXPIRATION, TimeUnit.MILLISECONDS))) - .build(); - - cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(new TimeSourceConfiguration(timeSource)) - .build(true); - - cache = (InternalCache) cacheManager.getCache("aCache", Long.class, String.class); - - cacheStatistics = new DefaultCacheStatistics(cache); - } - - @After - public void after() { - if(cacheManager != null) { - cacheManager.close(); - } - } - - @Test - public void getKnownStatistics() { - assertThat(cacheStatistics.getKnownStatistics()).containsOnlyKeys("Cache:HitCount", "Cache:MissCount", - "Cache:RemovalCount", "Cache:EvictionCount", "Cache:PutCount", - "OnHeap:ExpirationCount", "Cache:ExpirationCount", "OnHeap:HitCount", "OnHeap:MissCount", - "OnHeap:PutCount", "OnHeap:RemovalCount", "OnHeap:EvictionCount", - "OnHeap:MappingCount", "OnHeap:OccupiedByteSize"); - } - - @Test - public void getCacheHits() throws Exception { - cache.put(1L, "a"); - cache.get(1L); - assertThat(cacheStatistics.getCacheHits()).isEqualTo(1L); - assertStat("Cache:HitCount").isEqualTo(1L); - } - - @Test - public void getCacheHitPercentage() throws Exception { - cache.put(1L, "a"); - cache.get(1L); - assertThat(cacheStatistics.getCacheHitPercentage()).isEqualTo(100.0f); - } - - @Test - public void getCacheMisses() throws Exception { - cache.get(1L); - assertThat(cacheStatistics.getCacheMisses()).isEqualTo(1L); - assertStat("Cache:MissCount").isEqualTo(1L); - } - - @Test - public void getCacheMissPercentage() throws Exception { - cache.get(1L); - assertThat(cacheStatistics.getCacheMissPercentage()).isEqualTo(100.0f); - } - - @Test - public void getCacheGets() throws Exception { - cache.get(1L); - assertThat(cacheStatistics.getCacheGets()).isEqualTo(1); - } - - @Test - public void getCachePuts() throws Exception { - cache.put(1L, "a"); - assertThat(cacheStatistics.getCachePuts()).isEqualTo(1); - assertStat("Cache:PutCount").isEqualTo(1L); - } - - @Test - public void getCacheRemovals() throws Exception { - cache.put(1L, "a"); - cache.remove(1L); - assertThat(cacheStatistics.getCacheRemovals()).isEqualTo(1); - assertStat("Cache:RemovalCount").isEqualTo(1L); - } - - @Test - public void getCacheEvictions() throws Exception { - for (long i = 0; i < 11; i++) { - cache.put(i, "a"); - } - assertThat(cacheStatistics.getCacheEvictions()).isEqualTo(1); - assertStat("Cache:EvictionCount").isEqualTo(1L); - } - - @Test - public void getExpirations() throws Exception { - cache.put(1L, "a"); - timeSource.advanceTime(TIME_TO_EXPIRATION); - assertThat(cache.get(1L)).isNull(); - assertThat(cacheStatistics.getCacheExpirations()).isEqualTo(1L); - assertStat("Cache:ExpirationCount").isEqualTo(1L); - } - - @Test - public void getCacheAverageGetTime() throws Exception { - cache.get(1L); - assertThat(cacheStatistics.getCacheAverageGetTime()).isGreaterThan(0); - } - - @Test - public void getCacheAveragePutTime() throws Exception { - cache.put(1L, "a"); - assertThat(cacheStatistics.getCacheAveragePutTime()).isGreaterThan(0); - } - - @Test - public void getCacheAverageRemoveTime() throws Exception { - cache.remove(1L); - assertThat(cacheStatistics.getCacheAverageRemoveTime()).isGreaterThan(0); - } - - private AbstractObjectAssert assertStat(String key) { - return assertThat(cacheStatistics.getKnownStatistics().get(key).value()); - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/statistics/StatsUtilsTest.java b/impl/src/test/java/org/ehcache/impl/internal/statistics/StatsUtilsTest.java deleted file mode 100644 index b36a648c6c..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/statistics/StatsUtilsTest.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.statistics; - -import java.util.Collections; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Callable; - -import org.ehcache.Cache; -import org.ehcache.CacheManager; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.statistics.TierOperationOutcomes; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.StatisticsManager; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.ehcache.impl.internal.statistics.StatsUtils.findLowestTier; -import static org.ehcache.impl.internal.statistics.StatsUtils.findOperationStatisticOnChildren; -import static org.ehcache.impl.internal.statistics.StatsUtils.findStatisticOnDescendants; -import static org.ehcache.impl.internal.statistics.StatsUtils.findTiers; -import static org.ehcache.impl.internal.statistics.StatsUtils.hasProperty; -import static org.ehcache.impl.internal.statistics.StatsUtils.hasTag; -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.hasAttribute; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; - -public class StatsUtilsTest { - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - CacheManager cacheManager; - Cache cache; - - @Before - public void before() { - CacheConfiguration cacheConfiguration = - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)).build(); - - cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .build(true); - - cache = cacheManager.getCache("aCache", Long.class, String.class); - - StatisticsManager.createPassThroughStatistic(cache, "test", Collections.emptySet(), Collections.singletonMap("myproperty", "myvalue"), (Callable) () -> 0); - - cache.get(1L); - } - - @After - public void after() { - if(cacheManager != null) { - cacheManager.close(); - } - } - - @Test - public void testHasTag_found() throws Exception { - Set statResult = queryProperty("cache"); - assertThat(statResult.size()).isEqualTo(1); - } - - @Test - public void testHasTag_notfound() throws Exception { - Set statResult = queryProperty("xxx"); - assertThat(statResult.size()).isZero(); - } - - private Set queryProperty(String tag) { - @SuppressWarnings("unchecked") - Query statQuery = queryBuilder() - .descendants() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("name", "get"), - hasTag(tag) - )))) - .build(); - - return statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); - } - - @Test - public void testHasProperty_found() throws Exception { - Set statResult = queryProperty("myproperty", "myvalue"); - assertThat(statResult.size()).isEqualTo(1); - } - - @Test - public void testHasProperty_notfoundKey() throws Exception { - Set statResult = queryProperty("xxx"); - assertThat(statResult.size()).isZero(); - } - - @Test - public void testHasProperty_valueDoesntMatch() throws Exception { - Set statResult = queryProperty("myproperty", "xxx"); - assertThat(statResult.size()).isZero(); - } - - @SuppressWarnings("unchecked") - private Set queryProperty(String key, String value) { - Query statQuery = queryBuilder() - .descendants() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("name", "test"), - hasProperty(key, value) - )))) - .build(); - - return statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); - } - - @SuppressWarnings("unchecked") - @Test - public void testFindStatisticOnDescendantsWithDiscriminator() throws Exception { - OperationStatistic stat = findStatisticOnDescendants(cache, "OnHeap", "tier", "get"); - assertThat(stat.sum()).isEqualTo(1L); - - stat = findStatisticOnDescendants(cache, "OnHeap", "tier", "xxx"); - assertThat(stat).isNull(); - - stat = findStatisticOnDescendants(cache, "xxx", "tier", "xxx"); - assertThat(stat).isNull(); - } - - @SuppressWarnings("unchecked") - @Test - public void testFindStatisticOnDescendants() throws Exception { - OperationStatistic stat = findStatisticOnDescendants(cache, "OnHeap", "get"); - assertThat(stat.sum()).isEqualTo(1L); - - stat = findStatisticOnDescendants(cache, "OnHeap", "xxx"); - assertThat(stat).isNull(); - - stat = findStatisticOnDescendants(cache, "xxx", "xxx"); - assertThat(stat).isNull(); - } - - @Test - public void testFindCacheStatistic() { - OperationStatistic stat = findOperationStatisticOnChildren(cache, CacheOperationOutcomes.GetOutcome.class, "get"); - assertThat(stat.sum()).isEqualTo(1L); - } - - @Test - public void testFindCacheStatistic_notExisting() { - expectedException.expect(RuntimeException.class); - findOperationStatisticOnChildren(cache, CacheOperationOutcomes.GetOutcome.class, "xxx"); - } - - @Test - public void testFindTiers() { - String[] tiers = findTiers(cache); - assertThat(tiers).containsOnly("OnHeap"); - } - - @Test - public void testFindLowerTier_one() { - String tier = findLowestTier(new String[] { "OnHeap" }); - assertThat(tier).isEqualTo("OnHeap"); - } - - @Test - public void testFindLowerTier_two() { - String tier = findLowestTier(new String[] { "OnHeap", "Offheap" }); - assertThat(tier).isEqualTo("Offheap"); - } - - @Test - public void testFindLowerTier_three() { - String tier = findLowestTier(new String[] { "OnHeap", "Offheap", "Disk" }); - assertThat(tier).isEqualTo("Disk"); - } - - @Test - public void testFindLowerTier_none() { - expectedException.expect(RuntimeException.class); - findLowestTier(new String[0]); - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java deleted file mode 100644 index ea9fa0fa7f..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap; - -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.impl.internal.events.TestStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; -import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.internal.store.StoreFactory; -import org.ehcache.internal.store.StoreSPITest; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Before; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; - -public class ByteSizedOnHeapStoreByRefSPITest extends StoreSPITest { - - private StoreFactory storeFactory; - private static final int MAGIC_NUM = 500; - - @Override - protected StoreFactory getStoreFactory() { - return storeFactory; - } - - @Before - public void setUp() { - storeFactory = new StoreFactory() { - - final Copier DEFAULT_COPIER = new IdentityCopier(); - - @Override - public Store newStore() { - return newStore(null, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); - } - - @Override - public Store newStoreWithCapacity(long capacity) { - return newStore(capacity, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); - } - - @Override - public Store newStoreWithExpiry(Expiry expiry, TimeSource timeSource) { - return newStore(null, null, expiry, timeSource); - } - - @Override - public Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { - return newStore(null, evictionAdvisor, Expirations.noExpiration(), SystemTimeSource.INSTANCE); - } - - @SuppressWarnings("unchecked") - private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { - ResourcePools resourcePools = buildResourcePools(capacity); - Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), - evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, null, null); - return new OnHeapStore(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, - new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), new TestStoreEventDispatcher<>()); - } - - @Override - @SuppressWarnings("unchecked") - public Store.ValueHolder newValueHolder(final String value) { - return new CopiedOnHeapValueHolder(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, DEFAULT_COPIER); - } - - private ResourcePools buildResourcePools(Comparable capacityConstraint) { - if (capacityConstraint == null) { - return newResourcePoolsBuilder().heap(10, MemoryUnit.KB).build(); - } else { - return newResourcePoolsBuilder().heap((Long)capacityConstraint * MAGIC_NUM, MemoryUnit.B).build(); - } - } - - @Override - public Class getKeyType() { - return String.class; - } - - @Override - public Class getValueType() { - return String.class; - } - - @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; - } - - @Override - public String createKey(long seed) { - return new String("" + seed); - } - - @Override - public String createValue(long seed) { - return new String("" + seed); - } - - @Override - public void close(final Store store) { - OnHeapStore.Provider.close((OnHeapStore)store); - } - - @Override - public ServiceLocator getServiceProvider() { - ServiceLocator locator = dependencySet().build(); - try { - locator.startAllServices(); - } catch (Exception e) { - throw new RuntimeException(e); - } - return locator; - } - }; - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java deleted file mode 100644 index fa7d5cc3ab..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java +++ /dev/null @@ -1,335 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap; - -import org.ehcache.config.units.EntryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.copy.Copier; -import org.hamcrest.Matchers; -import org.junit.Test; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentMap; -import java.util.function.Function; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * @author Ludovic Orban - */ -public class OnHeapStoreBulkMethodsTest { - - public static final Copier DEFAULT_COPIER = new IdentityCopier(); - - @SuppressWarnings("unchecked") - protected Store.Configuration mockStoreConfig() { - @SuppressWarnings("rawtypes") - Store.Configuration config = mock(Store.Configuration.class); - when(config.getExpiry()).thenReturn(Expirations.noExpiration()); - when(config.getKeyType()).thenReturn(Number.class); - when(config.getValueType()).thenReturn(CharSequence.class); - when(config.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build()); - return config; - } - - @SuppressWarnings("unchecked") - protected OnHeapStore newStore() { - Store.Configuration configuration = mockStoreConfig(); - return new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, - new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - @Test - @SuppressWarnings("unchecked") - public void testBulkComputeFunctionGetsValuesOfEntries() throws Exception { - @SuppressWarnings("rawtypes") - Store.Configuration config = mock(Store.Configuration.class); - when(config.getExpiry()).thenReturn(Expirations.noExpiration()); - when(config.getKeyType()).thenReturn(Number.class); - when(config.getValueType()).thenReturn(Number.class); - when(config.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build()); - Store.Configuration configuration = config; - - OnHeapStore store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, - new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - store.put(1, 2); - store.put(2, 3); - store.put(3, 4); - - Map> result = store.bulkCompute(new HashSet(Arrays.asList(1, 2, 3, 4, 5, 6)), entries -> { - Map newValues = new HashMap<>(); - for (Map.Entry entry : entries) { - final Number currentValue = entry.getValue(); - if(currentValue == null) { - if(entry.getKey().equals(4)) { - newValues.put(entry.getKey(), null); - } else { - newValues.put(entry.getKey(), 0); - } - } else { - newValues.put(entry.getKey(), currentValue.intValue() * 2); - } - - } - return newValues.entrySet(); - }); - - ConcurrentMap check = new ConcurrentHashMap<>(); - check.put(1, 4); - check.put(2, 6); - check.put(3, 8); - check.put(4, 0); - check.put(5, 0); - check.put(6, 0); - - assertThat(result.get(1).value(), Matchers.is(check.get(1))); - assertThat(result.get(2).value(), Matchers.is(check.get(2))); - assertThat(result.get(3).value(), Matchers.is(check.get(3))); - assertThat(result.get(4), nullValue()); - assertThat(result.get(5).value(), Matchers.is(check.get(5))); - assertThat(result.get(6).value(), Matchers.is(check.get(6))); - - for (Number key : check.keySet()) { - final Store.ValueHolder holder = store.get(key); - if(holder != null) { - check.remove(key, holder.value()); - } - } - assertThat(check.size(), is(1)); - assertThat(check.containsKey(4), is(true)); - - } - - @Test - public void testBulkComputeHappyPath() throws Exception { - OnHeapStore store = newStore(); - store.put(1, "one"); - - Map> result = store.bulkCompute(new HashSet(Arrays.asList(1, 2)), entries -> { - Map newValues = new HashMap<>(); - for (Map.Entry entry : entries) { - if(entry.getKey().intValue() == 1) { - newValues.put(entry.getKey(), "un"); - } else if (entry.getKey().intValue() == 2) { - newValues.put(entry.getKey(), "deux"); - } - } - return newValues.entrySet(); - }); - - assertThat(result.size(), is(2)); - assertThat(result.get(1).value(), Matchers.equalTo("un")); - assertThat(result.get(2).value(), Matchers.equalTo("deux")); - - assertThat(store.get(1).value(), Matchers.equalTo("un")); - assertThat(store.get(2).value(), Matchers.equalTo("deux")); - } - - @Test - public void testBulkComputeStoreRemovesValueWhenFunctionReturnsNullMappings() throws Exception { - Store.Configuration configuration = mockStoreConfig(); - - @SuppressWarnings("unchecked") - OnHeapStore store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - store.put(1, "one"); - store.put(2, "two"); - store.put(3, "three"); - - Map> result = store.bulkCompute(new HashSet(Arrays.asList(2, 1, 5)), entries -> { - Map newValues = new HashMap<>(); - for (Map.Entry entry : entries) { - newValues.put(entry.getKey(), null); - } - return newValues.entrySet(); - }); - - assertThat(result.size(), is(3)); - - assertThat(store.get(1), is(nullValue())); - assertThat(store.get(2), is(nullValue())); - assertThat(store.get(3).value(), Matchers.equalTo("three")); - assertThat(store.get(5), is(nullValue())); - } - - @Test - public void testBulkComputeRemoveNullValueEntriesFromFunctionReturn() throws Exception { - - OnHeapStore store = newStore(); - store.put(1, "one"); - store.put(2, "two"); - store.put(3, "three"); - - Map> result = store.bulkCompute(new HashSet(Arrays.asList(1, 2, 3)), entries -> { - Map result1 = new HashMap<>(); - for (Map.Entry entry : entries) { - if (entry.getKey().equals(1)) { - result1.put(entry.getKey(), null); - } else if (entry.getKey().equals(3)) { - result1.put(entry.getKey(), null); - } else { - result1.put(entry.getKey(), entry.getValue()); - } - } - return result1.entrySet(); - }); - - assertThat(result.size(), is(3)); - assertThat(result.get(1), is(nullValue())); - assertThat(result.get(2).value(), Matchers.equalTo("two")); - assertThat(result.get(3), is(nullValue())); - - assertThat(store.get(1),is(nullValue())); - assertThat(store.get(2).value(), Matchers.equalTo("two")); - assertThat(store.get(3),is(nullValue())); - - } - - @Test - public void testBulkComputeIfAbsentFunctionDoesNotGetPresentKeys() throws Exception { - - OnHeapStore store = newStore(); - store.put(1, "one"); - store.put(2, "two"); - store.put(3, "three"); - - Map> result = store.bulkComputeIfAbsent(new HashSet(Arrays.asList(1, 2, 3, 4, 5, 6)), keys -> { - Map result1 = new HashMap<>(); - - for (Number key : keys) { - if (key.equals(1)) { - fail(); - } else if (key.equals(2)) { - fail(); - } else if (key.equals(3)) { - fail(); - } else { - result1.put(key, null); - } - } - return result1.entrySet(); - }); - - assertThat(result.size(), is(6)); - assertThat(result.get(1).value(), Matchers.equalTo("one")); - assertThat(result.get(2).value(), Matchers.equalTo("two")); - assertThat(result.get(3).value(), Matchers.equalTo("three")); - assertThat(result.get(4), is(nullValue())); - assertThat(result.get(5), is(nullValue())); - assertThat(result.get(6), is(nullValue())); - - assertThat(store.get(1).value(), Matchers.equalTo("one")); - assertThat(store.get(2).value(), Matchers.equalTo("two")); - assertThat(store.get(3).value(), Matchers.equalTo("three")); - assertThat(store.get(4), is(nullValue())); - assertThat(store.get(5), is(nullValue())); - assertThat(store.get(6), is(nullValue())); - - - } - - @Test - public void testBulkComputeIfAbsentDoesNotOverridePresentKeys() throws Exception { - - OnHeapStore store = newStore(); - store.put(1, "one"); - store.put(2, "two"); - store.put(3, "three"); - - Map> result = store.bulkComputeIfAbsent(new HashSet(Arrays.asList(1, 2, 3, 4, 5, 6)), numbers -> { - Map result1 = new HashMap<>(); - for (Number key : numbers) { - if(key.equals(4)) { - result1.put(key, "quatre"); - } else if(key.equals(5)) { - result1.put(key, "cinq"); - } else if(key.equals(6)) { - result1.put(key, "six"); - } - } - return result1.entrySet(); - }); - - assertThat(result.size(), is(6)); - assertThat(result.get(1).value(), Matchers.equalTo("one")); - assertThat(result.get(2).value(), Matchers.equalTo("two")); - assertThat(result.get(3).value(), Matchers.equalTo("three")); - assertThat(result.get(4).value(), Matchers.equalTo("quatre")); - assertThat(result.get(5).value(), Matchers.equalTo("cinq")); - assertThat(result.get(6).value(), Matchers.equalTo("six")); - - assertThat(store.get(1).value(), Matchers.equalTo("one")); - assertThat(store.get(2).value(), Matchers.equalTo("two")); - assertThat(store.get(3).value(), Matchers.equalTo("three")); - assertThat(store.get(4).value(), Matchers.equalTo("quatre")); - assertThat(store.get(5).value(), Matchers.equalTo("cinq")); - assertThat(store.get(6).value(), Matchers.equalTo("six")); - } - - @Test - public void testBulkComputeIfAbsentDoNothingOnNullValues() throws Exception { - - OnHeapStore store = newStore(); - store.put(1, "one"); - store.put(2, "two"); - store.put(3, "three"); - - Map> result = store.bulkComputeIfAbsent(new HashSet(Arrays.asList(2, 1, 5)), numbers -> { - Map result1 = new HashMap<>(); - for (Number key : numbers) { - // 5 is a missing key, so it's the only key that is going passed to the function - if(key.equals(5)) { - result1.put(key, null); - } - } - Set numbersSet = new HashSet<>(); - for (Number number : numbers) { - numbersSet.add(number); - } - assertThat(numbersSet.size(), is(1)); - assertThat(numbersSet.iterator().next(), Matchers.equalTo(5)); - - return result1.entrySet(); - }); - - assertThat(result.size(), is(3)); - assertThat(result.get(2).value(), Matchers.equalTo("two")); - assertThat(result.get(1).value(), Matchers.equalTo("one")); - assertThat(result.get(5), is(nullValue())); - - assertThat(store.get(1).value(), Matchers.equalTo("one")); - assertThat(store.get(2).value(), Matchers.equalTo("two")); - assertThat(store.get(3).value(), Matchers.equalTo("three")); - assertThat(store.get(5), is(nullValue())); - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java deleted file mode 100755 index a6b9a3a5e3..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap; - -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.impl.internal.events.TestStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; -import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.internal.store.StoreFactory; -import org.ehcache.internal.store.StoreSPITest; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Before; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; - -/** - * Test the {@link org.ehcache.internal.store.heap.OnHeapStore} compliance to the - * {@link Store} contract. - * - * @author Aurelien Broszniowski - */ - -public class OnHeapStoreByRefSPITest extends StoreSPITest { - - private StoreFactory storeFactory; - - @Override - protected StoreFactory getStoreFactory() { - return storeFactory; - } - - @Before - public void setUp() { - storeFactory = new StoreFactory() { - - final Copier DEFAULT_COPIER = new IdentityCopier(); - - @Override - public Store newStore() { - return newStore(null, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); - } - - @Override - public Store newStoreWithCapacity(long capacity) { - return newStore(capacity, null, Expirations.noExpiration(), SystemTimeSource.INSTANCE); - } - - @Override - public Store newStoreWithExpiry(Expiry expiry, TimeSource timeSource) { - return newStore(null, null, expiry, timeSource); - } - - @Override - public Store newStoreWithEvictionAdvisor(EvictionAdvisor evictionAdvisor) { - return newStore(null, evictionAdvisor, Expirations.noExpiration(), SystemTimeSource.INSTANCE); - } - - @SuppressWarnings("unchecked") - private Store newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { - ResourcePools resourcePools = buildResourcePools(capacity); - Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), - evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, null, null); - return new OnHeapStore(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), new TestStoreEventDispatcher<>()); - } - - @Override - @SuppressWarnings("unchecked") - public Store.ValueHolder newValueHolder(final String value) { - return new CopiedOnHeapValueHolder(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, DEFAULT_COPIER); - } - - private ResourcePools buildResourcePools(Comparable capacityConstraint) { - if (capacityConstraint == null) { - return newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build(); - } else { - return newResourcePoolsBuilder().heap((Long)capacityConstraint, EntryUnit.ENTRIES).build(); - } - } - - @Override - public Class getKeyType() { - return String.class; - } - - @Override - public Class getValueType() { - return String.class; - } - - @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; - } - - @Override - public String createKey(long seed) { - return new String("" + seed); - } - - @Override - public String createValue(long seed) { - return new String("" + seed); - } - - @Override - public void close(final Store store) { - OnHeapStore.Provider.close((OnHeapStore)store); - } - - @Override - public ServiceLocator getServiceProvider() { - ServiceLocator locator = dependencySet().build(); - try { - locator.startAllServices(); - } catch (Exception e) { - throw new RuntimeException(e); - } - return locator; - } - }; - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java deleted file mode 100644 index 9eddac0fc5..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap; - -import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; -import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.internal.tier.CachingTierFactory; -import org.ehcache.internal.tier.CachingTierSPITest; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.tiering.CachingTier; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Before; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; - -/** - * This factory instantiates a CachingTier - * - * @author Aurelien Broszniowski - */ -public class OnHeapStoreCachingTierByRefSPITest extends CachingTierSPITest { - - private CachingTierFactory cachingTierFactory; - - @Override - protected CachingTierFactory getCachingTierFactory() { - return cachingTierFactory; - } - - @Before - @SuppressWarnings("unchecked") - public void setUp() { - cachingTierFactory = new CachingTierFactory() { - - private final Copier DEFAULT_COPIER = new IdentityCopier(); - - @Override - public CachingTier newCachingTier() { - return newCachingTier(null); - } - - @Override - public CachingTier newCachingTier(long capacity) { - return newCachingTier((Long) capacity); - } - - private CachingTier newCachingTier(Long capacity) { - Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, - ClassLoader.getSystemClassLoader(), Expirations.noExpiration(), buildResourcePools(capacity), 0, null, null); - - return new OnHeapStore(config, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - @Override - public Store.ValueHolder newValueHolder(final String value) { - return new CopiedOnHeapValueHolder(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, DEFAULT_COPIER); - } - - @Override - public Store.Provider newProvider() { - return new OnHeapStore.Provider(); - } - - private ResourcePools buildResourcePools(Comparable capacityConstraint) { - if (capacityConstraint == null) { - return newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build(); - } else { - return newResourcePoolsBuilder().heap((Long)capacityConstraint, EntryUnit.ENTRIES).build(); - } - } - - @Override - public Class getKeyType() { - return String.class; - } - - @Override - public Class getValueType() { - return String.class; - } - - @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; - } - - @Override - public String createKey(long seed) { - return new String("" + seed); - } - - @Override - public String createValue(long seed) { - return new String("" + seed); - } - - @Override - public void disposeOf(CachingTier tier) { - } - - @Override - public ServiceProvider getServiceProvider() { - return dependencySet().build(); - } - - }; - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java deleted file mode 100644 index 0885e675b8..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap; - -import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.copy.SerializingCopier; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; -import org.ehcache.impl.internal.store.heap.holders.SerializedOnHeapValueHolder; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.impl.serialization.JavaSerializer; -import org.ehcache.internal.tier.CachingTierFactory; -import org.ehcache.internal.tier.CachingTierSPITest; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.tiering.CachingTier; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Before; - -import static java.lang.ClassLoader.getSystemClassLoader; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; - -/** - * This factory instantiates a CachingTier - * - * @author Aurelien Broszniowski - */ -public class OnHeapStoreCachingTierByValueSPITest extends CachingTierSPITest { - - private CachingTierFactory cachingTierFactory; - - @Override - protected CachingTierFactory getCachingTierFactory() { - return cachingTierFactory; - } - - @Before - public void setUp() { - cachingTierFactory = new CachingTierFactory() { - - final Serializer defaultSerializer = new JavaSerializer<>(getClass().getClassLoader()); - final Copier defaultCopier = new SerializingCopier<>(defaultSerializer); - - @Override - public CachingTier newCachingTier() { - return newCachingTier(null); - } - - @Override - public CachingTier newCachingTier(long capacity) { - return newCachingTier((Long) capacity); - } - - private CachingTier newCachingTier(Long capacity) { - Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, - ClassLoader.getSystemClassLoader(), Expirations.noExpiration(), buildResourcePools(capacity), 0, - new JavaSerializer<>(getSystemClassLoader()), new JavaSerializer<>(getSystemClassLoader())); - - return new OnHeapStore<>(config, SystemTimeSource.INSTANCE, defaultCopier, defaultCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - @Override - public Store.ValueHolder newValueHolder(final String value) { - return new SerializedOnHeapValueHolder<>(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, defaultSerializer); - } - - @Override - public Store.Provider newProvider() { - Store.Provider service = new OnHeapStore.Provider(); - service.start(dependencySet().build()); - return service; - } - - private ResourcePools buildResourcePools(Comparable capacityConstraint) { - if (capacityConstraint == null) { - return newResourcePoolsBuilder().heap(Long.MAX_VALUE, EntryUnit.ENTRIES).build(); - } else { - return newResourcePoolsBuilder().heap((Long)capacityConstraint, EntryUnit.ENTRIES).build(); - } - } - - @Override - public Class getKeyType() { - return String.class; - } - - @Override - public Class getValueType() { - return String.class; - } - - @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; - } - - @Override - public String createKey(long seed) { - return new String("" + seed); - } - - @Override - public String createValue(long seed) { - return new String("" + seed); - } - - @Override - public void disposeOf(CachingTier tier) { - } - - @Override - public ServiceProvider getServiceProvider() { - return dependencySet().build(); - } - - }; - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java deleted file mode 100644 index e4624a3956..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.store.heap; - -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.core.spi.store.events.StoreEvent; -import org.ehcache.core.spi.store.events.StoreEventListener; -import org.ehcache.event.EventType; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.events.TestStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; -import org.ehcache.impl.internal.store.heap.holders.OnHeapValueHolder; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.Store.ValueHolder; -import org.ehcache.internal.TestTimeSource; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.core.spi.store.heap.SizeOfEngine; -import org.junit.Test; - -import java.io.Serializable; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Semaphore; -import java.util.function.BiFunction; -import java.util.function.Function; - -import static org.ehcache.config.Eviction.noAdvice; -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.is; - -public class OnHeapStoreEvictionTest { - - protected OnHeapStoreForTests newStore() { - return newStore(SystemTimeSource.INSTANCE, null); - } - - /** eviction tests : asserting the evict method is called **/ - - @Test - public void testComputeCalledEnforceCapacity() throws Exception { - OnHeapStoreForTests store = newStore(); - - store.put("key", "value"); - store.compute("key", (mappedKey, mappedValue) -> "value2"); - - assertThat(store.enforceCapacityWasCalled(), is(true)); - } - - @Test - public void testComputeIfAbsentCalledEnforceCapacity() throws Exception { - OnHeapStoreForTests store = newStore(); - - store.computeIfAbsent("key", mappedKey -> "value2"); - - assertThat(store.enforceCapacityWasCalled(), is(true)); - } - - @Test - public void testFaultsDoNotGetToEvictionAdvisor() throws StoreAccessException { - final Semaphore semaphore = new Semaphore(0); - - final OnHeapStoreForTests store = newStore(SystemTimeSource.INSTANCE, noAdvice()); - - ExecutorService executor = Executors.newCachedThreadPool(); - try { - executor.submit(() -> store.getOrComputeIfAbsent("prime", key -> { - semaphore.acquireUninterruptibly(); - return new OnHeapValueHolder(0, 0, false) { - @Override - public String value() { - return key; - } - }; - })); - - while (!semaphore.hasQueuedThreads()); - store.put("boom", "boom"); - } finally { - semaphore.release(1); - executor.shutdown(); - } - } - - @Test - public void testEvictionCandidateLimits() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - StoreConfigurationImpl configuration = new StoreConfigurationImpl<>( - String.class, String.class, noAdvice(), - getClass().getClassLoader(), Expirations.noExpiration(), heap(1).build(), 1, null, null); - TestStoreEventDispatcher eventDispatcher = new TestStoreEventDispatcher<>(); - final String firstKey = "daFirst"; - eventDispatcher.addEventListener(event -> { - if (event.getType().equals(EventType.EVICTED)) { - assertThat(event.getKey(), is(firstKey)); - } - }); - OnHeapStore store = new OnHeapStore<>(configuration, timeSource, - new IdentityCopier<>(), new IdentityCopier<>(), new NoopSizeOfEngine(), eventDispatcher); - timeSource.advanceTime(10000L); - store.put(firstKey, "daValue"); - timeSource.advanceTime(10000L); - store.put("other", "otherValue"); - } - - protected OnHeapStoreForTests newStore(final TimeSource timeSource, - final EvictionAdvisor evictionAdvisor) { - return new OnHeapStoreForTests<>(new Store.Configuration() { - @SuppressWarnings("unchecked") - @Override - public Class getKeyType() { - return (Class) String.class; - } - - @SuppressWarnings("unchecked") - @Override - public Class getValueType() { - return (Class) Serializable.class; - } - - @Override - public EvictionAdvisor getEvictionAdvisor() { - return evictionAdvisor; - } - - @Override - public ClassLoader getClassLoader() { - return getClass().getClassLoader(); - } - - @Override - public Expiry getExpiry() { - return Expirations.noExpiration(); - } - - @Override - public ResourcePools getResourcePools() { - return newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).build(); - } - - @Override - public Serializer getKeySerializer() { - throw new AssertionError(); - } - - @Override - public Serializer getValueSerializer() { - throw new AssertionError(); - } - - @Override - public int getDispatcherConcurrency() { - return 1; - } - }, timeSource); - } - - public static class OnHeapStoreForTests extends OnHeapStore { - - private static final Copier DEFAULT_COPIER = new IdentityCopier(); - - @SuppressWarnings("unchecked") - public OnHeapStoreForTests(final Configuration config, final TimeSource timeSource) { - super(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - @SuppressWarnings("unchecked") - public OnHeapStoreForTests(final Configuration config, final TimeSource timeSource, final SizeOfEngine engine) { - super(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, engine, NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - private boolean enforceCapacityWasCalled = false; - - @Override - protected void enforceCapacity() { - enforceCapacityWasCalled = true; - super.enforceCapacity(); - } - - boolean enforceCapacityWasCalled() { - return enforceCapacityWasCalled; - } - - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java deleted file mode 100644 index e47e8339db..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap; - -import org.ehcache.Cache; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.copy.Copier; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Map; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import static java.util.Collections.singleton; -import static java.util.Collections.singletonMap; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.sameInstance; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * OnHeapStoreValueCopierTest - */ -@RunWith(Parameterized.class) -public class OnHeapStoreValueCopierTest { - - private static final Long KEY = 42L; - public static final Value VALUE = new Value("TheAnswer"); - public static final Supplier NOT_REPLACE_EQUAL = () -> false; - public static final Supplier REPLACE_EQUAL = () -> true; - - @Parameterized.Parameters(name = "copyForRead: {0} - copyForWrite: {1}") - public static Collection config() { - return Arrays.asList(new Object[][] { - {false, false}, {false, true}, {true, false}, {true, true} - }); - } - - @Parameterized.Parameter(value = 0) - public boolean copyForRead; - - @Parameterized.Parameter(value = 1) - public boolean copyForWrite; - - private OnHeapStore store; - - @Before - public void setUp() { - Store.Configuration configuration = mock(Store.Configuration.class); - when(configuration.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build()); - when(configuration.getKeyType()).thenReturn(Long.class); - when(configuration.getValueType()).thenReturn(Value.class); - when(configuration.getExpiry()).thenReturn(Expirations.noExpiration()); - @SuppressWarnings("unchecked") - Store.Configuration config = configuration; - - Copier valueCopier = new Copier() { - @Override - public Value copyForRead(Value obj) { - if (copyForRead) { - return new Value(obj.state); - } - return obj; - } - - @Override - public Value copyForWrite(Value obj) { - if (copyForWrite) { - return new Value(obj.state); - } - return obj; - } - }; - - store = new OnHeapStore<>(config, SystemTimeSource.INSTANCE, new IdentityCopier<>(), valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - @Test - public void testPutAndGet() throws StoreAccessException { - store.put(KEY, VALUE); - - Store.ValueHolder firstStoreValue = store.get(KEY); - Store.ValueHolder secondStoreValue = store.get(KEY); - compareValues(VALUE, firstStoreValue.value()); - compareValues(VALUE, secondStoreValue.value()); - compareReadValues(firstStoreValue.value(), secondStoreValue.value()); - } - - @Test - public void testCompute() throws StoreAccessException { - final Store.ValueHolder firstValue = store.compute(KEY, (aLong, value) -> VALUE); - store.compute(KEY, (aLong, value) -> { - compareReadValues(value, firstValue.value()); - return value; - }); - - compareValues(VALUE, firstValue.value()); - } - - @Test - public void testComputeWithoutReplaceEqual() throws StoreAccessException { - final Store.ValueHolder firstValue = store.compute(KEY, (aLong, value) -> VALUE, NOT_REPLACE_EQUAL); - store.compute(KEY, (aLong, value) -> { - compareReadValues(value, firstValue.value()); - return value; - }, NOT_REPLACE_EQUAL); - - compareValues(VALUE, firstValue.value()); - } - - @Test - public void testComputeWithReplaceEqual() throws StoreAccessException { - final Store.ValueHolder firstValue = store.compute(KEY, (aLong, value) -> VALUE, REPLACE_EQUAL); - store.compute(KEY, (aLong, value) -> { - compareReadValues(value, firstValue.value()); - return value; - }, REPLACE_EQUAL); - - compareValues(VALUE, firstValue.value()); - } - - @Test - public void testComputeIfAbsent() throws StoreAccessException { - Store.ValueHolder computedValue = store.computeIfAbsent(KEY, aLong -> VALUE); - Store.ValueHolder secondComputedValue = store.computeIfAbsent(KEY, aLong -> { - fail("There should have been a mapping"); - return null; - }); - compareValues(VALUE, computedValue.value()); - compareReadValues(computedValue.value(), secondComputedValue.value()); - } - - @Test - public void testBulkCompute() throws StoreAccessException { - final Map> results = store.bulkCompute(singleton(KEY), entries -> singletonMap(KEY, VALUE).entrySet()); - store.bulkCompute(singleton(KEY), entries -> { - compareReadValues(results.get(KEY).value(), entries.iterator().next().getValue()); - return entries; - }); - compareValues(VALUE, results.get(KEY).value()); - } - - @Test - public void testBulkComputeWithoutReplaceEqual() throws StoreAccessException { - final Map> results = store.bulkCompute(singleton(KEY), entries -> singletonMap(KEY, VALUE).entrySet(), NOT_REPLACE_EQUAL); - store.bulkCompute(singleton(KEY), entries -> { - compareReadValues(results.get(KEY).value(), entries.iterator().next().getValue()); - return entries; - }, NOT_REPLACE_EQUAL); - compareValues(VALUE, results.get(KEY).value()); - } - - @Test - public void testBulkComputeWithReplaceEqual() throws StoreAccessException { - final Map> results = store.bulkCompute(singleton(KEY), entries -> singletonMap(KEY, VALUE).entrySet(), REPLACE_EQUAL); - store.bulkCompute(singleton(KEY), entries -> { - compareReadValues(results.get(KEY).value(), entries.iterator().next().getValue()); - return entries; - }, REPLACE_EQUAL); - compareValues(VALUE, results.get(KEY).value()); - } - - @Test - public void testBulkComputeIfAbsent() throws StoreAccessException { - Map> results = store.bulkComputeIfAbsent(singleton(KEY), longs -> singletonMap(KEY, VALUE).entrySet()); - Map> secondResults = store.bulkComputeIfAbsent(singleton(KEY), longs -> { - fail("There should have been a mapping!"); - return null; - }); - compareValues(VALUE, results.get(KEY).value()); - compareReadValues(results.get(KEY).value(), secondResults.get(KEY).value()); - } - - @Test - public void testIterator() throws StoreAccessException { - store.put(KEY, VALUE); - Store.Iterator>> iterator = store.iterator(); - assertThat(iterator.hasNext(), is(true)); - while (iterator.hasNext()) { - Cache.Entry> entry = iterator.next(); - compareValues(entry.getValue().value(), VALUE); - } - } - - private void compareValues(Value first, Value second) { - if (copyForRead || copyForWrite) { - assertThat(first, not(sameInstance(second))); - } else { - assertThat(first, sameInstance(second)); - } - } - - private void compareReadValues(Value first, Value second) { - if (copyForRead) { - assertThat(first, not(sameInstance(second))); - } else { - assertThat(first, sameInstance(second)); - } - } - - public static final class Value { - String state; - - public Value(String state) { - this.state = state; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Value value = (Value) o; - return state.equals(value.state); - } - - @Override - public int hashCode() { - return state.hashCode(); - } - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java deleted file mode 100644 index 7a232c2f89..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap.bytesized; - -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; -import org.ehcache.impl.internal.store.heap.OnHeapStore; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.core.spi.store.Store; -import org.hamcrest.Matchers; -import org.junit.Test; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import java.util.function.Function; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class OnHeapStoreBulkMethodsTest extends org.ehcache.impl.internal.store.heap.OnHeapStoreBulkMethodsTest { - - @SuppressWarnings("unchecked") - protected Store.Configuration mockStoreConfig() { - @SuppressWarnings("rawtypes") - Store.Configuration config = mock(Store.Configuration.class); - when(config.getExpiry()).thenReturn(Expirations.noExpiration()); - when(config.getKeyType()).thenReturn(Number.class); - when(config.getValueType()).thenReturn(CharSequence.class); - when(config.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(100, MemoryUnit.KB).build()); - return config; - } - - @SuppressWarnings("unchecked") - protected OnHeapStore newStore() { - Store.Configuration configuration = mockStoreConfig(); - return new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, - new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - @SuppressWarnings("unchecked") - @Test - public void testBulkComputeFunctionGetsValuesOfEntries() throws Exception { - @SuppressWarnings("rawtypes") - Store.Configuration config = mock(Store.Configuration.class); - when(config.getExpiry()).thenReturn(Expirations.noExpiration()); - when(config.getKeyType()).thenReturn(Number.class); - when(config.getValueType()).thenReturn(Number.class); - when(config.getResourcePools()).thenReturn(newResourcePoolsBuilder().heap(100, MemoryUnit.KB).build()); - Store.Configuration configuration = config; - - OnHeapStore store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, - new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), NullStoreEventDispatcher.nullStoreEventDispatcher()); - store.put(1, 2); - store.put(2, 3); - store.put(3, 4); - - Map> result = store.bulkCompute(new HashSet(Arrays.asList(1, 2, 3, 4, 5, 6)), entries -> { - Map newValues = new HashMap<>(); - for (Map.Entry entry : entries) { - final Number currentValue = entry.getValue(); - if(currentValue == null) { - if(entry.getKey().equals(4)) { - newValues.put(entry.getKey(), null); - } else { - newValues.put(entry.getKey(), 0); - } - } else { - newValues.put(entry.getKey(), currentValue.intValue() * 2); - } - - } - return newValues.entrySet(); - }); - - ConcurrentMap check = new ConcurrentHashMap<>(); - check.put(1, 4); - check.put(2, 6); - check.put(3, 8); - check.put(4, 0); - check.put(5, 0); - check.put(6, 0); - - assertThat(result.get(1).value(), Matchers.is(check.get(1))); - assertThat(result.get(2).value(), Matchers.is(check.get(2))); - assertThat(result.get(3).value(), Matchers.is(check.get(3))); - assertThat(result.get(4), nullValue()); - assertThat(result.get(5).value(), Matchers.is(check.get(5))); - assertThat(result.get(6).value(), Matchers.is(check.get(6))); - - for (Number key : check.keySet()) { - final Store.ValueHolder holder = store.get(key); - if(holder != null) { - check.remove(key, holder.value()); - } - } - assertThat(check.size(), is(1)); - assertThat(check.containsKey(4), is(true)); - - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java deleted file mode 100644 index 076d998527..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap.bytesized; - -import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.copy.IdentityCopier; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; -import org.ehcache.impl.internal.store.heap.OnHeapStore; -import org.ehcache.impl.internal.store.heap.holders.CopiedOnHeapValueHolder; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.internal.tier.CachingTierFactory; -import org.ehcache.internal.tier.CachingTierSPITest; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.tiering.CachingTier; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Before; - -import java.util.Arrays; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; - -public class OnHeapStoreCachingTierByRefSPITest extends CachingTierSPITest { - - private CachingTierFactory cachingTierFactory; - - @Override - protected CachingTierFactory getCachingTierFactory() { - return cachingTierFactory; - } - - @Before - @SuppressWarnings("unchecked") - public void setUp() { - cachingTierFactory = new CachingTierFactory() { - - private final Copier DEFAULT_COPIER = new IdentityCopier(); - - @Override - public CachingTier newCachingTier() { - return newCachingTier(null); - } - - @Override - public CachingTier newCachingTier(long capacity) { - return newCachingTier((Long) capacity); - } - - private CachingTier newCachingTier(Long capacity) { - Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, - ClassLoader.getSystemClassLoader(), Expirations.noExpiration(), buildResourcePools(capacity), 0, null, null); - - return new OnHeapStore(config, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, - new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - @Override - public Store.ValueHolder newValueHolder(final String value) { - return new CopiedOnHeapValueHolder(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, DEFAULT_COPIER); - } - - @Override - public Store.Provider newProvider() { - return new OnHeapStore.Provider(); - } - - private ResourcePools buildResourcePools(Comparable capacityConstraint) { - if (capacityConstraint == null) { - capacityConstraint = 10L; - } - return newResourcePoolsBuilder().heap((Long)capacityConstraint, MemoryUnit.MB).build(); - } - - @Override - public Class getKeyType() { - return String.class; - } - - @Override - public Class getValueType() { - return String.class; - } - - @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; - } - - @Override - public String createKey(long seed) { - return Long.toString(seed); - } - - @Override - public String createValue(long seed) { - char[] chars = new char[600 * 1024]; - Arrays.fill(chars, (char) (0x1 + (seed & 0x7e))); - return new String(chars); - } - - @Override - public void disposeOf(CachingTier tier) { - } - - @Override - public ServiceProvider getServiceProvider() { - return dependencySet().build(); - } - - }; - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java deleted file mode 100644 index 9761e1b7d0..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.heap.bytesized; - -import org.ehcache.config.ResourcePools; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.impl.copy.SerializingCopier; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; -import org.ehcache.impl.internal.store.heap.OnHeapStore; -import org.ehcache.impl.internal.store.heap.holders.SerializedOnHeapValueHolder; -import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.impl.serialization.JavaSerializer; -import org.ehcache.internal.tier.CachingTierFactory; -import org.ehcache.internal.tier.CachingTierSPITest; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.tiering.CachingTier; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Before; - -import java.util.Arrays; - -import static java.lang.ClassLoader.getSystemClassLoader; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; - -public class OnHeapStoreCachingTierByValueSPITest extends CachingTierSPITest { - - private CachingTierFactory cachingTierFactory; - - @Override - protected CachingTierFactory getCachingTierFactory() { - return cachingTierFactory; - } - - @Before - public void setUp() { - cachingTierFactory = new CachingTierFactory() { - - final Serializer defaultSerializer = new JavaSerializer<>(getClass().getClassLoader()); - final Copier defaultCopier = new SerializingCopier<>(defaultSerializer); - - @Override - public CachingTier newCachingTier() { - return newCachingTier(null); - } - - @Override - public CachingTier newCachingTier(long capacity) { - return newCachingTier((Long) capacity); - } - - private CachingTier newCachingTier(Long capacity) { - Store.Configuration config = new StoreConfigurationImpl<>(getKeyType(), getValueType(), null, - ClassLoader.getSystemClassLoader(), Expirations.noExpiration(), buildResourcePools(capacity), 0, - new JavaSerializer<>(getSystemClassLoader()), new JavaSerializer<>(getSystemClassLoader())); - return new OnHeapStore<>(config, SystemTimeSource.INSTANCE, defaultCopier, defaultCopier, - new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE), NullStoreEventDispatcher.nullStoreEventDispatcher()); - } - - @Override - public Store.ValueHolder newValueHolder(final String value) { - return new SerializedOnHeapValueHolder<>(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, defaultSerializer); - } - - @Override - public Store.Provider newProvider() { - Store.Provider service = new OnHeapStore.Provider(); - service.start(dependencySet().build()); - return service; - } - - private ResourcePools buildResourcePools(Comparable capacityConstraint) { - if (capacityConstraint == null) { - capacityConstraint = 10L; - } - return newResourcePoolsBuilder().heap((Long)capacityConstraint, MemoryUnit.MB).build(); - } - - @Override - public Class getKeyType() { - return String.class; - } - - @Override - public Class getValueType() { - return String.class; - } - - @Override - public ServiceConfiguration[] getServiceConfigurations() { - return new ServiceConfiguration[0]; - } - - @Override - public String createKey(long seed) { - return Long.toString(seed); - } - - @Override - public String createValue(long seed) { - char[] chars = new char[600 * 1024]; - Arrays.fill(chars, (char) (0x1 + (seed & 0x7e))); - return new String(chars); - } - - @Override - public void disposeOf(CachingTier tier) { - } - - @Override - public ServiceProvider getServiceProvider() { - return dependencySet().build(); - } - - }; - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreEvictionTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreEvictionTest.java deleted file mode 100644 index 37abbe782c..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreEvictionTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.store.heap.bytesized; - -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.serialization.Serializer; - -import java.io.Serializable; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; - -public class OnHeapStoreEvictionTest extends org.ehcache.impl.internal.store.heap.OnHeapStoreEvictionTest { - - protected OnHeapStoreForTests newStore(final TimeSource timeSource, - final EvictionAdvisor evictionAdvisor) { - return new OnHeapStoreForTests<>(new Store.Configuration() { - @SuppressWarnings("unchecked") - @Override - public Class getKeyType() { - return (Class) String.class; - } - - @SuppressWarnings("unchecked") - @Override - public Class getValueType() { - return (Class) Serializable.class; - } - - @Override - public EvictionAdvisor getEvictionAdvisor() { - return evictionAdvisor; - } - - @Override - public ClassLoader getClassLoader() { - return getClass().getClassLoader(); - } - - @Override - public Expiry getExpiry() { - return Expirations.noExpiration(); - } - - @Override - public ResourcePools getResourcePools() { - return newResourcePoolsBuilder().heap(500, MemoryUnit.B).build(); - } - - @Override - public Serializer getKeySerializer() { - throw new AssertionError(); - } - - @Override - public Serializer getValueSerializer() { - throw new AssertionError(); - } - - @Override - public int getDispatcherConcurrency() { - return 0; - } - }, timeSource, new DefaultSizeOfEngine(Long.MAX_VALUE, Long.MAX_VALUE)); - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java deleted file mode 100644 index 6bed6257fa..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java +++ /dev/null @@ -1,687 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.offheap; - -import org.ehcache.Cache; -import org.ehcache.ValueSupplier; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.event.EventType; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.core.spi.store.AbstractValueHolder; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.events.StoreEvent; -import org.ehcache.core.spi.store.events.StoreEventListener; -import org.ehcache.core.spi.store.tiering.CachingTier; -import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome; -import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.hamcrest.Matchers; -import org.hamcrest.TypeSafeMatcher; -import org.junit.After; -import org.junit.Test; -import org.terracotta.context.ContextElement; -import org.terracotta.context.TreeNode; -import org.terracotta.context.query.QueryBuilder; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.StatisticsManager; - -import java.util.ArrayList; -import java.util.EnumSet; -import java.util.List; -import java.util.Random; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import static org.ehcache.impl.internal.util.Matchers.valueHeld; -import static org.ehcache.impl.internal.util.StatisticsTestUtils.validateStats; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.lessThan; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -/** - * - * @author cdennis - */ -public abstract class AbstractOffHeapStoreTest { - - private TestTimeSource timeSource = new TestTimeSource(); - private AbstractOffHeapStore offHeapStore; - - @After - public void after() { - if(offHeapStore != null) { - destroyStore(offHeapStore); - } - } - - @Test - public void testGetAndRemoveNoValue() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.noExpiration()); - - assertThat(offHeapStore.getAndRemove("1"), is(nullValue())); - validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS)); - } - - @Test - public void testGetAndRemoveValue() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.noExpiration()); - - offHeapStore.put("1", "one"); - assertThat(offHeapStore.getAndRemove("1").value(), equalTo("one")); - validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.HIT_REMOVED)); - assertThat(offHeapStore.get("1"), is(nullValue())); - } - - @Test - public void testGetAndRemoveExpiredElementReturnsNull() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS))); - - assertThat(offHeapStore.getAndRemove("1"), is(nullValue())); - - offHeapStore.put("1", "one"); - - final AtomicReference> invalidated = new AtomicReference<>(); - offHeapStore.setInvalidationListener((key, valueHolder) -> invalidated.set(valueHolder)); - - timeSource.advanceTime(20); - assertThat(offHeapStore.getAndRemove("1"), is(nullValue())); - assertThat(invalidated.get().value(), equalTo("one")); - assertThat(invalidated.get().isExpired(timeSource.getTimeMillis(), TimeUnit.MILLISECONDS), is(true)); - assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(1L)); - } - - @Test - public void testInstallMapping() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS))); - - assertThat(offHeapStore.installMapping("1", key -> new SimpleValueHolder<>("one", timeSource.getTimeMillis(), 15)).value(), equalTo("one")); - - validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.InstallMappingOutcome.PUT)); - - timeSource.advanceTime(20); - - try { - offHeapStore.installMapping("1", key -> new SimpleValueHolder<>("un", timeSource.getTimeMillis(), 15)); - fail("expected AssertionError"); - } catch (AssertionError ae) { - // expected - } - } - - @Test - public void testInvalidateKeyAbsent() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS))); - - final AtomicReference> invalidated = new AtomicReference<>(); - offHeapStore.setInvalidationListener((key, valueHolder) -> invalidated.set(valueHolder)); - - offHeapStore.invalidate("1"); - assertThat(invalidated.get(), is(nullValue())); - validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.InvalidateOutcome.MISS)); - } - - @Test - public void testInvalidateKeyPresent() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS))); - - offHeapStore.put("1", "one"); - - final AtomicReference> invalidated = new AtomicReference<>(); - offHeapStore.setInvalidationListener((key, valueHolder) -> invalidated.set(valueHolder)); - - offHeapStore.invalidate("1"); - assertThat(invalidated.get().value(), equalTo("one")); - validateStats(offHeapStore, EnumSet.of(LowerCachingTierOperationsOutcome.InvalidateOutcome.REMOVED)); - - assertThat(offHeapStore.get("1"), is(nullValue())); - } - - @Test - public void testClear() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS))); - - offHeapStore.put("1", "one"); - offHeapStore.put("2", "two"); - offHeapStore.put("3", "three"); - offHeapStore.clear(); - - assertThat(offHeapStore.get("1"), is(nullValue())); - assertThat(offHeapStore.get("2"), is(nullValue())); - assertThat(offHeapStore.get("3"), is(nullValue())); - } - - @Test - public void testWriteBackOfValueHolder() throws StoreAccessException { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS))); - - offHeapStore.put("key1", "value1"); - timeSource.advanceTime(10); - OffHeapValueHolder valueHolder = (OffHeapValueHolder)offHeapStore.get("key1"); - assertThat(valueHolder.lastAccessTime(TimeUnit.MILLISECONDS), is(10L)); - timeSource.advanceTime(10); - assertThat(offHeapStore.get("key1"), notNullValue()); - timeSource.advanceTime(16); - assertThat(offHeapStore.get("key1"), nullValue()); - } - - @Test - public void testEvictionAdvisor() throws StoreAccessException { - Expiry expiry = Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS)); - EvictionAdvisor evictionAdvisor = (key, value) -> true; - - performEvictionTest(timeSource, expiry, evictionAdvisor); - } - - @Test - public void testBrokenEvictionAdvisor() throws StoreAccessException { - Expiry expiry = Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS)); - EvictionAdvisor evictionAdvisor = (key, value) -> { - throw new UnsupportedOperationException("Broken advisor!"); - }; - - performEvictionTest(timeSource, expiry, evictionAdvisor); - } - - @Test - public void testFlushUpdatesAccessStats() throws StoreAccessException { - Expiry expiry = Expirations.timeToIdleExpiration(new Duration(15L, TimeUnit.MILLISECONDS)); - offHeapStore = createAndInitStore(timeSource, expiry); - try { - final String key = "foo"; - final String value = "bar"; - offHeapStore.put(key, value); - final Store.ValueHolder firstValueHolder = offHeapStore.getAndFault(key); - offHeapStore.put(key, value); - final Store.ValueHolder secondValueHolder = offHeapStore.getAndFault(key); - timeSource.advanceTime(10); - ((AbstractValueHolder) firstValueHolder).accessed(timeSource.getTimeMillis(), expiry.getExpiryForAccess(key, supplierOf(value))); - timeSource.advanceTime(10); - ((AbstractValueHolder) secondValueHolder).accessed(timeSource.getTimeMillis(), expiry.getExpiryForAccess(key, supplierOf(value))); - assertThat(offHeapStore.flush(key, new DelegatingValueHolder<>(firstValueHolder)), is(false)); - assertThat(offHeapStore.flush(key, new DelegatingValueHolder<>(secondValueHolder)), is(true)); - timeSource.advanceTime(10); // this should NOT affect - assertThat(offHeapStore.getAndFault(key).lastAccessTime(TimeUnit.MILLISECONDS), is(secondValueHolder.creationTime(TimeUnit.MILLISECONDS) + 20)); - } finally { - destroyStore(offHeapStore); - } - } - - @Test - public void testFlushUpdatesHits() throws StoreAccessException { - offHeapStore = createAndInitStore(timeSource, Expirations.noExpiration()); - final String key = "foo1"; - final String value = "bar1"; - offHeapStore.put(key, value); - for(int i = 0; i < 5; i++) { - final Store.ValueHolder valueHolder = offHeapStore.getAndFault(key); - timeSource.advanceTime(1); - ((AbstractValueHolder)valueHolder).accessed(timeSource.getTimeMillis(), new Duration(1L, TimeUnit.MILLISECONDS)); - assertThat(offHeapStore.flush(key, new DelegatingValueHolder<>(valueHolder)), is(true)); - } - assertThat(offHeapStore.getAndFault(key).hits(), is(5L)); - } - - @Test - public void testExpiryEventFiredOnExpiredCachedEntry() throws StoreAccessException { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(10L, TimeUnit.MILLISECONDS))); - - final List expiredKeys = new ArrayList<>(); - offHeapStore.getStoreEventSource().addEventListener(event -> { - if (event.getType() == EventType.EXPIRED) { - expiredKeys.add(event.getKey()); - } - }); - - offHeapStore.put("key1", "value1"); - offHeapStore.put("key2", "value2"); - - offHeapStore.get("key1"); // Bring the entry to the caching tier - - timeSource.advanceTime(11); // Expire the elements - - offHeapStore.get("key1"); - offHeapStore.get("key2"); - assertThat(expiredKeys, containsInAnyOrder("key1", "key2")); - assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(2L)); - } - - @Test - public void testGetWithExpiryOnAccess() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.builder().setAccess(Duration.ZERO).build()); - offHeapStore.put("key", "value"); - final AtomicReference expired = new AtomicReference<>(); - offHeapStore.getStoreEventSource().addEventListener(event -> { - if (event.getType() == EventType.EXPIRED) { - expired.set(event.getKey()); - } - }); - assertThat(offHeapStore.get("key"), valueHeld("value")); - assertThat(expired.get(), is("key")); - } - - @Test - public void testExpiryCreateException() throws Exception{ - offHeapStore = createAndInitStore(timeSource, new Expiry() { - @Override - public Duration getExpiryForCreation(String key, String value) { - throw new RuntimeException(); - } - - @Override - public Duration getExpiryForAccess(String key, ValueSupplier value) { - throw new AssertionError(); - } - - @Override - public Duration getExpiryForUpdate(String key, ValueSupplier oldValue, String newValue) { - throw new AssertionError(); - } - }); - offHeapStore.put("key", "value"); - assertNull(offHeapStore.get("key")); - } - - @Test - public void testExpiryAccessException() throws Exception{ - offHeapStore = createAndInitStore(timeSource, new Expiry() { - @Override - public Duration getExpiryForCreation(String key, String value) { - return Duration.INFINITE; - } - - @Override - public Duration getExpiryForAccess(String key, ValueSupplier value) { - throw new RuntimeException(); - } - - @Override - public Duration getExpiryForUpdate(String key, ValueSupplier oldValue, String newValue) { - return null; - } - }); - - offHeapStore.put("key", "value"); - assertThat(offHeapStore.get("key"), valueHeld("value")); - assertNull(offHeapStore.get("key")); - } - - @Test - public void testExpiryUpdateException() throws Exception{ - offHeapStore = createAndInitStore(timeSource, new Expiry() { - @Override - public Duration getExpiryForCreation(String key, String value) { - return Duration.INFINITE; - } - - @Override - public Duration getExpiryForAccess(String key, ValueSupplier value) { - return Duration.INFINITE; - } - - @Override - public Duration getExpiryForUpdate(String key, ValueSupplier oldValue, String newValue) { - if (timeSource.getTimeMillis() > 0) { - throw new RuntimeException(); - } - return Duration.INFINITE; - } - }); - - offHeapStore.put("key", "value"); - assertThat(offHeapStore.get("key").value(), is("value")); - timeSource.advanceTime(1000); - offHeapStore.put("key", "newValue"); - assertNull(offHeapStore.get("key")); - } - - @Test - public void testGetAndFaultOnExpiredEntry() throws StoreAccessException { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(10L, TimeUnit.MILLISECONDS))); - try { - offHeapStore.put("key", "value"); - timeSource.advanceTime(20L); - - Store.ValueHolder valueHolder = offHeapStore.getAndFault("key"); - assertThat(valueHolder, nullValue()); - assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(1L)); - } finally { - destroyStore(offHeapStore); - } - } - - @Test - public void testComputeExpiresOnAccess() throws StoreAccessException { - timeSource.advanceTime(1000L); - offHeapStore = createAndInitStore(timeSource, - Expirations.builder().setAccess(Duration.ZERO).setUpdate(Duration.ZERO).build()); - - offHeapStore.put("key", "value"); - Store.ValueHolder result = offHeapStore.compute("key", (s, s2) -> s2, () -> false); - - assertThat(result, valueHeld("value")); - } - - @Test - public void testComputeExpiresOnUpdate() throws StoreAccessException { - timeSource.advanceTime(1000L); - - offHeapStore = createAndInitStore(timeSource, - Expirations.builder().setAccess(Duration.ZERO).setUpdate(Duration.ZERO).build()); - - offHeapStore.put("key", "value"); - Store.ValueHolder result = offHeapStore.compute("key", (s, s2) -> "newValue", () -> false); - - assertThat(result, valueHeld("newValue")); - } - - @Test - public void testComputeOnExpiredEntry() throws StoreAccessException { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(10L, TimeUnit.MILLISECONDS))); - - offHeapStore.put("key", "value"); - timeSource.advanceTime(20L); - - offHeapStore.compute("key", (mappedKey, mappedValue) -> { - assertThat(mappedKey, is("key")); - assertThat(mappedValue, Matchers.nullValue()); - return "value2"; - }); - assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(1L)); - } - - @Test - public void testComputeIfAbsentOnExpiredEntry() throws StoreAccessException { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToIdleExpiration(new Duration(10L, TimeUnit.MILLISECONDS))); - - offHeapStore.put("key", "value"); - timeSource.advanceTime(20L); - - offHeapStore.computeIfAbsent("key", mappedKey -> { - assertThat(mappedKey, is("key")); - return "value2"; - }); - assertThat(getExpirationStatistic(offHeapStore).count(StoreOperationOutcomes.ExpirationOutcome.SUCCESS), is(1L)); - } - - @Test - public void testIteratorDoesNotSkipOrExpiresEntries() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToLiveExpiration(new Duration(10L, TimeUnit.MILLISECONDS))); - - offHeapStore.put("key1", "value1"); - offHeapStore.put("key2", "value2"); - - timeSource.advanceTime(11L); - - offHeapStore.put("key3", "value3"); - offHeapStore.put("key4", "value4"); - - final List expiredKeys = new ArrayList<>(); - offHeapStore.getStoreEventSource().addEventListener(event -> { - if (event.getType() == EventType.EXPIRED) { - expiredKeys.add(event.getKey()); - } - }); - - List iteratedKeys = new ArrayList<>(); - Store.Iterator>> iterator = offHeapStore.iterator(); - while(iterator.hasNext()) { - iteratedKeys.add(iterator.next().getKey()); - } - - assertThat(iteratedKeys, containsInAnyOrder("key1", "key2", "key3", "key4")); - assertThat(expiredKeys.isEmpty(), is(true)); - } - - @Test - public void testIteratorWithSingleExpiredEntry() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToLiveExpiration(new Duration(10L, TimeUnit.MILLISECONDS))); - - offHeapStore.put("key1", "value1"); - - timeSource.advanceTime(11L); - - Store.Iterator>> iterator = offHeapStore.iterator(); - assertTrue(iterator.hasNext()); - assertThat(iterator.next().getKey(), equalTo("key1")); - assertFalse(iterator.hasNext()); - } - - @Test - public void testIteratorWithSingleNonExpiredEntry() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToLiveExpiration(new Duration(10L, TimeUnit.MILLISECONDS))); - - offHeapStore.put("key1", "value1"); - - timeSource.advanceTime(5L); - - Store.Iterator>> iterator = offHeapStore.iterator(); - assertTrue(iterator.hasNext()); - assertThat(iterator.next().getKey(), is("key1")); - } - - @Test - public void testIteratorOnEmptyStore() throws Exception { - offHeapStore = createAndInitStore(timeSource, Expirations.timeToLiveExpiration(new Duration(10L, TimeUnit.MILLISECONDS))); - - Store.Iterator>> iterator = offHeapStore.iterator(); - assertFalse(iterator.hasNext()); - } - - protected abstract AbstractOffHeapStore createAndInitStore(final TimeSource timeSource, final Expiry expiry); - - protected abstract AbstractOffHeapStore createAndInitStore(final TimeSource timeSource, final Expiry expiry, EvictionAdvisor evictionAdvisor); - - protected abstract void destroyStore(AbstractOffHeapStore store); - - private void performEvictionTest(TestTimeSource timeSource, Expiry expiry, EvictionAdvisor evictionAdvisor) throws StoreAccessException { - AbstractOffHeapStore offHeapStore = createAndInitStore(timeSource, expiry, evictionAdvisor); - try { - @SuppressWarnings("unchecked") - StoreEventListener listener = mock(StoreEventListener.class); - offHeapStore.getStoreEventSource().addEventListener(listener); - - byte[] value = getBytes(MemoryUnit.KB.toBytes(200)); - offHeapStore.put("key1", value); - offHeapStore.put("key2", value); - offHeapStore.put("key3", value); - offHeapStore.put("key4", value); - offHeapStore.put("key5", value); - offHeapStore.put("key6", value); - - Matcher> matcher = eventType(EventType.EVICTED); - verify(listener, atLeast(1)).onEvent(argThat(matcher)); - } finally { - destroyStore(offHeapStore); - } - } - - public static Matcher> eventType(final EventType type) { - return new TypeSafeMatcher>() { - @Override - protected boolean matchesSafely(StoreEvent item) { - return item.getType().equals(type); - } - - @Override - public void describeTo(Description description) { - description.appendText("store event of type '").appendValue(type).appendText("'"); - } - }; - } - - @SuppressWarnings("unchecked") - private OperationStatistic getExpirationStatistic(Store store) { - StatisticsManager statisticsManager = new StatisticsManager(); - statisticsManager.root(store); - TreeNode treeNode = statisticsManager.queryForSingleton(QueryBuilder.queryBuilder() - .descendants() - .filter(org.terracotta.context.query.Matchers.context( - org.terracotta.context.query.Matchers.allOf(org.terracotta.context.query.Matchers.identifier(org.terracotta.context.query.Matchers - .subclassOf(OperationStatistic.class)), - org.terracotta.context.query.Matchers.attributes(org.terracotta.context.query.Matchers.hasAttribute("name", "expiration"))))) - .build()); - return (OperationStatistic) treeNode.getContext().attributes().get("this"); - } - - private byte[] getBytes(long valueLength) { - assertThat(valueLength, lessThan((long) Integer.MAX_VALUE)); - int valueLengthInt = (int) valueLength; - byte[] value = new byte[valueLengthInt]; - new Random().nextBytes(value); - return value; - } - - private static class TestTimeSource implements TimeSource { - - private long time = 0; - - @Override - public long getTimeMillis() { - return time; - } - - public void advanceTime(long step) { - time += step; - } - } - - public static class DelegatingValueHolder implements Store.ValueHolder { - - private final Store.ValueHolder valueHolder; - - public DelegatingValueHolder(final Store.ValueHolder valueHolder) { - this.valueHolder = valueHolder; - } - - @Override - public T value() { - return valueHolder.value(); - } - - @Override - public long creationTime(final TimeUnit unit) { - return valueHolder.creationTime(unit); - } - - @Override - public long expirationTime(final TimeUnit unit) { - return valueHolder.expirationTime(unit); - } - - @Override - public boolean isExpired(final long expirationTime, final TimeUnit unit) { - return valueHolder.isExpired(expirationTime, unit); - } - - @Override - public long lastAccessTime(final TimeUnit unit) { - return valueHolder.lastAccessTime(unit); - } - - @Override - public float hitRate(final long now, final TimeUnit unit) { - return valueHolder.hitRate(now, unit); - } - - @Override - public long hits() { - return valueHolder.hits(); - } - - @Override - public long getId() { - return valueHolder.getId(); - } - } - - static class SimpleValueHolder extends AbstractValueHolder { - - private final T value; - - public SimpleValueHolder(T v, long creationTime, long expirationTime) { - super(-1, creationTime, expirationTime); - this.value = v; - } - - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - - @Override - public T value() { - return value; - } - - @Override - public long creationTime(TimeUnit unit) { - return 0; - } - - @Override - public long expirationTime(TimeUnit unit) { - return 0; - } - - @Override - public boolean isExpired(long expirationTime, TimeUnit unit) { - return false; - } - - @Override - public long lastAccessTime(TimeUnit unit) { - return 0; - } - - @Override - public float hitRate(long now, TimeUnit unit) { - return 0; - } - - @Override - public long hits() { - return 0; - } - - @Override - public long getId() { - return 0; - } - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java deleted file mode 100644 index 93f26c4a96..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.internal.store.offheap; - -/** - * @author Ludovic Orban - */ -public class OffHeapStoreLifecycleHelper { - - private OffHeapStoreLifecycleHelper() { - } - - public static void init(OffHeapStore offHeapStore) { - OffHeapStore.Provider.init(offHeapStore); - } - - public static void close(OffHeapStore offHeapStore) { - OffHeapStore.Provider.close(offHeapStore); - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolderPortabilityTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolderPortabilityTest.java deleted file mode 100644 index 7e0ad7cefc..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapValueHolderPortabilityTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.offheap; - -import org.ehcache.impl.internal.store.offheap.portability.OffHeapValueHolderPortability; -import org.ehcache.core.spi.store.AbstractValueHolder; -import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; -import org.ehcache.spi.serialization.SerializationProvider; -import org.ehcache.spi.serialization.UnsupportedTypeException; -import org.junit.Before; -import org.junit.Test; -import org.terracotta.offheapstore.storage.portability.WriteContext; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.nio.ByteBuffer; -import java.util.concurrent.TimeUnit; - -import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -public class OffHeapValueHolderPortabilityTest { - - private OffHeapValueHolderPortability valueHolderPortability; - private OffHeapValueHolder originalValue; - - @Before - public void setup() throws UnsupportedTypeException { - SerializationProvider provider = new DefaultSerializationProvider(null); - provider.start(providerContaining()); - valueHolderPortability = new OffHeapValueHolderPortability<>(provider - .createValueSerializer(String.class, getClass().getClassLoader())); - - originalValue = new BasicOffHeapValueHolder<>(-1, "aValue", 1L, 2L, 3L, 0); - - } - - @Test - public void testEncodeDecode() { - ByteBuffer encoded = valueHolderPortability.encode(originalValue); - OffHeapValueHolder decoded = valueHolderPortability.decode(encoded); - - assertThat(originalValue, equalTo(decoded)); - } - - @Test - public void testWriteBackSupport() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { - ByteBuffer encoded = valueHolderPortability.encode(originalValue); - WriteContext writeContext = mock(WriteContext.class); - OffHeapValueHolder decoded = valueHolderPortability.decode(encoded, writeContext); - - Class abstractValueHolder = AbstractValueHolder.class; - Method setHits = abstractValueHolder.getDeclaredMethod("setHits", long.class); - setHits.setAccessible(true); - - decoded.setExpirationTime(4L, TimeUnit.MILLISECONDS); - decoded.setLastAccessTime(6L, TimeUnit.MILLISECONDS); - setHits.invoke(decoded, 8L); - - decoded.writeBack(); - verify(writeContext).setLong(OffHeapValueHolderPortability.ACCESS_TIME_OFFSET, 6L); - verify(writeContext).setLong(OffHeapValueHolderPortability.EXPIRE_TIME_OFFSET, 4L); - verify(writeContext).setLong(OffHeapValueHolderPortability.HITS_OFFSET, 8L); - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/Matchers.java b/impl/src/test/java/org/ehcache/impl/internal/util/Matchers.java deleted file mode 100644 index bd2b3564ff..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/util/Matchers.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.util; - -import org.ehcache.Cache; -import org.ehcache.ValueSupplier; -import org.ehcache.event.EventType; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.events.StoreEvent; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; - -/** - * - * @author cdennis - */ -public class Matchers { - - public static Matcher> hasKey(final K key) { - return new TypeSafeMatcher>() { - - @Override - protected boolean matchesSafely(Cache item) { - return item.containsKey(key); - } - - @Override - public void describeTo(Description description) { - description.appendText("cache containing key '").appendValue(key).appendText("'"); - } - }; - } - - public static Matcher> hasEntry(final K key, final V value) { - return new TypeSafeMatcher>() { - - @Override - protected boolean matchesSafely(Cache item) { - return value.equals(item.get(key)); - } - - @Override - public void describeTo(Description description) { - description.appendText("cache containing entry {").appendValue(key).appendText(", ").appendValue(value).appendText("}"); - } - }; - } - - public static Matcher> valueHeld(final V value) { - return new TypeSafeMatcher>() { - @Override - protected boolean matchesSafely(Store.ValueHolder item) { - return item.value().equals(value); - } - - @Override - public void describeTo(Description description) { - description.appendText("value holder containing value '").appendValue(value).appendText("'"); - } - }; - } - - public static Matcher> holding(final V value) { - return new TypeSafeMatcher>() { - @Override - protected boolean matchesSafely(ValueSupplier item) { - return item.value().equals(value); - } - - @Override - public void describeTo(Description description) { - description.appendText("holder containing value '").appendValue(value).appendText("'"); - } - }; - } - - public static Matcher> eventOfType(final EventType type) { - return new TypeSafeMatcher>() { - @Override - protected boolean matchesSafely(StoreEvent item) { - return item.getType().equals(type); - } - - @Override - public void describeTo(Description description) { - description.appendText("event of type '").appendValue(type).appendText("'"); - } - }; - } -} diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/StatisticsTestUtils.java b/impl/src/test/java/org/ehcache/impl/internal/util/StatisticsTestUtils.java deleted file mode 100644 index 745cbd30cc..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/util/StatisticsTestUtils.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.util; - -import org.ehcache.core.spi.store.Store; -import org.hamcrest.Description; -import org.hamcrest.Factory; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; -import org.junit.Assert; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.ValueStatistic; - -import java.util.Arrays; -import java.util.EnumSet; -import java.util.List; - -/** - * StatisticsTestUtils - */ -public class StatisticsTestUtils { - /** - * Validates expected {@link org.terracotta.statistics.OperationStatistic} updates for the - * indicated {@code Ehcache} instance. The statistics identified in {@code changed} are - * checked for a value of {@code 1}; all other statistics in the same enumeration class are - * checked for a value of {@code 0}. - * - * @param store the store instance to check - * @param changed the statistics values that should have updated values - * @param the statistics enumeration type - */ - public static > void validateStats(final Store store, final EnumSet changed) { - assert changed != null; - final EnumSet unchanged = EnumSet.complementOf(changed); - - @SuppressWarnings("unchecked") - final List> sets = Arrays.asList(changed, unchanged); - Class statsClass = null; - for (final EnumSet set : sets) { - if (!set.isEmpty()) { - statsClass = set.iterator().next().getDeclaringClass(); - break; - } - } - assert statsClass != null; - - final OperationStatistic operationStatistic = getOperationStatistic(store, statsClass); - for (final E statId : changed) { - Assert.assertThat(String.format("Value for %s.%s", statId.getDeclaringClass().getName(), statId.name()), - getStatistic(operationStatistic, statId), StatisticMatcher.equalTo(1L)); - } - for (final E statId : unchanged) { - Assert.assertThat(String.format("Value for %s.%s", statId.getDeclaringClass().getName(), statId.name()), - getStatistic(operationStatistic, statId), StatisticMatcher.equalTo(0L)); - } - } - - public static > void validateStat(final Store store, E outcome, long count) { - OperationStatistic operationStatistic = getOperationStatistic(store, outcome.getDeclaringClass()); - Assert.assertThat(getStatistic(operationStatistic, outcome), StatisticMatcher.equalTo(count)); - } - - /** - * Gets the value of the statistic indicated from an {@link OperationStatistic} - * instance. - * - * @param operationStatistic the {@code OperationStatistic} instance from which the statistic is to - * be obtained - * @param statId the {@code Enum} constant identifying the statistic for which the value must be obtained - * @param The {@code Enum} type for the statistics - * - * @return the value, possibly null, for {@code statId} about {@code ehcache} - */ - private static > Number getStatistic(final OperationStatistic operationStatistic, final E statId) { - if (operationStatistic != null) { - final ValueStatistic valueStatistic = operationStatistic.statistic(statId); - return (valueStatistic == null ? null : valueStatistic.value()); - } - return null; - } - - /** - * Gets a reference to the {@link OperationStatistic} instance holding the - * class of statistics specified for the {@code Ehcache} instance provided. - * - * @param store the store instance for which the {@code OperationStatistic} instance - * should be obtained - * @param statsClass the {@code Class} of statistics for which the {@code OperationStatistic} instance - * should be obtained - * @param the {@code Enum} type for the statistics - * - * @return a reference to the {@code OperationStatistic} instance holding the {@code statsClass} statistics; - * may be {@code null} if {@code statsClass} statistics do not exist for {@code ehcache} - */ - private static > OperationStatistic getOperationStatistic(final Store store, final Class statsClass) { - for (final TreeNode statNode : ContextManager.nodeFor(store).getChildren()) { - final Object statObj = statNode.getContext().attributes().get("this"); - if (statObj instanceof OperationStatistic) { - @SuppressWarnings("unchecked") - final OperationStatistic statistic = (OperationStatistic)statObj; - if (statistic.type().equals(statsClass)) { - return statistic; - } - } - } - return null; - } - - /** - * Local {@code org.hamcrest.TypeSafeMatcher} implementation for testing - * {@code org.terracotta.statistics.OperationStatistic} values. - */ - private static final class StatisticMatcher extends TypeSafeMatcher { - - final Number expected; - - private StatisticMatcher(final Class expectedType, final Number expected) { - super(expectedType); - this.expected = expected; - } - - @Override - protected boolean matchesSafely(final Number value) { - if (value != null) { - return (value.longValue() == this.expected.longValue()); - } else { - return this.expected.longValue() == 0L; - } - } - - @Override - public void describeTo(final Description description) { - if (this.expected.longValue() == 0L) { - description.appendText("zero or null"); - } else { - description.appendValue(this.expected); - } - } - - @Factory - public static Matcher equalTo(final Number expected) { - return new StatisticMatcher(Number.class, expected); - } - } -} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java b/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java deleted file mode 100644 index c2562dfe11..0000000000 --- a/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.serialization; - -import org.ehcache.spi.serialization.StatefulSerializer; -import org.hamcrest.core.IsSame; -import org.junit.Assert; -import org.junit.Test; - -import java.io.Serializable; - -import static org.ehcache.impl.serialization.SerializerTestUtilities.createClassNameRewritingLoader; -import static org.ehcache.impl.serialization.SerializerTestUtilities.newClassName; -import static org.ehcache.impl.serialization.SerializerTestUtilities.popTccl; -import static org.ehcache.impl.serialization.SerializerTestUtilities.pushTccl; - -/** - * - * @author cdennis - */ -public class EnumTest { - - @Test - public void basicInstanceSerialization() throws ClassNotFoundException { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); - s.init(new TransientStateRepository()); - - Assert.assertThat(s.read(s.serialize(People.Alice)), IsSame.sameInstance(People.Alice)); - Assert.assertThat(s.read(s.serialize(People.Bob)), IsSame.sameInstance(People.Bob)); - Assert.assertThat(s.read(s.serialize(People.Eve)), IsSame.sameInstance(People.Eve)); - } - - @Test - public void classSerialization() throws ClassNotFoundException { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); - s.init(new TransientStateRepository()); - - Assert.assertThat(s.read(s.serialize(Enum.class)), IsSame.sameInstance(Enum.class)); - Assert.assertThat(s.read(s.serialize(Dogs.Handel.getClass())), IsSame.sameInstance(Dogs.Handel.getClass())); - Assert.assertThat(s.read(s.serialize(Dogs.Cassie.getClass())), IsSame.sameInstance(Dogs.Cassie.getClass())); - Assert.assertThat(s.read(s.serialize(Dogs.Penny.getClass())), IsSame.sameInstance(Dogs.Penny.getClass())); - } - - @Test - public void shiftingInstanceSerialization() throws ClassNotFoundException { - @SuppressWarnings("unchecked") - StatefulSerializer s = new CompactJavaSerializer(null); - s.init(new TransientStateRepository()); - - ClassLoader wLoader = createClassNameRewritingLoader(Foo_W.class); - ClassLoader rLoader = createClassNameRewritingLoader(Foo_R.class); - - Class wClass = wLoader.loadClass(newClassName(Foo_W.class)); - Class rClass = rLoader.loadClass(newClassName(Foo_R.class)); - - Object[] wInstances = wClass.getEnumConstants(); - Object[] rInstances = rClass.getEnumConstants(); - - pushTccl(rLoader); - try { - for (int i = 0; i < wInstances.length; i++) { - Assert.assertThat(s.read(s.serialize((Serializable) wInstances[i])), IsSame.sameInstance(rInstances[i])); - } - } finally { - popTccl(); - } - } - - public static enum Foo_W { a, b, c { int i = 5; }, d { float f = 5.0f; } } - public static enum Foo_R { a, b { byte b = 3; }, c, d { double d = 6.0; } } -} - -enum People { Alice, Bob, Eve } -enum Dogs { Handel, Cassie { int i = 0; }, Penny { double d = 3.0; } } diff --git a/impl/src/test/java/org/ehcache/impl/serialization/JavaSerializer.java b/impl/src/test/java/org/ehcache/impl/serialization/JavaSerializer.java deleted file mode 100644 index 53064bd045..0000000000 --- a/impl/src/test/java/org/ehcache/impl/serialization/JavaSerializer.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.impl.serialization; - -import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.impl.internal.util.ByteBufferInputStream; -import org.ehcache.spi.serialization.Serializer; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.ObjectStreamClass; -import java.lang.reflect.Proxy; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; - -/** - * - * @author cdennis - */ -public class JavaSerializer implements Serializer { - - private final ClassLoader classLoader; - - public JavaSerializer(ClassLoader classLoader) { - this.classLoader = classLoader; - } - - @Override - public ByteBuffer serialize(T object) { - ByteArrayOutputStream bout = new ByteArrayOutputStream(); - try { - ObjectOutputStream oout = new ObjectOutputStream(bout); - oout.writeObject(object); - } catch (IOException e) { - throw new SerializerException(e); - } finally { - try { - bout.close(); - } catch (IOException e) { - throw new AssertionError(e); - } - } - return ByteBuffer.wrap(bout.toByteArray()); - } - - @SuppressWarnings("unchecked") - @Override - public T read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { - ByteBufferInputStream bin = new ByteBufferInputStream(entry); - try { - try (OIS ois = new OIS(bin, classLoader)) { - return (T) ois.readObject(); - } - } catch (IOException e) { - throw new SerializerException(e); - } finally { - try { - bin.close(); - } catch (IOException e) { - throw new AssertionError(e); - } - } - } - - @Override - public boolean equals(T object, ByteBuffer binary) throws SerializerException, ClassNotFoundException { - return object.equals(read(binary)); - } - - private static class OIS extends ObjectInputStream { - - private final ClassLoader classLoader; - - public OIS(InputStream in, ClassLoader classLoader) throws IOException { - super(in); - this.classLoader = classLoader; - } - - @Override - protected Class resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException { - try { - return Class.forName(desc.getName(), false, classLoader); - } catch (ClassNotFoundException cnfe) { - Class primitive = primitiveClasses.get(desc.getName()); - if (primitive != null) { - return primitive; - } - throw cnfe; - } - } - - @Override - protected Class resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { - Class[] interfaceClasses = new Class[interfaces.length]; - for (int i = 0; i < interfaces.length; i++) { - interfaceClasses[i] = Class.forName(interfaces[i], false, classLoader); - } - - return Proxy.getProxyClass(classLoader, interfaceClasses); - } - - private static final Map> primitiveClasses = new HashMap<>(); - static { - primitiveClasses.put("boolean", boolean.class); - primitiveClasses.put("byte", byte.class); - primitiveClasses.put("char", char.class); - primitiveClasses.put("double", double.class); - primitiveClasses.put("float", float.class); - primitiveClasses.put("int", int.class); - primitiveClasses.put("long", long.class); - primitiveClasses.put("short", short.class); - primitiveClasses.put("void", void.class); - } - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/SerializerTestUtilities.java b/impl/src/test/java/org/ehcache/impl/serialization/SerializerTestUtilities.java deleted file mode 100644 index e3a0595c59..0000000000 --- a/impl/src/test/java/org/ehcache/impl/serialization/SerializerTestUtilities.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.serialization; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Collections; -import java.util.Deque; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.Map; -import java.util.Map.Entry; - -import org.objectweb.asm.ClassReader; -import org.objectweb.asm.ClassVisitor; -import org.objectweb.asm.ClassWriter; -import org.objectweb.asm.commons.Remapper; -import org.objectweb.asm.commons.RemappingClassAdapter; - -/** - * - * @author cdennis - */ -public final class SerializerTestUtilities { - - private SerializerTestUtilities() { - //no instances please - } - - public static ClassLoader createClassNameRewritingLoader(Class initial, Class ... more) { - ClassLoader loader = initial.getClassLoader(); - Map remapping = new HashMap<>(); - remapping.putAll(createRemappings(initial)); - for (Class klazz : more) { - remapping.putAll(createRemappings(klazz)); - } - return new RewritingClassloader(loader, remapping); - } - - private static Map createRemappings(Class initial) { - HashMap remappings = new HashMap<>(); - remappings.put(initial.getName(), newClassName(initial)); - for (Class inner : initial.getDeclaredClasses()) { - remappings.put(inner.getName(), newClassName(inner)); - } - if (initial.isEnum()) { - for (Object e : initial.getEnumConstants()) { - Class eClass = e.getClass(); - if (eClass != initial) { - remappings.put(eClass.getName(), newClassName(eClass)); - } - } - } - return remappings; - } - - public static String newClassName(Class initial) { - String initialName = initial.getName(); - int lastUnderscore = initialName.lastIndexOf('_'); - if (lastUnderscore == -1) { - return initialName; - } else { - int nextDollar = initialName.indexOf('$', lastUnderscore); - if (nextDollar == -1) { - return initialName.substring(0, lastUnderscore); - } else { - return initialName.substring(0, lastUnderscore).concat(initialName.substring(nextDollar)); - } - } - } - - private static final ThreadLocal> tcclStacks = new ThreadLocal>() { - @Override - protected Deque initialValue() { - return new LinkedList<>(); - } - }; - - public static void pushTccl(ClassLoader loader) { - tcclStacks.get().push(Thread.currentThread().getContextClassLoader()); - Thread.currentThread().setContextClassLoader(loader); - } - - public static void popTccl() { - Thread.currentThread().setContextClassLoader(tcclStacks.get().pop()); - } - - static class RewritingClassloader extends ClassLoader { - - private final Map remappings; - - RewritingClassloader(ClassLoader parent, Map remappings) { - super(parent); - this.remappings = Collections.unmodifiableMap(new HashMap<>(remappings)); - } - - @Override - protected synchronized Class loadClass(String name, boolean resolve) throws ClassNotFoundException { - Class c = findLoadedClass(name); - if (c == null) { - if (remappings.containsValue(name)) { - c = findClass(name); - if (resolve) { - resolveClass(c); - } - } else { - return super.loadClass(name, resolve); - } - } - return c; - } - - - @Override - protected Class findClass(String name) throws ClassNotFoundException { - for (Entry mapping : remappings.entrySet()) { - if (name.equals(mapping.getValue())) { - String path = mapping.getKey().replace('.', '/').concat(".class"); - try { - try (InputStream resource = getResourceAsStream(path)) { - ClassReader reader = new ClassReader(resource); - - ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS); - ClassVisitor visitor = new RemappingClassAdapter(writer, new Remapper() { - - @Override - public String map(String from) { - String to = remappings.get(from.replace('/', '.')); - if (to == null) { - return from; - } else { - return to.replace('.', '/'); - } - } - }); - - reader.accept(visitor, ClassReader.EXPAND_FRAMES); - byte[] classBytes = writer.toByteArray(); - - return defineClass(name, classBytes, 0, classBytes.length); - } - } catch (IOException e) { - throw new ClassNotFoundException("IOException while loading", e); - } - } - } - return super.findClass(name); - } - } -} diff --git a/integration-test/.gitignore b/integration-test/.gitignore deleted file mode 100644 index ae3c172604..0000000000 --- a/integration-test/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/integration-test/build.gradle b/integration-test/build.gradle index b3c84c2b3f..876994e798 100644 --- a/integration-test/build.gradle +++ b/integration-test/build.gradle @@ -14,13 +14,21 @@ * limitations under the License. */ -dependencies { - compile project(':impl'), "org.slf4j:slf4j-api:$parent.slf4jVersion", "javax.cache:cache-api:$parent.jcacheVersion" - testCompile project(':107'), project(':impl'), project(':transactions') +plugins { + id 'org.ehcache.build.conventions.java' } -test { - if (testJava.javaVersion.isJava9Compatible()) { - jvmArgs += ['--add-modules', 'java.xml.bind'] +dependencies { + testImplementation "javax.cache:cache-api:$parent.jcacheVersion" + testImplementation project(':ehcache-107') + testImplementation project(':ehcache-impl') + testImplementation(project(':ehcache-transactions')) { + capabilities { + requireCapability('org.ehcache:ehcache-transactions-modules') + } + } + testImplementation (group: 'org.codehaus.btm', name: 'btm', version: '2.1.4') { + exclude group:'org.slf4j', module:'slf4j-api' } + testImplementation "org.terracotta:statistics:$parent.statisticVersion" } diff --git a/integration-test/gradle.properties b/integration-test/gradle.properties deleted file mode 100644 index 8a8b81c0c3..0000000000 --- a/integration-test/gradle.properties +++ /dev/null @@ -1,2 +0,0 @@ -subPomName = Ehcache 3 Integration Tests module -subPomDesc = The integration tests module of Ehcache 3 diff --git a/integration-test/src/test/java/org/ehcache/docs/GettingStartedWithStaticImports.java b/integration-test/src/test/java/org/ehcache/docs/GettingStartedWithStaticImports.java index 43e37a4c7b..9efb5abfbe 100644 --- a/integration-test/src/test/java/org/ehcache/docs/GettingStartedWithStaticImports.java +++ b/integration-test/src/test/java/org/ehcache/docs/GettingStartedWithStaticImports.java @@ -32,6 +32,7 @@ public class GettingStartedWithStaticImports { @Test + @SuppressWarnings("try") public void cachemanagerExample() { // tag::java7Example[] try(CacheManager cacheManager = newCacheManagerBuilder() // <1> diff --git a/integration-test/src/test/java/org/ehcache/docs/Performance.java b/integration-test/src/test/java/org/ehcache/docs/Performance.java new file mode 100644 index 0000000000..7ff07712b6 --- /dev/null +++ b/integration-test/src/test/java/org/ehcache/docs/Performance.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.docs; + +import org.ehcache.expiry.ExpiryPolicy; +import org.junit.Test; + +import java.time.Duration; +import java.util.function.Supplier; + +/** + * Samples showing performance strategies. + */ +public class Performance { + + @Test + public void expiryAllocation() { + // tag::expiryAllocation[] + new ExpiryPolicy() { + @Override + public Duration getExpiryForCreation(Object key, Object value) { + return null; + } + + @Override + public Duration getExpiryForAccess(Object key, Supplier value) { + return Duration.ofSeconds(10); // <1> + } + + @Override + public Duration getExpiryForUpdate(Object key, Supplier oldValue, Object newValue) { + return null; + } + }; + // end::expiryAllocation[] + } +} diff --git a/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java b/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java index bb7b2e2d5d..400d032a43 100644 --- a/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java @@ -38,10 +38,10 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertThat; /** * Created by alsu on 01/09/15. @@ -66,7 +66,7 @@ public void tearDown() throws Exception { @Test public void testCopyValueOnRead() throws Exception { CacheConfiguration cacheConfiguration = baseConfig - .add(new DefaultCopierConfiguration<>(PersonOnReadCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .withService(new DefaultCopierConfiguration<>(PersonOnReadCopier.class, DefaultCopierConfiguration.Type.VALUE)) .build(); Cache cache = cacheManager.createCache("cache", cacheConfiguration); @@ -89,7 +89,7 @@ public void testCopyValueOnRead() throws Exception { @Test public void testCopyValueOnWrite() throws Exception { CacheConfiguration cacheConfiguration = baseConfig - .add(new DefaultCopierConfiguration<>(PersonOnWriteCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .withService(new DefaultCopierConfiguration<>(PersonOnWriteCopier.class, DefaultCopierConfiguration.Type.VALUE)) .build(); Cache cache = cacheManager.createCache("cache", cacheConfiguration); @@ -131,8 +131,8 @@ public void testIdentityCopier() throws Exception { @Test public void testSerializingCopier() throws Exception { CacheConfiguration cacheConfiguration = baseConfig - .add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) - .add(new DefaultSerializerConfiguration<>(PersonSerializer.class, DefaultSerializerConfiguration.Type.VALUE)) + .withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) + .withService(new DefaultSerializerConfiguration<>(PersonSerializer.class, DefaultSerializerConfiguration.Type.VALUE)) .build(); Cache cache = cacheManager.createCache("cache", cacheConfiguration); @@ -155,7 +155,7 @@ public void testSerializingCopier() throws Exception { @Test public void testReadWriteCopier() throws Exception { CacheConfiguration cacheConfiguration = baseConfig - .add(new DefaultCopierConfiguration<>(PersonCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .withService(new DefaultCopierConfiguration<>(PersonCopier.class, DefaultCopierConfiguration.Type.VALUE)) .build(); Cache cache = cacheManager.createCache("cache", cacheConfiguration); @@ -210,6 +210,9 @@ public int hashCode() { } private static class Person implements Serializable { + + private static final long serialVersionUID = 1L; + String name; int age; diff --git a/integration-test/src/test/java/org/ehcache/integration/EhcacheBaseTest.java b/integration-test/src/test/java/org/ehcache/integration/EhcacheBaseTest.java new file mode 100644 index 0000000000..829ffaec15 --- /dev/null +++ b/integration-test/src/test/java/org/ehcache/integration/EhcacheBaseTest.java @@ -0,0 +1,324 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.integration; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Configuration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ConfigurationBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.core.EhcacheManager; +import org.ehcache.core.spi.service.StatisticsService; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.impl.internal.TimeSourceConfiguration; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; +import org.ehcache.integration.statistics.AbstractCacheCalculationTest; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.service.Service; +import org.junit.After; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.io.IOException; +import java.time.Duration; +import java.util.Collection; +import java.util.Collections; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatNullPointerException; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +/** + * @author Henri Tremblay + */ +@RunWith(Parameterized.class) +public class EhcacheBaseTest extends AbstractCacheCalculationTest { + + private CacheManager cacheManager; + + private Cache cache; + + private final StatisticsService statisticsService = new DefaultStatisticsService(); + + private final TestTimeSource timeSource = new TestTimeSource(); + + public EhcacheBaseTest(ResourcePoolsBuilder poolBuilder) { + super(poolBuilder); + } + + @After + public void after() { + if (cacheManager != null) { + cacheManager.close(); + } + } + + private void createCacheManager(CacheManagerBuilder builder) { + cacheManager = builder + .build(true); + } + + private void createNotAtomicCacheManager() throws IOException { + Configuration config = ConfigurationBuilder.newConfigurationBuilder() + .withService(new TimeSourceConfiguration(timeSource)) + .withService(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(); + + Collection services = Collections.singleton(statisticsService); + cacheManager = new EhcacheManager(config, services, false); + cacheManager.init(); + } + + private void createCacheManager() { + createCacheManager(baseCacheManagerConfig()); + } + + private CacheManagerBuilder baseCacheManagerConfig() { + try { + return CacheManagerBuilder.newCacheManagerBuilder() + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .using(statisticsService) + .using(new TimeSourceConfiguration(timeSource)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private Cache createCache() { + return createCache(baseConfig()); + } + + private Cache createCache(CacheConfigurationBuilder config) { + Cache cache = cacheManager.createCache("cache", config); + cacheStatistics = statisticsService.getCacheStatistics("cache"); + return cache; + } + + private CacheConfigurationBuilder baseConfig() { + return CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, resources); + } + + @Test + public void putIfAbsent_absent() { + createCacheManager(); + + cache = createCache(); + + assertThat(cache.putIfAbsent(1, "a")).isNull(); + + assertThat(cache.get(1)).isEqualTo("a"); + + changesOf(1, 1, 1, 0); + } + + @Test + public void putIfAbsent_present() { + createCacheManager(); + + cache = createCache(); + + cache.put(1, "a"); + + assertThat(cache.putIfAbsent(1, "b")).isEqualTo("a"); + + changesOf(1, 0, 1, 0); + } + + @Test + public void putIfAbsent_presentButExpired() { + createCacheManager(); + + CacheConfigurationBuilder builder = baseConfig() + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(10))); + cache = createCache(builder); + + cache.put(1, "a"); + + timeSource.advanceTime(15); + + assertThat(cache.putIfAbsent(1, "b")).isNull(); + + assertThat(cache.get(1)).isEqualTo("b"); + + changesOf(1, 1, 2, 0); + } + + @Test + public void putIfAbsent_absentPutNull() { + createCacheManager(); + + cache = createCache(); + + assertThatNullPointerException().isThrownBy(() -> cache.putIfAbsent(1, null)); + + changesOf(0, 0, 0, 0); + } + + @Test + public void putIfAbsentLoaderWriter_absentAndLoaded() throws Exception { + createCacheManager(); + + CacheLoaderWriter loader = mockLoader(); + when(loader.load(1)).thenReturn("a"); + + CacheConfigurationBuilder builder = baseConfig() + .withLoaderWriter(loader); + cache = createCache(builder); + + assertThat(cache.putIfAbsent(1, "b")).isEqualTo("a"); + + assertThat(cache.get(1)).isEqualTo("a"); + + changesOf(2, 0, 0, 0); + } + + @Test + public void putIfAbsentLoaderWriter_absentAndNotLoaded() throws Exception { + createCacheManager(); + + CacheLoaderWriter loader = mockLoader(); + when(loader.load(1)).thenReturn(null); + + CacheConfigurationBuilder builder = baseConfig() + .withLoaderWriter(loader); + cache = createCache(builder); + + assertThat(cache.putIfAbsent(1, "b")).isNull(); + + verify(loader).write(1, "b"); + + assertThat(cache.get(1)).isEqualTo("b"); + + changesOf(1, 1, 1, 0); + } + + @Test + public void putIfAbsentLoaderWriter_present() throws Exception { + createCacheManager(); + + CacheLoaderWriter loader = mockLoader(); + + CacheConfigurationBuilder builder = baseConfig() + .withLoaderWriter(loader); + cache = createCache(builder); + + cache.put(1, "a"); + + assertThat(cache.putIfAbsent(1, "b")).isEqualTo("a"); + + verify(loader).write(1, "a"); + + changesOf(1, 0, 1, 0); + } + + @Test + public void putIfAbsentLoaderWriter_presentButExpiredAndLoaded() throws Exception { + createCacheManager(); + + CacheLoaderWriter loader = mockLoader(); + when(loader.load(1)).thenReturn("c"); + + CacheConfigurationBuilder builder = baseConfig() + .withLoaderWriter(loader) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(10))); + cache = createCache(builder); + + cache.put(1, "a"); + + timeSource.advanceTime(15); + + assertThat(cache.putIfAbsent(1, "b")).isEqualTo("c"); + + verify(loader).write(1, "a"); + + changesOf(1, 0, 1, 0); + } + + @Test + public void putIfAbsentLoaderWriter_presentButExpiredAndNotLoaded() throws Exception { + createCacheManager(); + + CacheLoaderWriter loader = mockLoader(); + when(loader.load(1)).thenReturn(null); + + CacheConfigurationBuilder builder = baseConfig() + .withLoaderWriter(loader) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(10))); + cache = createCache(builder); + + cache.put(1, "a"); + + timeSource.advanceTime(15); + + assertThat(cache.putIfAbsent(1, "b")).isNull(); + + verify(loader).write(1, "b"); + + changesOf(0, 1, 2, 0); + } + + @Test + public void putIfAbsentLoaderWriterNotAtomic_absent() throws Exception { + createNotAtomicCacheManager(); + + CacheLoaderWriter loader = mockLoader(); + + CacheConfigurationBuilder builder = baseConfig() + .withLoaderWriter(loader); + cache = createCache(builder); + + assertThat(cache.putIfAbsent(1, "a")).isNull(); + + verify(loader).write(1, "a"); + + assertThat(cache.get(1)).isEqualTo("a"); + + changesOf(1, 1, 1, 0); + } + + @Test + public void putIfAbsentLoaderWriterNotAtomic_present() throws Exception { + createNotAtomicCacheManager(); + + CacheLoaderWriter loader = mockLoader(); + + CacheConfigurationBuilder builder = baseConfig() + .withLoaderWriter(loader); + cache = createCache(builder); + + cache.put(1, "a"); + + assertThat(cache.putIfAbsent(1, "b")).isEqualTo("a"); + + verify(loader).write(1, "a"); + verifyNoMoreInteractions(loader); + + changesOf(1, 0, 1, 0); + } + + @SuppressWarnings("unchecked") + private static CacheLoaderWriter mockLoader() { + return mock(CacheLoaderWriter.class); + } +} diff --git a/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java b/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java index e9b0786fe0..5784f64acf 100644 --- a/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java +++ b/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java @@ -21,15 +21,16 @@ import org.ehcache.config.ResourceType; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.resilience.StoreAccessException; import org.ehcache.impl.copy.IdentityCopier; import org.ehcache.core.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.core.spi.time.SystemTimeSource; -import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.ServiceLocator; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.spi.copy.Copier; @@ -51,11 +52,11 @@ import java.util.function.Function; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.spi.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsCollectionContaining.hasItems; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.doThrow; @@ -100,8 +101,6 @@ public void testPutAll_without_cache_writer() throws Exception { public void testPutAll_with_cache_writer() throws Exception { CacheConfigurationBuilder cacheConfigurationBuilder = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(100)); - CacheConfiguration cacheConfiguration = cacheConfigurationBuilder - .build(); CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); @@ -110,6 +109,8 @@ public void testPutAll_with_cache_writer() throws Exception { when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), ArgumentMatchers.any(CacheConfiguration.class))).thenReturn(cacheLoaderWriter); CacheManagerBuilder managerBuilder = CacheManagerBuilder.newCacheManagerBuilder().using(cacheLoaderWriterProvider); + CacheConfiguration cacheConfiguration = cacheConfigurationBuilder.withLoaderWriter(cacheLoaderWriter) + .build(); CacheManager cacheManager = managerBuilder.withCache("myCache", cacheConfiguration).build(true); Cache myCache = cacheManager.getCache("myCache", String.class, String.class); @@ -140,8 +141,6 @@ public void testPutAll_with_cache_writer() throws Exception { public void testPutAll_with_cache_writer_that_throws_exception() throws Exception { CacheConfigurationBuilder cacheConfigurationBuilder = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(100)); - CacheConfiguration cacheConfiguration = cacheConfigurationBuilder - .build(); CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); CacheLoaderWriter cacheLoaderWriterThatThrows = mock(CacheLoaderWriter.class); @@ -151,6 +150,7 @@ public void testPutAll_with_cache_writer_that_throws_exception() throws Exceptio when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), ArgumentMatchers.any(CacheConfiguration.class))).thenReturn(cacheLoaderWriterThatThrows); CacheManagerBuilder managerBuilder = CacheManagerBuilder.newCacheManagerBuilder().using(cacheLoaderWriterProvider); + CacheConfiguration cacheConfiguration = cacheConfigurationBuilder.withLoaderWriter(cacheLoaderWriterThatThrows).build(); CacheManager cacheManager = managerBuilder.withCache("myCache", cacheConfiguration).build(true); Cache myCache = cacheManager.getCache("myCache", String.class, String.class); @@ -248,13 +248,13 @@ public void testGetAll_without_cache_loader() throws Exception { public void testGetAll_with_cache_loader() throws Exception { CacheConfigurationBuilder cacheConfigurationBuilder = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(100)); - CacheConfiguration cacheConfiguration = cacheConfigurationBuilder.build(); CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); when(cacheLoaderWriter.load(ArgumentMatchers.any())).thenThrow(new RuntimeException("We should not have called .load() but .loadAll()")); when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), ArgumentMatchers.any(CacheConfiguration.class))).thenReturn(cacheLoaderWriter); CacheManagerBuilder managerBuilder = CacheManagerBuilder.newCacheManagerBuilder().using(cacheLoaderWriterProvider); + CacheConfiguration cacheConfiguration = cacheConfigurationBuilder.withLoaderWriter(cacheLoaderWriter).build(); CacheManager cacheManager = managerBuilder.withCache("myCache", cacheConfiguration).build(true); when(cacheLoaderWriter.loadAll(argThat(IsCollectionContaining.hasItem("key0")))).thenReturn(new HashMap(){{put("key0","value0");}}); @@ -281,7 +281,6 @@ public void testGetAll_with_cache_loader() throws Exception { public void testGetAll_cache_loader_throws_exception() throws Exception { CacheConfigurationBuilder cacheConfigurationBuilder = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(100)); - CacheConfiguration cacheConfiguration = cacheConfigurationBuilder.build(); CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); @@ -289,6 +288,7 @@ public void testGetAll_cache_loader_throws_exception() throws Exception { when(cacheLoaderWriter.loadAll(ArgumentMatchers.any(Iterable.class))).thenThrow(new Exception("Simulating an exception from the cache loader")); when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), ArgumentMatchers.any(CacheConfiguration.class))).thenReturn(cacheLoaderWriter); CacheManagerBuilder managerBuilder = CacheManagerBuilder.newCacheManagerBuilder().using(cacheLoaderWriterProvider); + CacheConfiguration cacheConfiguration = cacheConfigurationBuilder.withLoaderWriter(cacheLoaderWriter).build(); CacheManager cacheManager = managerBuilder.withCache("myCache", cacheConfiguration).build(true); Cache myCache = cacheManager.getCache("myCache", String.class, String.class); @@ -384,8 +384,6 @@ public void testRemoveAll_without_cache_writer() throws Exception { public void testRemoveAll_with_cache_writer() throws Exception { CacheConfigurationBuilder cacheConfigurationBuilder = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(100)); - CacheConfiguration cacheConfiguration = cacheConfigurationBuilder - .build(); CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); @@ -393,6 +391,8 @@ public void testRemoveAll_with_cache_writer() throws Exception { when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), ArgumentMatchers.any(CacheConfiguration.class))).thenReturn(cacheLoaderWriter); CacheManagerBuilder managerBuilder = CacheManagerBuilder.newCacheManagerBuilder().using(cacheLoaderWriterProvider); + CacheConfiguration cacheConfiguration = cacheConfigurationBuilder.withLoaderWriter(cacheLoaderWriter) + .build(); CacheManager cacheManager = managerBuilder.withCache("myCache", cacheConfiguration).build(true); Cache myCache = cacheManager.getCache("myCache", String.class, String.class); @@ -430,8 +430,6 @@ public void testRemoveAll_with_cache_writer() throws Exception { public void testRemoveAll_cache_writer_throws_exception() throws Exception { CacheConfigurationBuilder cacheConfigurationBuilder = CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class, heap(100)); - CacheConfiguration cacheConfiguration = cacheConfigurationBuilder - .build(); CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); CacheLoaderWriter cacheLoaderWriterThatThrows = mock(CacheLoaderWriter.class); @@ -439,6 +437,8 @@ public void testRemoveAll_cache_writer_throws_exception() throws Exception { when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), ArgumentMatchers.any(CacheConfiguration.class))).thenReturn(cacheLoaderWriterThatThrows); CacheManagerBuilder managerBuilder = CacheManagerBuilder.newCacheManagerBuilder().using(cacheLoaderWriterProvider); + CacheConfiguration cacheConfiguration = cacheConfigurationBuilder.withLoaderWriter(cacheLoaderWriterThatThrows) + .build(); CacheManager cacheManager = managerBuilder.withCache("myCache", cacheConfiguration).build(true); Cache myCache = cacheManager.getCache("myCache", String.class, String.class); @@ -520,12 +520,12 @@ public void testRemoveAll_with_store_that_throws() throws Exception { */ private static class CustomStoreProvider implements Store.Provider { @Override - public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { + public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { return Integer.MAX_VALUE; // Ensure this Store.Provider is ranked highest } @Override - public Store createStore(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { + public Store createStore(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { ServiceLocator serviceLocator = dependencySet().with(new DefaultSerializationProvider(null)).build(); try { serviceLocator.startAllServices(); @@ -533,7 +533,7 @@ public Store createStore(Store.Configuration storeConfig, Ser throw new RuntimeException(e); } final Copier defaultCopier = new IdentityCopier(); - return new OnHeapStore(storeConfig, SystemTimeSource.INSTANCE, defaultCopier, defaultCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()) { + return new OnHeapStore(storeConfig, SystemTimeSource.INSTANCE, defaultCopier, defaultCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher(), new DefaultStatisticsService()) { @Override public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { throw new StoreAccessException("Problem trying to bulk compute"); diff --git a/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java b/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java index a9000f6ea3..3d8d4a53b3 100644 --- a/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java @@ -18,7 +18,8 @@ import org.ehcache.Cache; import org.ehcache.CacheManager; -import org.ehcache.core.EhcacheWithLoaderWriter; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.core.Ehcache; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.units.EntryUnit; @@ -28,30 +29,29 @@ import org.ehcache.event.EventFiring; import org.ehcache.event.EventOrdering; import org.ehcache.event.EventType; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; import org.ehcache.impl.internal.TimeSourceConfiguration; +import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Serializable; +import java.time.Duration; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; public class EventNotificationTest { private static final TestTimeSource testTimeSource = new TestTimeSource(); @@ -241,11 +241,11 @@ public void testEventOrderForUpdateThatTriggersEviction () { @Test public void testEventFiringInCacheIterator() { - Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EventNotificationTest"); + Logger logger = LoggerFactory.getLogger(Ehcache.class + "-" + "EventNotificationTest"); CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() .heap(5L, EntryUnit.ENTRIES).build()) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))) .build(); CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration) .using(new TimeSourceConfiguration(testTimeSource)) @@ -261,12 +261,12 @@ public void testEventFiringInCacheIterator() { cache.put(4L, "4"); cache.put(5L, "5"); assertThat(listener1.expired.get(), is(0)); - for(Cache.Entry entry : cache) { + for(Cache.Entry entry : cache) { logger.info("Iterating over key : ", entry.getKey()); } testTimeSource.setTimeMillis(2000); - for(Cache.Entry entry : cache) { + for(Cache.Entry entry : cache) { logger.info("Iterating over key : ", entry.getKey()); } @@ -282,7 +282,7 @@ public void testMultiThreadedSyncAsyncNotifications() throws InterruptedExceptio CacheConfiguration cacheConfiguration = newCacheConfigurationBuilder(Number.class, Number.class, newResourcePoolsBuilder().heap(10L, EntryUnit.ENTRIES)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))) .build(); CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration) @@ -405,7 +405,7 @@ public static class Listener implements CacheEventListener { @Override public void onEvent(CacheEvent event) { - Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EventNotificationTest"); + Logger logger = LoggerFactory.getLogger(Ehcache.class + "-" + "EventNotificationTest"); logger.info(event.getType().toString()); eventTypeHashMap.put(event.getType(), eventCounter.get()); eventCounter.getAndIncrement(); @@ -463,6 +463,9 @@ public void onEvent(final CacheEvent event) } public static class SerializableObject implements Serializable { + + private static final long serialVersionUID = 1L; + private int size; private Byte [] data; diff --git a/integration-test/src/test/java/org/ehcache/integration/EvictionEhcacheTest.java b/integration-test/src/test/java/org/ehcache/integration/EvictionEhcacheTest.java index 29a3a81cb2..9653c387a0 100644 --- a/integration-test/src/test/java/org/ehcache/integration/EvictionEhcacheTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/EvictionEhcacheTest.java @@ -28,8 +28,8 @@ import java.util.Map; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; /** * @author Ludovic Orban diff --git a/integration-test/src/test/java/org/ehcache/integration/ExpiryEhcacheTestBase.java b/integration-test/src/test/java/org/ehcache/integration/ExpiryEhcacheTestBase.java index e4cbb83594..f718351015 100644 --- a/integration-test/src/test/java/org/ehcache/integration/ExpiryEhcacheTestBase.java +++ b/integration-test/src/test/java/org/ehcache/integration/ExpiryEhcacheTestBase.java @@ -19,24 +19,23 @@ import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.impl.internal.TimeSourceConfiguration; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import org.junit.Test; +import java.time.Duration; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; -import java.util.concurrent.TimeUnit; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; /** * @author Ludovic Orban @@ -53,7 +52,7 @@ public void setUp() throws Exception { CacheManagerBuilder builder = CacheManagerBuilder.newCacheManagerBuilder().using(new TimeSourceConfiguration(manualTimeSource)); cacheManager = builder.build(true); CacheConfigurationBuilder objectObjectCacheConfigurationBuilder = CacheConfigurationBuilder.newCacheConfigurationBuilder(Number.class, CharSequence.class, heap(10)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))); testCache = cacheManager.createCache("testCache", objectObjectCacheConfigurationBuilder.build()); } diff --git a/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java b/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java index 8ae5a9b0ef..30fd427980 100644 --- a/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java @@ -20,18 +20,15 @@ import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.event.CacheEvent; -import org.ehcache.event.CacheEventListener; import org.ehcache.event.EventFiring; import org.ehcache.event.EventOrdering; import org.ehcache.event.EventType; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; import org.ehcache.impl.internal.TimeSourceConfiguration; import org.ehcache.impl.copy.SerializingCopier; import org.junit.After; @@ -41,10 +38,10 @@ import org.junit.rules.TemporaryFolder; import java.io.IOException; +import java.time.Duration; import java.util.EnumSet; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.TimeUnit; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.hamcrest.MatcherAssert.assertThat; @@ -60,10 +57,10 @@ public class ExpiryEventsTest { private static final CacheConfigurationBuilder byRefCacheConfigBuilder = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))); private static final CacheConfigurationBuilder byValueCacheConfigBuilder = - byRefCacheConfigBuilder.add(new DefaultCopierConfiguration<>( + byRefCacheConfigBuilder.withService(new DefaultCopierConfiguration<>( SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); private static final TestTimeSource testTimeSource = new TestTimeSource(); diff --git a/integration-test/src/test/java/org/ehcache/integration/LoaderWriterErrorEhcacheTest.java b/integration-test/src/test/java/org/ehcache/integration/LoaderWriterErrorEhcacheTest.java index 0333ca6284..f3c242a608 100644 --- a/integration-test/src/test/java/org/ehcache/integration/LoaderWriterErrorEhcacheTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/LoaderWriterErrorEhcacheTest.java @@ -29,8 +29,6 @@ import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentMatchers; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import java.util.Arrays; import java.util.HashMap; @@ -40,13 +38,12 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNull.notNullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyObject; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; @@ -63,7 +60,7 @@ public class LoaderWriterErrorEhcacheTest { private CacheManager cacheManager; private Cache testCache; - private CacheLoaderWriter cacheLoaderWriter; + private CacheLoaderWriter cacheLoaderWriter; @SuppressWarnings("unchecked") @Before @@ -72,7 +69,10 @@ public void setUp() throws Exception { cacheLoaderWriter = mock(CacheLoaderWriter.class); when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration) any())).thenReturn((CacheLoaderWriter) cacheLoaderWriter); cacheManager = newCacheManagerBuilder().using(cacheLoaderWriterProvider).build(true); - testCache = cacheManager.createCache("testCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Number.class, CharSequence.class, heap(10)).build()); + testCache = cacheManager.createCache("testCache", CacheConfigurationBuilder + .newCacheConfigurationBuilder(Number.class, CharSequence.class, heap(10)) + .withLoaderWriter(cacheLoaderWriter) + .build()); } @After diff --git a/integration-test/src/test/java/org/ehcache/integration/LoaderWriterSimpleEhcacheTest.java b/integration-test/src/test/java/org/ehcache/integration/LoaderWriterSimpleEhcacheTest.java index 5f2e2ea57d..6e942ec5b1 100644 --- a/integration-test/src/test/java/org/ehcache/integration/LoaderWriterSimpleEhcacheTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/LoaderWriterSimpleEhcacheTest.java @@ -29,9 +29,9 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; @@ -59,7 +59,10 @@ public void setUp() throws Exception { when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), org.mockito.ArgumentMatchers.>any())) .thenReturn(CacheLoaderWriter.class.cast(cacheLoaderWriter)); cacheManager = newCacheManagerBuilder().using(cacheLoaderWriterProvider).build(true); - testCache = cacheManager.createCache("testCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Number.class, CharSequence.class, heap(10)).build()); + testCache = cacheManager.createCache("testCache", CacheConfigurationBuilder + .newCacheConfigurationBuilder(Number.class, CharSequence.class, heap(10)) + .withLoaderWriter(cacheLoaderWriter) + .build()); } @After diff --git a/integration-test/src/test/java/org/ehcache/integration/OnHeapEvictionStrategyTest.java b/integration-test/src/test/java/org/ehcache/integration/OnHeapEvictionStrategyTest.java new file mode 100644 index 0000000000..d160090ef9 --- /dev/null +++ b/integration-test/src/test/java/org/ehcache/integration/OnHeapEvictionStrategyTest.java @@ -0,0 +1,174 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.integration; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.internal.TimeSourceConfiguration; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.time.Duration; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * @author Henri Tremblay + */ +public class OnHeapEvictionStrategyTest { + + private final TestTimeSource timeSource = new TestTimeSource(); + private final TimeSourceConfiguration timeSourceConfiguration = new TimeSourceConfiguration(timeSource); + + private CacheManager cacheManager; + + @Before + public void before() { + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .using(timeSourceConfiguration) + .build(true); + } + + @After + public void after() { + cacheManager.close(); + } + + @Test + public void noExpiryGet() { + Cache cache = createCache(ExpiryPolicyBuilder.noExpiration()); + + cache.put(1, "a"); + + timeSource.setTimeMillis(Long.MAX_VALUE); + + assertThat(cache.get(1)).isEqualTo("a"); + } + + @Test + public void ttlExpiryGet() { + Cache cache = createCache(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(100))); + + cache.put(1, "a"); + + assertThat(cache.get(1)).isEqualTo("a"); + + timeSource.setTimeMillis(100); + + assertThat(cache.get(1)).isNull(); + } + + @Test + public void ttiExpiryGet() { + Cache cache = createCache(ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(100))); + + cache.put(1, "a"); + + assertThat(cache.get(1)).isEqualTo("a"); + + timeSource.setTimeMillis(100); + + assertThat(cache.get(1)).isNull(); + } + + @Test + public void customExpiryGet() { + Cache cache = createCache( + ExpiryPolicyBuilder.expiry() + .create(ExpiryPolicy.INFINITE) + .update(Duration.ofMillis(100)) + .access((Duration) null) + .build()); + + cache.put(1, "a"); + + assertThat(cache.get(1)).isEqualTo("a"); + + cache.put(1, "b"); + + timeSource.setTimeMillis(100); + + assertThat(cache.get(1)).isNull(); + } + + @Test + public void noExpiryPut() { + Cache cache = createCache(ExpiryPolicyBuilder.noExpiration()); + + cache.put(1, "a"); + + timeSource.setTimeMillis(Long.MAX_VALUE); + + assertThat(cache.putIfAbsent(1, "b")).isEqualTo("a"); + } + + @Test + public void ttlExpiryPut() { + Cache cache = createCache(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(100))); + + cache.put(1, "a"); + + assertThat(cache.putIfAbsent(1, "b")).isEqualTo("a"); + + timeSource.setTimeMillis(100); + + assertThat(cache.putIfAbsent(1, "c")).isNull(); + } + + @Test + public void ttiExpiryPut() { + Cache cache = createCache(ExpiryPolicyBuilder.timeToIdleExpiration(Duration.ofMillis(100))); + + cache.put(1, "a"); + + assertThat(cache.putIfAbsent(1, "b")).isEqualTo("a"); + + timeSource.setTimeMillis(100); + + assertThat(cache.putIfAbsent(1, "c")).isNull(); + } + + @Test + public void customExpiryPut() { + Cache cache = createCache( + ExpiryPolicyBuilder.expiry() + .create(ExpiryPolicy.INFINITE) + .update(Duration.ofMillis(100)) + .access((Duration) null) + .build()); + + cache.put(1, "a"); // create + cache.put(1, "b"); // update that will expire + + timeSource.setTimeMillis(100); + + assertThat(cache.putIfAbsent(1, "d")).isNull(); // expires since update + } + + private Cache createCache(ExpiryPolicy expiryPolicy) { + return cacheManager.createCache("cache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, + ResourcePoolsBuilder.heap(10)) + .withExpiry(expiryPolicy)); + } +} diff --git a/integration-test/src/test/java/org/ehcache/integration/OsgiSafetyTest.java b/integration-test/src/test/java/org/ehcache/integration/OsgiSafetyTest.java new file mode 100644 index 0000000000..3728086e0b --- /dev/null +++ b/integration-test/src/test/java/org/ehcache/integration/OsgiSafetyTest.java @@ -0,0 +1,42 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.integration; + +import org.ehcache.core.osgi.SafeOsgi; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.fail; + +public class OsgiSafetyTest { + + @Test + public void testOsgiIsNotHere() { + try { + Class.forName("org.osgi.framework.Bundle"); + fail("Expected ClassNotFoundException"); + } catch (ClassNotFoundException e) { + //expected + } + } + + @Test + public void testSafeOsgiIsSafe() { + assertThat(SafeOsgi.useOSGiServiceLoading(), is(false)); + } +} diff --git a/integration-test/src/test/java/org/ehcache/integration/OverSizeMappingTest.java b/integration-test/src/test/java/org/ehcache/integration/OverSizeMappingTest.java index 42df81ec9f..efe5d90a5f 100644 --- a/integration-test/src/test/java/org/ehcache/integration/OverSizeMappingTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/OverSizeMappingTest.java @@ -17,6 +17,7 @@ package org.ehcache.integration; import java.io.Serializable; +import java.util.Arrays; import org.ehcache.Cache; import org.ehcache.CacheManager; @@ -59,7 +60,7 @@ public void testOverSizedObjectGetsReturnedFromLowerTier() { CacheConfiguration objectGraphSize = CacheConfigurationBuilder .newCacheConfigurationBuilder(String.class, ObjectSizeGreaterThanN.class, newResourcePoolsBuilder() - .heap(100, MemoryUnit.KB).offheap(100, MemoryUnit.MB).build()) + .heap(100, MemoryUnit.KB).offheap(10, MemoryUnit.MB).build()) .build(); Cache objectGraphSizeCache = cacheManager.createCache("objectGraphSize", @@ -103,6 +104,8 @@ private static ObjectSizeGreaterThanN getObjectSizeGreaterThanN(int n) { private static class ObjectSizeGreaterThanN implements Serializable { + private static final long serialVersionUID = 1L; + private final Integer[] arr; private ObjectSizeGreaterThanN(int n) { @@ -118,10 +121,12 @@ public boolean equals(Object obj) { if (this == obj) { return true; } - if (obj instanceof ObjectSizeGreaterThanN && this.arr.length == ((ObjectSizeGreaterThanN)obj).arr.length ) { - return true; - } - return false; + return obj instanceof ObjectSizeGreaterThanN && this.arr.length == ((ObjectSizeGreaterThanN)obj).arr.length; + } + + @Override + public int hashCode() { + return arr.length; } } } diff --git a/integration-test/src/test/java/org/ehcache/integration/PersistentCacheTest.java b/integration-test/src/test/java/org/ehcache/integration/PersistentCacheTest.java index e911aa3564..b1e72ca780 100644 --- a/integration-test/src/test/java/org/ehcache/integration/PersistentCacheTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/PersistentCacheTest.java @@ -26,8 +26,8 @@ import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.TemporaryFolder; import org.junit.rules.TestName; +import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.Serializable; @@ -138,6 +138,7 @@ public void testRecoverPersistentCacheSucceedsWhenConfiguringArrayClass() throws } @Test + @SuppressWarnings("try") public void testPersistentCachesColliding() throws Exception { File folder = temporaryFolder.newFolder(testName.getMethodName()); try (PersistentCacheManager cm = CacheManagerBuilder.newCacheManagerBuilder() @@ -174,6 +175,7 @@ public void testPersistentCachesCollidingCrossProcess() throws Exception { public static final class Locker { + @SuppressWarnings("try") public static void main(String[] args) throws Exception { File folder = new File(args[0]); File ping = new File(folder, "ping"); diff --git a/integration-test/src/test/java/org/ehcache/integration/PersistentUserManagedCacheTest.java b/integration-test/src/test/java/org/ehcache/integration/PersistentUserManagedCacheTest.java index e943c285b5..c3921d46c1 100644 --- a/integration-test/src/test/java/org/ehcache/integration/PersistentUserManagedCacheTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/PersistentUserManagedCacheTest.java @@ -32,8 +32,8 @@ import java.io.File; import java.io.Serializable; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; /** * PersistentUserManagedCacheTest @@ -77,6 +77,8 @@ public void recoversWithSerializableType() throws Exception { private static class Foo implements Serializable { + private static final long serialVersionUID = 1L; + private final int i; Foo(int i) { diff --git a/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java b/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java index 759f667ebc..0fa71ab5a6 100644 --- a/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java @@ -19,6 +19,7 @@ import org.ehcache.CacheManager; import org.ehcache.PersistentCacheManager; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.integration.domain.Person; @@ -28,7 +29,6 @@ import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.nio.ByteBuffer; @@ -37,15 +37,13 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.persistence; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class SerializersTest { @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); - @Rule - public ExpectedException expectedException = ExpectedException.none(); @Test public void testStatefulSerializer() throws Exception { @@ -83,7 +81,7 @@ private void testSerializerWithByRefHeapCache(Serializer serializer) throw CacheManagerBuilder cmBuilder = newCacheManagerBuilder() .withCache("heapByRefCache", - newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10)) + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) .withKeySerializer(serializer) ); cmBuilder.build(true); @@ -93,7 +91,7 @@ private void testSerializerWithByValueHeapCache(Serializer serializer) thr CacheManagerBuilder cmBuilder = newCacheManagerBuilder() .withCache("heapByValueCache", - newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10)) + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) .withKeyCopier(SerializingCopier.asCopierClass()) .withKeySerializer(serializer) ); @@ -114,7 +112,7 @@ private void testSerializerWithHeapOffheapCache(Serializer serializer) thr CacheManagerBuilder cmBuilder = newCacheManagerBuilder() .withCache("heapOffheapCache", - newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).offheap(2, MemoryUnit.MB)) + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(2, MemoryUnit.MB)) .withKeySerializer(serializer) ); cmBuilder.build(true); @@ -136,7 +134,7 @@ private void testSerializerWithHeapDiskCache(Serializer serializer) throws newCacheManagerBuilder() .with(persistence(temporaryFolder.newFolder().getAbsolutePath())) .withCache("heapDiskCache", - newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).disk(8, MemoryUnit.MB, true)) + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).disk(8, MemoryUnit.MB, true)) .withKeySerializer(serializer) ); cmBuilder.build(true); @@ -147,7 +145,7 @@ private void testSerializerWithThreeTierCache(Serializer serializer) throw newCacheManagerBuilder() .with(persistence(temporaryFolder.newFolder().getAbsolutePath())) .withCache("heapOffheapDiskCache", - newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).offheap(2, MemoryUnit.MB).disk(8, MemoryUnit.MB, true)) + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(2, MemoryUnit.MB).disk(8, MemoryUnit.MB, true)) .withKeySerializer(serializer) ); cmBuilder.build(true); diff --git a/integration-test/src/test/java/org/ehcache/integration/SimpleEhcacheTest.java b/integration-test/src/test/java/org/ehcache/integration/SimpleEhcacheTest.java index a60598073d..2ff0433ba4 100644 --- a/integration-test/src/test/java/org/ehcache/integration/SimpleEhcacheTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/SimpleEhcacheTest.java @@ -30,11 +30,11 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; /** * @author Ludovic Orban diff --git a/integration-test/src/test/java/org/ehcache/integration/StatefulSerializerWithStateRepositoryTest.java b/integration-test/src/test/java/org/ehcache/integration/StatefulSerializerWithStateRepositoryTest.java index 53f676400f..5b8f076b00 100644 --- a/integration-test/src/test/java/org/ehcache/integration/StatefulSerializerWithStateRepositoryTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/StatefulSerializerWithStateRepositoryTest.java @@ -30,8 +30,8 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.persistence; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class StatefulSerializerWithStateRepositoryTest { @@ -43,7 +43,7 @@ public void testStatefulSerializerWithDiskStateRepository() throws Exception { CacheManagerBuilder cmBuilder = newCacheManagerBuilder().with(persistence(temporaryFolder.newFolder() .getAbsolutePath())) .withCache("myCache", newCacheConfigurationBuilder(Long.class, Person.class, heap(10).disk(50, MemoryUnit.MB, true)) - .withValueSerializer((Class) CompactJavaSerializer.class)); + .withValueSerializer(CompactJavaSerializer.asTypedSerializer())); PersistentCacheManager cacheManager = cmBuilder.build(true); Cache myCache = cacheManager.getCache("myCache", Long.class, Person.class); @@ -65,7 +65,7 @@ public void testStatefulSerializerWithDiskStateRepositoryDifferentPersistenceSer CacheManagerBuilder cmBuilder = newCacheManagerBuilder().with(persistence(temporaryFolder.newFolder() .getAbsolutePath())) .withCache("myCache", newCacheConfigurationBuilder(Long.class, Person.class, heap(10).disk(50, MemoryUnit.MB, true)) - .withValueSerializer((Class) CompactJavaSerializer.class)); + .withValueSerializer(CompactJavaSerializer.asTypedSerializer())); PersistentCacheManager cacheManager = cmBuilder.build(true); Cache myCache = cacheManager.getCache("myCache", Long.class, Person.class); diff --git a/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java b/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java index 9a71a98739..e086204646 100644 --- a/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java @@ -21,6 +21,7 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; import org.ehcache.core.statistics.CachingTierOperationOutcomes; import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome; @@ -38,16 +39,15 @@ import java.io.File; import java.io.IOException; -import java.net.URISyntaxException; import java.util.Collections; import java.util.Map; import java.util.Set; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; import static org.terracotta.context.query.Matchers.attributes; import static org.terracotta.context.query.Matchers.context; import static org.terracotta.context.query.Matchers.hasAttribute; @@ -66,47 +66,63 @@ public class StoreStatisticsTest { @Test public void test1TierStoreStatsAvailableInContextManager() throws Exception { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("threeTieredCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(1)) - ).build(true); + .withService(new StoreStatisticsConfiguration(true)) // explicitly enable statistics + ).build(true)) { - Cache cache = cacheManager.getCache("threeTieredCache", Long.class, String.class); + Cache cache = cacheManager.getCache("threeTieredCache", Long.class, String.class); - assertNull(cache.get(0L)); + assertNull(cache.get(0L)); - long onHeapMisses = StoreStatisticsTest.findStat(cache, "get", "OnHeap").count(StoreOperationOutcomes.GetOutcome.MISS); - assertThat(onHeapMisses, equalTo(1L)); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "get", "OnHeap").count(StoreOperationOutcomes.GetOutcome.MISS); + assertThat(onHeapMisses, equalTo(1L)); + } + } + + @Test + public void test1TierStoreStatsAvailableInContextManager_disabledByDefault() throws Exception { + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("threeTieredCache", + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(1)) + ).build(true)) { + + Cache cache = cacheManager.getCache("threeTieredCache", Long.class, String.class); + + assertNull(cache.get(0L)); - cacheManager.close(); + assertNull("Statistics are disabled so nothing is expected here", StoreStatisticsTest.findStat(cache, "get", "OnHeap")); + } } @Test public void test2TiersStoreStatsAvailableInContextManager() throws Exception { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + try(CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("threeTieredCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() .heap(1, MemoryUnit.MB) .offheap(2, MemoryUnit.MB) ) - ).build(true); - - Cache cache = cacheManager.getCache("threeTieredCache", Long.class, String.class); + ).build(true)) { - assertNull(cache.get(0L)); + Cache cache = cacheManager.getCache("threeTieredCache", Long.class, String.class); - long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "OnHeap").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); - assertThat(onHeapMisses, equalTo(1L)); - long offheapMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "OffHeap").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); - assertThat(offheapMisses, equalTo(1L)); + assertNull(cache.get(0L)); - cacheManager.close(); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "OnHeap") + .count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + assertThat(onHeapMisses, equalTo(1L)); + long offheapMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "OffHeap") + .count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + assertThat(offheapMisses, equalTo(1L)); + } } @Test public void test3TiersStoreStatsAvailableInContextManager() throws Exception { - PersistentCacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + try(PersistentCacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(new File(getStoragePath(), "StoreStatisticsTest"))) .withCache("threeTieredCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, @@ -115,20 +131,21 @@ public void test3TiersStoreStatsAvailableInContextManager() throws Exception { .offheap(2, MemoryUnit.MB) .disk(5, MemoryUnit.MB) ) - ).build(true); + ).build(true)) { - Cache cache = cacheManager.getCache("threeTieredCache", Long.class, String.class); + Cache cache = cacheManager.getCache("threeTieredCache", Long.class, String.class); - assertNull(cache.get(0L)); + assertNull(cache.get(0L)); - long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "OnHeap").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); - assertThat(onHeapMisses, equalTo(1L)); - long offHeapMisses = StoreStatisticsTest.findStat(cache, "getAndRemove", "OffHeap").count(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS); - assertThat(offHeapMisses, equalTo(1L)); - long diskMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "Disk").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); - assertThat(diskMisses, equalTo(1L)); - - cacheManager.close(); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "OnHeap") + .count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + assertThat(onHeapMisses, equalTo(1L)); + long offHeapMisses = StoreStatisticsTest.findStat(cache, "getAndRemove", "OffHeap") + .count(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS); + assertThat(offHeapMisses, equalTo(1L)); + long diskMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "Disk").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + assertThat(diskMisses, equalTo(1L)); + } } @SuppressWarnings("unchecked") @@ -147,12 +164,16 @@ protected boolean matchesSafely(Set object) { } }))))).build().execute(operationStatisticNodes); - if (result.size() != 1) { - throw new RuntimeException("query for unique stat '" + statName + "' with tag '" + tag + "' failed; found " + result.size() + " instance(s)"); + switch (result.size()) { + case 0: + return null; + case 1: { + TreeNode node = result.iterator().next(); + return (OperationStatistic) node.getContext().attributes().get("this"); + } + default: + throw new RuntimeException("query for unique stat '" + statName + "' with tag '" + tag + "' failed; found " + result.size() + " instance(s)"); } - - TreeNode node = result.iterator().next(); - return (OperationStatistic) node.getContext().attributes().get("this"); } private String getStoragePath() throws IOException { diff --git a/integration-test/src/test/java/org/ehcache/integration/TieringTest.java b/integration-test/src/test/java/org/ehcache/integration/TieringTest.java index 33cedcba2e..8ee077ba78 100644 --- a/integration-test/src/test/java/org/ehcache/integration/TieringTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/TieringTest.java @@ -24,8 +24,8 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** diff --git a/integration-test/src/test/java/org/ehcache/integration/UserManagedCacheEvictionTest.java b/integration-test/src/test/java/org/ehcache/integration/UserManagedCacheEvictionTest.java index e5e47cdaeb..7cf3387d73 100644 --- a/integration-test/src/test/java/org/ehcache/integration/UserManagedCacheEvictionTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/UserManagedCacheEvictionTest.java @@ -24,7 +24,7 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; /** * @author Anthony Dahanne diff --git a/integration-test/src/test/java/org/ehcache/integration/UserManagedCacheLoaderWriterTest.java b/integration-test/src/test/java/org/ehcache/integration/UserManagedCacheLoaderWriterTest.java new file mode 100644 index 0000000000..438a639649 --- /dev/null +++ b/integration-test/src/test/java/org/ehcache/integration/UserManagedCacheLoaderWriterTest.java @@ -0,0 +1,52 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.integration; + +import org.ehcache.UserManagedCache; +import org.ehcache.config.builders.UserManagedCacheBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.hamcrest.Matchers; +import org.junit.Test; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class UserManagedCacheLoaderWriterTest { + + @SuppressWarnings("unchecked") + @Test + public void testLoaderWriterWithUserManagedCache() throws Exception { + CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); + + UserManagedCache userManagedCache = UserManagedCacheBuilder.newUserManagedCacheBuilder(Long.class, Long.class) + .withResourcePools(newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES)) + .withLoaderWriter(cacheLoaderWriter).build(true); + + userManagedCache.put(1L, 1L); + verify(cacheLoaderWriter, times(1)).write(eq(1L), eq(1L)); + + when(cacheLoaderWriter.load(anyLong())).thenReturn(2L); + assertThat(userManagedCache.get(2L), Matchers.is(2L)); + } +} diff --git a/integration-test/src/test/java/org/ehcache/integration/domain/Person.java b/integration-test/src/test/java/org/ehcache/integration/domain/Person.java index fa2609edac..9e4ae87e7a 100644 --- a/integration-test/src/test/java/org/ehcache/integration/domain/Person.java +++ b/integration-test/src/test/java/org/ehcache/integration/domain/Person.java @@ -19,6 +19,9 @@ import java.io.Serializable; public class Person implements Serializable { + + private static final long serialVersionUID = 1L; + private final int age; private final String name; diff --git a/integration-test/src/test/java/org/ehcache/integration/statistics/AbstractCacheCalculationTest.java b/integration-test/src/test/java/org/ehcache/integration/statistics/AbstractCacheCalculationTest.java index 3bab93e7da..e5cfc61d2b 100644 --- a/integration-test/src/test/java/org/ehcache/integration/statistics/AbstractCacheCalculationTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/statistics/AbstractCacheCalculationTest.java @@ -18,13 +18,13 @@ import java.util.Arrays; import java.util.Collection; +import org.assertj.core.api.SoftAssertions; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.core.statistics.CacheStatistics; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import static org.assertj.core.api.Assertions.assertThat; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.MB; @@ -79,10 +79,13 @@ public static Collection data() { * @param remove how many removes should have happened */ protected void changesOf(long hit, long miss, long put, long remove) { - assertThat(cacheStatistics.getCacheHits() - hitCount).as("Hits").isEqualTo(hit); - assertThat(cacheStatistics.getCacheMisses() - missCount).as("Misses").isEqualTo(miss); - assertThat(cacheStatistics.getCachePuts() - putCount).as("Puts").isEqualTo(put); - assertThat(cacheStatistics.getCacheRemovals() - removalCount).as("Removals").isEqualTo(remove); + SoftAssertions softly = new SoftAssertions(); + softly.assertThat(cacheStatistics.getCacheHits() - hitCount).as("Hits").isEqualTo(hit); + softly.assertThat(cacheStatistics.getCacheMisses() - missCount).as("Misses").isEqualTo(miss); + softly.assertThat(cacheStatistics.getCachePuts() - putCount).as("Puts").isEqualTo(put); + softly.assertThat(cacheStatistics.getCacheRemovals() - removalCount).as("Removals").isEqualTo(remove); + softly.assertAll(); + hitCount += hit; missCount += miss; putCount += put; diff --git a/integration-test/src/test/java/org/ehcache/integration/statistics/CacheCalculationTest.java b/integration-test/src/test/java/org/ehcache/integration/statistics/CacheCalculationTest.java index f833cb5321..be63a91bb2 100644 --- a/integration-test/src/test/java/org/ehcache/integration/statistics/CacheCalculationTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/statistics/CacheCalculationTest.java @@ -30,7 +30,7 @@ import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; -import org.ehcache.impl.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -110,6 +110,7 @@ public void get() { changesOf(1, 0, 0, 0); } + @SuppressWarnings("unchecked") @Test public void getAll() { expect(cache.getAll(asSet(1))).containsExactly(MapEntry.entry(1, null)); @@ -130,10 +131,10 @@ public void iterator() { changesOf(0, 0, 2, 0); Iterator> iterator = cache.iterator(); - changesOf(1, 0, 0, 0); // FIXME Why one?!? + changesOf(0, 0, 0, 0); iterator.next().getKey(); - changesOf(2, 0, 0, 0); // FIXME Why two?!? + changesOf(1, 0, 0, 0); expect(iterator.hasNext()).isTrue(); changesOf(0, 0, 0, 0); @@ -156,7 +157,7 @@ public void foreach() { changesOf(0, 0, 3, 0); cache.forEach(e -> {}); - changesOf(6, 0, 0, 0); // FIXME counted twice but works for JCache + changesOf(3, 0, 0, 0); } @Test @@ -167,7 +168,7 @@ public void spliterator() { changesOf(0, 0, 3, 0); StreamSupport.stream(cache.spliterator(), false).forEach(e -> {}); - changesOf(6, 0, 0, 0); // FIXME counted twice but works for JCache + changesOf(3, 0, 0, 0); } @Test diff --git a/integration-test/src/test/java/org/ehcache/integration/statistics/ExistEntryProcessor.java b/integration-test/src/test/java/org/ehcache/integration/statistics/ExistEntryProcessor.java index e967e1830b..6005962758 100644 --- a/integration-test/src/test/java/org/ehcache/integration/statistics/ExistEntryProcessor.java +++ b/integration-test/src/test/java/org/ehcache/integration/statistics/ExistEntryProcessor.java @@ -22,6 +22,8 @@ public class ExistEntryProcessor implements EntryProcessor, Serializable { + private static final long serialVersionUID = 1L; + /** * {@inheritDoc} */ diff --git a/integration-test/src/test/java/org/ehcache/integration/statistics/TierCalculationTest.java b/integration-test/src/test/java/org/ehcache/integration/statistics/TierCalculationTest.java index 58871de167..c590a6096d 100644 --- a/integration-test/src/test/java/org/ehcache/integration/statistics/TierCalculationTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/statistics/TierCalculationTest.java @@ -15,11 +15,11 @@ */ package org.ehcache.integration.statistics; +import java.time.Duration; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; -import java.util.concurrent.TimeUnit; import org.assertj.core.data.MapEntry; import org.ehcache.Cache; @@ -27,13 +27,13 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.core.config.store.StoreStatisticsConfiguration; import org.ehcache.core.spi.service.StatisticsService; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.impl.internal.TimeSourceConfiguration; -import org.ehcache.impl.internal.statistics.DefaultStatisticsService; +import org.ehcache.core.internal.statistics.DefaultStatisticsService; import org.ehcache.integration.TestTimeSource; import org.junit.After; import org.junit.Before; @@ -65,7 +65,8 @@ public void before() throws Exception { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder .newCacheConfigurationBuilder(Integer.class, String.class, resources) - .withExpiry(Expirations.timeToLiveExpiration(Duration.of(TIME_TO_EXPIRATION, TimeUnit.MILLISECONDS))) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMillis(TIME_TO_EXPIRATION))) + .withService(new StoreStatisticsConfiguration(true)) // explicitly enable statistics .build(); StatisticsService statisticsService = new DefaultStatisticsService(); @@ -127,6 +128,7 @@ public void get() { changesOf(1, 0, 0, 0); } + @SuppressWarnings("unchecked") @Test public void getAll() { expect(cache.getAll(asSet(1))).containsExactly(MapEntry.entry(1, null)); @@ -147,16 +149,16 @@ public void iterator() { changesOf(0, 0, 2, 0); Iterator> iterator = cache.iterator(); - changesOf(1, 0, 0, 0); // FIXME Why one?!? + changesOf(0, 0, 0, 0); iterator.next().getKey(); - changesOf(1, 0, 0, 0); // FIXME One hit and on the cache we have two + changesOf(0, 0, 0, 0); expect(iterator.hasNext()).isTrue(); changesOf(0, 0, 0, 0); iterator.next().getKey(); - changesOf(0, 0, 0, 0); // FIXME No hit on a next + changesOf(0, 0, 0, 0); expect(iterator.hasNext()).isFalse(); changesOf(0, 0, 0, 0); @@ -291,15 +293,6 @@ public void testMappingCount() { assertThat(tierStatistics.getMappings()).isEqualTo(1); } - @Test - public void testMaxMappingCount() { - assertThat(tierStatistics.getMaxMappings()).isEqualTo(-1); // FIXME Shouldn't it be 0? - cache.put(1, "a"); - cache.put(2, "b"); - cache.remove(1); - assertThat(tierStatistics.getMappings()).isEqualTo(1); // FIXME: I was expecting 2 - } - @Test public void testAllocatedByteSize() { assumeFalse(tierName.equals("OnHeap")); // FIXME: Not calculated for OnHeap when a size is allocated diff --git a/integration-test/src/test/java/org/ehcache/integration/transactions/xa/SampleLoaderWriter.java b/integration-test/src/test/java/org/ehcache/integration/transactions/xa/SampleLoaderWriter.java index 9c01913d09..d5aaf5e370 100644 --- a/integration-test/src/test/java/org/ehcache/integration/transactions/xa/SampleLoaderWriter.java +++ b/integration-test/src/test/java/org/ehcache/integration/transactions/xa/SampleLoaderWriter.java @@ -48,7 +48,7 @@ public void clear() { } @Override - public V load(K key) throws Exception { + public V load(K key) { lock.readLock().lock(); try { V value = data.get(key); @@ -60,12 +60,12 @@ public V load(K key) throws Exception { } @Override - public Map loadAll(Iterable keys) throws Exception { + public Map loadAll(Iterable keys) { throw new UnsupportedOperationException("Implement me!"); } @Override - public void write(K key, V value) throws Exception { + public void write(K key, V value) { lock.writeLock().lock(); try { data.put(key, value); @@ -76,7 +76,7 @@ public void write(K key, V value) throws Exception { } @Override - public void writeAll(Iterable> entries) throws BulkCacheWritingException, Exception { + public void writeAll(Iterable> entries) { lock.writeLock().lock(); try { for (Map.Entry entry : entries) { @@ -89,7 +89,7 @@ public void writeAll(Iterable> ent } @Override - public void delete(K key) throws Exception { + public void delete(K key) { lock.writeLock().lock(); try { data.remove(key); @@ -100,7 +100,7 @@ public void delete(K key) throws Exception { } @Override - public void deleteAll(Iterable keys) throws BulkCacheWritingException, Exception { + public void deleteAll(Iterable keys) { lock.writeLock().lock(); try { for (K key : keys) { diff --git a/integration-test/src/test/java/org/ehcache/integration/transactions/xa/XACacheTest.java b/integration-test/src/test/java/org/ehcache/integration/transactions/xa/XACacheTest.java index 9e145356fa..5d5f159fd1 100644 --- a/integration-test/src/test/java/org/ehcache/integration/transactions/xa/XACacheTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/transactions/xa/XACacheTest.java @@ -22,11 +22,10 @@ import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ExpiryPolicyBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.spi.time.TimeSource; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.impl.internal.DefaultTimeSourceService; @@ -46,7 +45,7 @@ import javax.transaction.Status; import javax.transaction.Transaction; import java.io.File; -import java.io.IOException; +import java.time.Duration; import java.util.HashMap; import java.util.Iterator; import java.util.Map; @@ -55,11 +54,11 @@ import java.util.concurrent.atomic.AtomicReference; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; /** @@ -114,8 +113,8 @@ public void testEndToEnd() throws Exception { cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) - .withCache("txCache1", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache1")).build()) - .withCache("txCache2", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache2")).build()) + .withCache("txCache1", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache1")).build()) + .withCache("txCache2", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache2")).build()) .withCache("nonTxCache", cacheConfigurationBuilder.build()) .build(true); @@ -177,8 +176,8 @@ public void testRecoveryWithInflightTx() throws Exception { .offheap(10, MemoryUnit.MB)); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("txCache1", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache1")).build()) - .withCache("txCache2", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache2")).build()) + .withCache("txCache1", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache1")).build()) + .withCache("txCache2", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache2")).build()) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) .build(true); @@ -219,8 +218,8 @@ public void testRecoveryAfterCrash() throws Exception { cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(getStoragePath())) - .withCache("txCache1", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache1")).build()) - .withCache("txCache2", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache2")).build()) + .withCache("txCache1", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache1")).build()) + .withCache("txCache2", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache2")).build()) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) .build(true); @@ -265,6 +264,7 @@ public void testRecoveryAfterCrash() throws Exception { } static class AbortError extends Error { + private static final long serialVersionUID = 1L; } @Test @@ -273,11 +273,11 @@ public void testExpiry() throws Exception { newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(10, MemoryUnit.MB)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("txCache1", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache1")).build()) - .withCache("txCache2", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache2")).build()) + .withCache("txCache1", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache1")).build()) + .withCache("txCache2", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache2")).build()) .using(new DefaultTimeSourceService(new TimeSourceConfiguration(testTimeSource))) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) .build(true); @@ -321,15 +321,15 @@ public void testCopiers() throws Exception { cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(getStoragePath())) .withCache("txCache1", cacheConfigurationBuilder - .add(new XAStoreConfiguration("txCache1")) - .add(new DefaultCopierConfiguration<>(LongCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .withService(new XAStoreConfiguration("txCache1")) + .withService(new DefaultCopierConfiguration<>(LongCopier.class, DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) .build() ) .withCache("txCache2", cacheConfigurationBuilder - .add(new XAStoreConfiguration("txCache2")) - .add(new DefaultCopierConfiguration<>(LongCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .withService(new XAStoreConfiguration("txCache2")) + .withService(new DefaultCopierConfiguration<>(LongCopier.class, DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) .build()) .using(new DefaultTimeSourceService(new TimeSourceConfiguration(testTimeSource))) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) @@ -372,15 +372,15 @@ public void testTimeout() throws Exception { cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(getStoragePath())) .withCache("txCache1", cacheConfigurationBuilder - .add(new XAStoreConfiguration("txCache1")) - .add(new DefaultCopierConfiguration<>(LongCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .withService(new XAStoreConfiguration("txCache1")) + .withService(new DefaultCopierConfiguration<>(LongCopier.class, DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) .build() ) .withCache("txCache2", cacheConfigurationBuilder - .add(new XAStoreConfiguration("txCache2")) - .add(new DefaultCopierConfiguration<>(LongCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .withService(new XAStoreConfiguration("txCache2")) + .withService(new DefaultCopierConfiguration<>(LongCopier.class, DefaultCopierConfiguration.Type.KEY)) + .withService(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) .build()) .using(new DefaultTimeSourceService(new TimeSourceConfiguration(testTimeSource))) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) @@ -431,11 +431,11 @@ public void testConcurrentTx() throws Exception { newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(10, MemoryUnit.MB)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("txCache1", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache1")).build()) - .withCache("txCache2", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache2")).build()) + .withCache("txCache1", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache1")).build()) + .withCache("txCache2", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache2")).build()) .using(new DefaultTimeSourceService(new TimeSourceConfiguration(testTimeSource))) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) .build(true); @@ -496,10 +496,10 @@ public void testAtomicsWithoutLoaderWriter() throws Exception { .heap(10, EntryUnit.ENTRIES) .offheap(10, MemoryUnit.MB) ) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("txCache1", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache1")).build()) + .withCache("txCache1", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache1")).build()) .using(new DefaultTimeSourceService(new TimeSourceConfiguration(testTimeSource))) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) .build(true); @@ -524,11 +524,11 @@ public void testAtomicsWithLoaderWriter() throws Exception { newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(10, MemoryUnit.MB)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))) + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))) .withLoaderWriter(loaderWriter); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("txCache1", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache1")).build()) + .withCache("txCache1", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache1")).build()) .using(new DefaultTimeSourceService(new TimeSourceConfiguration(testTimeSource))) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) .build(true); @@ -674,10 +674,10 @@ public void testIterate() throws Throwable { newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(10, MemoryUnit.MB)) - .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS))); + .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(1))); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("txCache1", cacheConfigurationBuilder.add(new XAStoreConfiguration("txCache1")).build()) + .withCache("txCache1", cacheConfigurationBuilder.withService(new XAStoreConfiguration("txCache1")).build()) .using(new DefaultTimeSourceService(new TimeSourceConfiguration(testTimeSource))) .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) .build(true); diff --git a/integration-test/src/test/java/org/ehcache/integration/util/JavaExec.java b/integration-test/src/test/java/org/ehcache/integration/util/JavaExec.java index a26b43e107..55bed3fd4a 100644 --- a/integration-test/src/test/java/org/ehcache/integration/util/JavaExec.java +++ b/integration-test/src/test/java/org/ehcache/integration/util/JavaExec.java @@ -26,7 +26,7 @@ public final class JavaExec { private JavaExec() { } - public static CompletableFuture exec(Class klass, String ... args) throws IOException, InterruptedException { + public static CompletableFuture exec(Class klass, String ... args) throws IOException, InterruptedException { String javaHome = System.getProperty("java.home"); String javaBin = javaHome + separator + "bin" + separator + "java"; String classpath = System.getProperty("java.class.path"); diff --git a/integration-test/src/test/resources/configs/simple-xa.xml b/integration-test/src/test/resources/configs/simple-xa.xml index 4efbc3e835..ba09416aba 100644 --- a/integration-test/src/test/resources/configs/simple-xa.xml +++ b/integration-test/src/test/resources/configs/simple-xa.xml @@ -14,11 +14,8 @@ ~ limitations under the License. --> + xmlns:tx='http://www.ehcache.org/v3/tx'> diff --git a/management/.gitignore b/management/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/management/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/management/build.gradle b/management/build.gradle deleted file mode 100644 index 380b668c99..0000000000 --- a/management/build.gradle +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: EhDeploy - -dependencies { - // optional: if we want xml config - compileOnly project(':xml') - - // optional: if we want to use the clustered management layer - compileOnly project(':clustered:client') - compileOnly "org.terracotta:entity-client-api:$terracottaApisVersion" - compileOnly "org.terracotta.management.dist:mnm-nms-agent:$terracottaPlatformVersion" - - compileOnly project(':api') - compileOnly project(':core') - compileOnly project(':impl') - testCompile ("org.terracotta.management:management-registry:$terracottaPlatformVersion") { - exclude group: 'org.terracotta', module: 'statistics' - } - - testCompile project(':xml') - testCompile "com.fasterxml.jackson.core:jackson-databind:$jacksonVersion" -} - -test { - if (testJava.javaVersion.isJava9Compatible()) { - jvmArgs += ['--add-modules', 'java.xml.bind'] - } -} - -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} diff --git a/management/gradle.properties b/management/gradle.properties deleted file mode 100644 index 226794f83c..0000000000 --- a/management/gradle.properties +++ /dev/null @@ -1,3 +0,0 @@ -subPomName = Ehcache 3 Management and Monitoring module -subPomDesc = The Management and Monitoring module of Ehcache 3 -osgi = {"Import-Package" : ["!sun.misc.*", "!sun.security.action.*"]} diff --git a/management/src/main/java/org/ehcache/management/ManagementRegistryServiceConfiguration.java b/management/src/main/java/org/ehcache/management/ManagementRegistryServiceConfiguration.java deleted file mode 100644 index 56cb8a004a..0000000000 --- a/management/src/main/java/org/ehcache/management/ManagementRegistryServiceConfiguration.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management; - -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.terracotta.management.model.context.Context; - -import java.util.Collection; - -/** - * Configuration interface for a {@link ManagementRegistryService}. - */ -public interface ManagementRegistryServiceConfiguration extends ServiceCreationConfiguration { - - /** - * The context used to identify this cache manager - */ - Context getContext(); - - /** - * Gets the alias of the executor to use for asynchronous collector service tasks. - * - * @return The static colector executor alias - */ - String getCollectorExecutorAlias(); - - /** - * The users tags that can be used to filter this client's management registry amongst others - */ - Collection getTags(); - -} diff --git a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java deleted file mode 100644 index 00b666d05a..0000000000 --- a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.cluster; - -import org.ehcache.Cache; -import org.ehcache.StateTransitionException; -import org.ehcache.Status; -import org.ehcache.clustered.client.service.ClientEntityFactory; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.client.service.EntityService; -import org.ehcache.core.events.CacheManagerListener; -import org.ehcache.core.spi.service.CacheManagerProviderService; -import org.ehcache.core.spi.service.ExecutionService; -import org.ehcache.core.spi.store.InternalCacheManager; -import org.ehcache.core.spi.time.TimeSourceService; -import org.ehcache.management.CollectorService; -import org.ehcache.management.ManagementRegistryService; -import org.ehcache.management.registry.DefaultCollectorService; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.spi.service.ServiceProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.exception.EntityNotFoundException; -import org.terracotta.management.entity.nms.agent.client.NmsAgentEntity; -import org.terracotta.management.entity.nms.agent.client.NmsAgentService; -import org.terracotta.management.model.notification.ContextualNotification; -import org.terracotta.management.model.stats.ContextualStatistics; - -import java.util.Collection; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; - -@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, TimeSourceService.class, ManagementRegistryService.class, EntityService.class, ClusteringService.class}) -public class DefaultClusteringManagementService implements ClusteringManagementService, CacheManagerListener, CollectorService.Collector { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultClusteringManagementService.class); - - private final ClusteringManagementServiceConfiguration configuration; - - private volatile ManagementRegistryService managementRegistryService; - private volatile CollectorService collectorService; - private volatile NmsAgentService nmsAgentService; - private volatile ClientEntityFactory nmsAgentFactory; - private volatile InternalCacheManager cacheManager; - private volatile ExecutorService managementCallExecutor; - private volatile ClusteringService clusteringService; - - public DefaultClusteringManagementService() { - this(new DefaultClusteringManagementServiceConfiguration()); - } - - public DefaultClusteringManagementService(ClusteringManagementServiceConfiguration configuration) { - this.configuration = configuration == null ? new DefaultClusteringManagementServiceConfiguration() : configuration; - } - - @Override - public void start(ServiceProvider serviceProvider) { - this.clusteringService = serviceProvider.getService(ClusteringService.class); - this.managementRegistryService = serviceProvider.getService(ManagementRegistryService.class); - this.cacheManager = serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); - // get an ordered executor to keep ordering of management call requests - this.managementCallExecutor = serviceProvider.getService(ExecutionService.class).getOrderedExecutor( - configuration.getManagementCallExecutorAlias(), - new ArrayBlockingQueue<>(configuration.getManagementCallQueueSize())); - - this.collectorService = new DefaultCollectorService(this); - this.collectorService.start(serviceProvider); - - EntityService entityService = serviceProvider.getService(EntityService.class); - this.nmsAgentFactory = entityService.newClientEntityFactory("NmsAgent", NmsAgentEntity.class, 1, null); - - this.cacheManager.registerListener(this); - } - - @Override - public void stop() { - if(collectorService != null) { - collectorService.stop(); - } - shutdownNow(managementCallExecutor); - - // nullify so that no further actions are done with them (see null-checks below) - if(nmsAgentService != null) { - nmsAgentService.close(); - managementRegistryService = null; - } - nmsAgentService = null; - managementCallExecutor = null; - } - - @Override - public void cacheAdded(String alias, Cache cache) { - } - - @Override - public void cacheRemoved(String alias, Cache cache) { - } - - @Override - public void stateTransition(Status from, Status to) { - // we are only interested when cache manager is initializing (but at the end of the initialization) - switch (to) { - - case AVAILABLE: { - // create / fetch the management entity - NmsAgentEntity nmsAgentEntity; - try { - nmsAgentEntity = nmsAgentFactory.retrieve(); - } catch (EntityNotFoundException e) { - // should never occur because entity is permanent - throw (AssertionError) new AssertionError("Entity " + NmsAgentEntity.class.getSimpleName() + " not found").initCause(e.getCause()); - } - nmsAgentService = new NmsAgentService(nmsAgentEntity); - nmsAgentService.setOperationTimeout(configuration.getManagementCallTimeoutSec(), TimeUnit.SECONDS); - nmsAgentService.setManagementRegistry(managementRegistryService); - // setup the executor that will handle the management call requests received from the server. We log failures. - nmsAgentService.setManagementCallExecutor(new LoggingExecutor( - managementCallExecutor, - LoggerFactory.getLogger(getClass().getName() + ".managementCallExecutor"))); - - try { - nmsAgentService.init(); - // expose tags - nmsAgentService.setTags(managementRegistryService.getConfiguration().getTags()); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new StateTransitionException(e); - } catch (ExecutionException e) { - throw new StateTransitionException(e.getCause()); - } catch (TimeoutException e) { - throw new StateTransitionException(e); - } - - break; - } - - - case UNINITIALIZED: { - this.cacheManager.deregisterListener(this); - break; - } - - case MAINTENANCE: - // in case we need management capabilities in maintenance mode - break; - - default: - throw new AssertionError("Unsupported state: " + to); - } - } - - @Override - public void onNotification(ContextualNotification notification) { - NmsAgentService service = nmsAgentService; - if (service != null && clusteringService.isConnected()) { - try { - service.pushNotification(notification); - } catch (InterruptedException e) { - LOGGER.error("Failed to push notification " + notification + ": " + e.getMessage(), e); - Thread.currentThread().interrupt(); - } catch (Exception e) { - LOGGER.error("Failed to push notification " + notification + ": " + e.getMessage(), e); - } - } - } - - @Override - public void onStatistics(Collection statistics) { - NmsAgentService service = nmsAgentService; - if (service != null && clusteringService.isConnected()) { - try { - service.pushStatistics(statistics); - } catch (InterruptedException e) { - LOGGER.error("Failed to push statistics " + statistics + ": " + e.getMessage(), e); - Thread.currentThread().interrupt(); - } catch (Exception e) { - LOGGER.error("Failed to push statistics " + statistics + ": " + e.getMessage(), e); - } - } - } - -} diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java deleted file mode 100644 index e700e181b4..0000000000 --- a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.providers.statistics; - -import org.ehcache.core.spi.service.StatisticsService; -import org.ehcache.management.ManagementRegistryServiceConfiguration; -import org.ehcache.management.providers.CacheBinding; -import org.ehcache.management.providers.CacheBindingManagementProvider; -import org.ehcache.management.providers.ExposedCacheBinding; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.context.Context; -import org.terracotta.management.registry.Named; -import org.terracotta.management.registry.ExposedObject; -import org.terracotta.management.registry.collect.StatisticProvider; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -@Named("StatisticsCapability") -@StatisticProvider -public class EhcacheStatisticsProvider extends CacheBindingManagementProvider { - - private static final Comparator STATISTIC_DESCRIPTOR_COMPARATOR = Comparator.comparing(StatisticDescriptor::getName); - - private final StatisticsService statisticsService; - - public EhcacheStatisticsProvider(ManagementRegistryServiceConfiguration configuration, StatisticsService statisticsService) { - super(configuration); - this.statisticsService = statisticsService; - } - - @Override - protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { - return new StandardEhcacheStatistics(registryConfiguration, cacheBinding, statisticsService); - } - - @Override - public final Collection getDescriptors() { - Collection capabilities = new HashSet<>(); - for (ExposedObject o : getExposedObjects()) { - capabilities.addAll(((StandardEhcacheStatistics) o).getDescriptors()); - } - List list = new ArrayList<>(capabilities); - Collections.sort(list, STATISTIC_DESCRIPTOR_COMPARATOR); - return list; - } - - @Override - public Map collectStatistics(Context context, Collection statisticNames) { - StandardEhcacheStatistics ehcacheStatistics = (StandardEhcacheStatistics) findExposedObject(context); - if (ehcacheStatistics != null) { - if (statisticNames == null || statisticNames.isEmpty()) { - return ehcacheStatistics.queryStatistics(); - } else { - Map statistics = new TreeMap<>(); - for (String statisticName : statisticNames) { - try { - statistics.put(statisticName, ehcacheStatistics.queryStatistic(statisticName)); - } catch (IllegalArgumentException ignored) { - // ignore when statisticName does not exist and throws an exception - } - } - return statistics; - } - } - return Collections.emptyMap(); - } - -} diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java deleted file mode 100644 index 32f1297992..0000000000 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.providers.statistics; - -import org.ehcache.core.spi.service.StatisticsService; -import org.ehcache.core.statistics.CacheStatistics; -import org.ehcache.core.statistics.TypedValueStatistic; -import org.ehcache.management.ManagementRegistryServiceConfiguration; -import org.ehcache.management.providers.CacheBinding; -import org.ehcache.management.providers.ExposedCacheBinding; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.registry.collect.StatisticRegistry; - -import java.util.Collection; -import java.util.Map; - -public class StandardEhcacheStatistics extends ExposedCacheBinding { - - private final StatisticRegistry statisticRegistry; - private final String cacheName; - - StandardEhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, StatisticsService statisticsService) { - super(registryConfiguration, cacheBinding); - this.cacheName = cacheBinding.getAlias(); - this.statisticRegistry = new StatisticRegistry(cacheBinding.getCache()); - - CacheStatistics cacheStatistics = statisticsService.getCacheStatistics(cacheName); - Map knownStatistics = cacheStatistics.getKnownStatistics(); - - for(Map.Entry stat : knownStatistics.entrySet()) { - String name = stat.getKey(); - TypedValueStatistic valueStatistic = stat.getValue(); - switch (valueStatistic.getType()) { - case COUNTER: - statisticRegistry.registerCounter(name, valueStatistic); - break; - case SIZE: - statisticRegistry.registerSize(name, valueStatistic); - break; - default: - throw new IllegalArgumentException("Unsupported statistic type: " + valueStatistic.getType()); - } - } - } - - public Number queryStatistic(String fullStatisticName) { - return statisticRegistry.queryStatistic(fullStatisticName); - } - - public Map queryStatistics() { - return statisticRegistry.queryStatistics(); - } - - @Override - public Collection getDescriptors() { - return statisticRegistry.getDescriptors(); - } - -} diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryConfiguration.java b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryConfiguration.java deleted file mode 100644 index 312c3f04e4..0000000000 --- a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryConfiguration.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.registry; - -import org.ehcache.management.ManagementRegistryService; -import org.ehcache.management.ManagementRegistryServiceConfiguration; -import org.terracotta.management.model.context.Context; - -import java.util.Arrays; -import java.util.Collection; -import java.util.TreeSet; -import java.util.concurrent.atomic.AtomicLong; - -public class DefaultManagementRegistryConfiguration implements ManagementRegistryServiceConfiguration { - - private static final AtomicLong COUNTER = new AtomicLong(); - - private final Collection tags = new TreeSet<>(); - private Context context = Context.empty(); - private String collectorExecutorAlias = "collectorExecutor"; - - public DefaultManagementRegistryConfiguration() { - setCacheManagerAlias("cache-manager-" + COUNTER.getAndIncrement()); - } - - public DefaultManagementRegistryConfiguration setCacheManagerAlias(String alias) { - return setContext(Context.create("cacheManagerName", alias)); - } - - public DefaultManagementRegistryConfiguration setContext(Context context) { - if (!this.context.contains("cacheManagerName") && !context.contains("cacheManagerName")) { - throw new IllegalArgumentException("'cacheManagerName' is missing from context"); - } - this.context = this.context.with(context); - return this; - } - - public DefaultManagementRegistryConfiguration setCollectorExecutorAlias(String collectorExecutorAlias) { - this.collectorExecutorAlias = collectorExecutorAlias; - return this; - } - - public DefaultManagementRegistryConfiguration addTags(String... tags) { - this.tags.addAll(Arrays.asList(tags)); - return this; - } - - public DefaultManagementRegistryConfiguration addTag(String tag) { - return addTags(tag); - } - - @Override - public Context getContext() { - return context; - } - - public String getCacheManagerAlias() { - return getContext().get("cacheManagerName"); - } - - @Override - public String getCollectorExecutorAlias() { - return this.collectorExecutorAlias; - } - - @Override - public Collection getTags() { - return tags; - } - - @Override - public Class getServiceType() { - return ManagementRegistryService.class; - } - - @Override - public String toString() { - return "DefaultManagementRegistryConfiguration{" + "context=" + context + - ", tags=" + tags + - ", collectorExecutorAlias='" + collectorExecutorAlias + '\'' + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - DefaultManagementRegistryConfiguration that = (DefaultManagementRegistryConfiguration) o; - - if (!tags.equals(that.tags)) return false; - if (!context.equals(that.context)) return false; - return collectorExecutorAlias != null ? collectorExecutorAlias.equals(that.collectorExecutorAlias) : that.collectorExecutorAlias == null; - - } - - @Override - public int hashCode() { - int result = tags.hashCode(); - result = 31 * result + context.hashCode(); - result = 31 * result + (collectorExecutorAlias != null ? collectorExecutorAlias.hashCode() : 0); - return result; - } - -} diff --git a/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java b/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java deleted file mode 100644 index 3acbdda68b..0000000000 --- a/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.registry; - -import org.ehcache.management.ManagementRegistryService; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.xml.CacheManagerServiceConfigurationParser; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.w3c.dom.Element; - -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; -import java.io.IOException; -import java.net.URI; -import java.net.URL; - -public class ManagementRegistryServiceConfigurationParser implements CacheManagerServiceConfigurationParser { - - private static final String NAMESPACE = "http://www.ehcache.org/v3/management"; - private static final URI NAMESPACE_URI = URI.create(NAMESPACE); - private static final URL XML_SCHEMA = ManagementRegistryServiceConfigurationParser.class.getResource("/ehcache-management-ext.xsd"); - - @Override - public Source getXmlSchema() throws IOException { - return new StreamSource(XML_SCHEMA.openStream()); - } - - @Override - public URI getNamespace() { - return NAMESPACE_URI; - } - - @Override - public ServiceCreationConfiguration parseServiceCreationConfiguration(Element fragment) { - if ("management".equals(fragment.getLocalName())) { - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration(); - - // ATTR: cache-manager-alias - if (fragment.hasAttribute("cache-manager-alias")) { - registryConfiguration.setCacheManagerAlias(attr(fragment, "cache-manager-alias")); - } - - // ATTR: collector-executor-alias - if (fragment.hasAttribute("collector-executor-alias")) { - registryConfiguration.setCollectorExecutorAlias(attr(fragment, "collector-executor-alias")); - } - - // tags - for (Element tags : NodeListIterable.elements(fragment, NAMESPACE, "tags")) { - // tag - for (Element tag : NodeListIterable.elements(tags, NAMESPACE, "tag")) { - String val = val(tag); - if (val != null && !val.isEmpty()) { - registryConfiguration.addTag(val); - } - } - } - - return registryConfiguration; - - } else { - throw new XmlConfigurationException(String.format( - "XML configuration element <%s> in <%s> is not supported", - fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); - } - } - - private static String attr(Element element, String name, String def) { - String s = element.getAttribute(name); - return s == null || s.equals("") ? def : s; - } - - private static String attr(Element element, String name) { - return attr(element, name, null); - } - - private static String val(Element element) { - return element.hasChildNodes() ? element.getFirstChild().getNodeValue() : null; - } - -} diff --git a/management/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/management/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory deleted file mode 100644 index e014208625..0000000000 --- a/management/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory +++ /dev/null @@ -1 +0,0 @@ -org.ehcache.management.registry.DefaultManagementRegistryFactory \ No newline at end of file diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java deleted file mode 100755 index 2f1ffc8fd1..0000000000 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.providers.statistics; - -import org.ehcache.Cache; -import org.ehcache.CacheManager; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.management.ManagementRegistryService; -import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; -import org.ehcache.management.registry.DefaultManagementRegistryService; -import org.hamcrest.Matchers; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; -import org.terracotta.management.model.context.Context; -import org.terracotta.management.model.stats.ContextualStatistics; - -import java.util.Arrays; - -import static org.junit.Assert.assertThat; - -public class StandardEhcacheStatisticsTest { - - @Rule - public final Timeout globalTimeout = Timeout.seconds(10); - - @Test - public void statsClearCacheTest() throws InterruptedException { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) - .build(); - - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager3"); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - - CacheManager cacheManager = null; - - try { - cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("cCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Cache aCache = cacheManager.getCache("cCache", Long.class, String.class); - aCache.put(1L, "one"); - aCache.get(1L); - - Context context = StatsUtil.createContext(managementRegistry); - - ContextualStatistics counter = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList("Cache:HitCount")) - .on(context) - .build() - .execute() - .getSingleResult(); - - assertThat(counter.size(), Matchers.is(1)); - Number count = counter.getStatistic("Cache:HitCount"); - - assertThat(count.longValue(), Matchers.equalTo(1L)); - } - finally { - if(cacheManager != null) { - cacheManager.close(); - } - } - } -} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java deleted file mode 100755 index 23700d5641..0000000000 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.providers.statistics; - -import org.ehcache.management.ManagementRegistryService; -import org.hamcrest.Matchers; -import org.terracotta.management.model.context.Context; -import org.terracotta.management.model.context.ContextContainer; -import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.ResultSet; -import org.terracotta.management.registry.StatisticQuery; - -import static java.util.Collections.singletonList; -import static org.junit.Assert.assertThat; - -public class StatsUtil { - - public static Context createContext(ManagementRegistryService managementRegistry) { - ContextContainer cacheManagerCtx = managementRegistry.getContextContainer(); - ContextContainer firstCacheCtx = cacheManagerCtx.getSubContexts().iterator().next(); - return Context.empty() - .with(cacheManagerCtx.getName(), cacheManagerCtx.getValue()) - .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); - } - - /* - NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. - This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations - change, the stats value isn't accessible or if you enter the wrong expectedResult. - */ - public static long getAndAssertExpectedValueFromCounter(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { - - StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(singletonList(statName)) - .on(context) - .build(); - - ResultSet counters = query.execute(); - - ContextualStatistics statisticsContext = counters.getResult(context); - - assertThat(counters.size(), Matchers.is(1)); - - Number counter = statisticsContext.getStatistic(statName); - long value = counter.longValue(); - - assertThat(value, Matchers.is(expectedResult)); - - return value; - } - -} diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java deleted file mode 100644 index 065b2f75aa..0000000000 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ /dev/null @@ -1,452 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.registry; - -import java.io.File; -import java.io.IOException; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.NoSuchElementException; -import java.util.concurrent.ExecutionException; - -import org.ehcache.Cache; -import org.ehcache.CacheManager; -import org.ehcache.PersistentCacheManager; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.management.ManagementRegistryService; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.rules.TemporaryFolder; -import org.junit.rules.Timeout; -import org.terracotta.management.model.call.ContextualReturn; -import org.terracotta.management.model.capabilities.Capability; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.context.Context; -import org.terracotta.management.model.context.ContextContainer; -import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.ResultSet; -import org.terracotta.management.registry.StatisticQuery.Builder; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.config.units.MemoryUnit.MB; - -public class DefaultManagementRegistryServiceTest { - - private static final Collection ONHEAP_DESCRIPTORS = new ArrayList<>(); - private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList<>(); - private static final Collection DISK_DESCRIPTORS = new ArrayList<>(); - private static final Collection CACHE_DESCRIPTORS = new ArrayList<>(); - - @Rule - public final ExpectedException expectedException = ExpectedException.none(); - - @Rule - public final TemporaryFolder diskPath = new TemporaryFolder(); - - @Rule - public final Timeout globalTimeout = Timeout.seconds(10); - - @Test - public void testCanGetContext() { - CacheManager cacheManager1 = null; - try { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - ContextContainer contextContainer = managementRegistry.getContextContainer(); - assertThat(contextContainer.getName()).isEqualTo("cacheManagerName"); - assertThat(contextContainer.getValue()).isEqualTo("myCM"); - assertThat(contextContainer.getSubContexts()).hasSize(1); - - ContextContainer subcontext = contextContainer.getSubContexts().iterator().next(); - assertThat(subcontext.getName()).isEqualTo("cacheName"); - assertThat(subcontext.getValue()).isEqualTo("aCache"); - } - finally { - if(cacheManager1 != null) cacheManager1.close(); - } - } - - @Test - public void descriptorOnHeapTest() { - CacheManager cacheManager1 = null; - try { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - assertThat(managementRegistry.getCapabilities()).hasSize(4); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName()).isEqualTo("ActionsCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName()).isEqualTo("SettingsCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName()).isEqualTo("StatisticCollectorCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName()).isEqualTo("StatisticsCapability"); - - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors()).hasSize(4); - - Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); - Collection allDescriptors = new ArrayList<>(); - allDescriptors.addAll(ONHEAP_DESCRIPTORS); - allDescriptors.addAll(CACHE_DESCRIPTORS); - - assertThat(descriptors).containsOnlyElementsOf(allDescriptors); - } - finally { - if(cacheManager1 != null) cacheManager1.close(); - } - - } - - @Test - public void descriptorOffHeapTest() { - CacheManager cacheManager1 = null; - try { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(5, MB).offheap(10, MB)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - assertThat(managementRegistry.getCapabilities()).hasSize(4); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName()).isEqualTo("ActionsCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName()).isEqualTo("SettingsCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName()).isEqualTo("StatisticCollectorCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName()).isEqualTo("StatisticsCapability"); - - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors()).hasSize(4); - - Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); - Collection allDescriptors = new ArrayList<>(); - allDescriptors.addAll(ONHEAP_DESCRIPTORS); - allDescriptors.addAll(OFFHEAP_DESCRIPTORS); - allDescriptors.addAll(CACHE_DESCRIPTORS); - - assertThat(descriptors).containsOnlyElementsOf(allDescriptors); - } - finally { - if(cacheManager1 != null) cacheManager1.close(); - } - - } - - @Test - public void descriptorDiskStoreTest() throws Exception { - PersistentCacheManager persistentCacheManager = null; - try { - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .with(CacheManagerBuilder.persistence(getStoragePath() + File.separator + "myData")) - .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .disk(10, MemoryUnit.MB, true)) - ) - .using(managementRegistry) - .build(true); - - assertThat(managementRegistry.getCapabilities()).hasSize(4); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName()).isEqualTo("ActionsCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName()).isEqualTo("SettingsCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName()).isEqualTo("StatisticCollectorCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName()).isEqualTo("StatisticsCapability"); - - - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors()).hasSize(4); - - Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); - Collection allDescriptors = new ArrayList<>(); - allDescriptors.addAll(ONHEAP_DESCRIPTORS); - allDescriptors.addAll(DISK_DESCRIPTORS); - allDescriptors.addAll(CACHE_DESCRIPTORS); - - assertThat(descriptors).containsOnlyElementsOf(allDescriptors); - } - finally { - if(persistentCacheManager != null) persistentCacheManager.close(); - } - } - - private String getStoragePath() throws IOException { - return diskPath.newFolder().getAbsolutePath(); - } - - @Test - public void testCanGetCapabilities() { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - assertThat(managementRegistry.getCapabilities()).hasSize(4); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName()).isEqualTo("ActionsCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName()).isEqualTo("SettingsCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName()).isEqualTo("StatisticCollectorCapability"); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName()).isEqualTo("StatisticsCapability"); - - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors()).hasSize(4); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors()).hasSize(ONHEAP_DESCRIPTORS.size() + CACHE_DESCRIPTORS.size()); - - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getCapabilityContext().getAttributes()).hasSize(2); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getCapabilityContext().getAttributes()).hasSize(2); - - cacheManager1.close(); - } - - @Test - public void testCanGetStats() { - String queryStatisticName = "Cache:HitCount"; - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache1", cacheConfiguration) - .withCache("aCache2", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Context context1 = Context.empty() - .with("cacheManagerName", "myCM") - .with("cacheName", "aCache1"); - - Context context2 = Context.empty() - .with("cacheManagerName", "myCM") - .with("cacheName", "aCache2"); - - Cache cache1 = cacheManager1.getCache("aCache1", Long.class, String.class); - Cache cache2 = cacheManager1.getCache("aCache2", Long.class, String.class); - - cache1.put(1L, "one"); - cache2.put(3L, "three"); - - cache1.get(1L); - cache1.get(2L); - cache2.get(3L); - cache2.get(4L); - - Builder builder1 = managementRegistry.withCapability("StatisticsCapability") - .queryStatistic(queryStatisticName) - .on(context1); - - ContextualStatistics counters = getResultSet(builder1, context1, null, queryStatisticName).getResult(context1); - Number counterHistory1 = counters.getStatistic(queryStatisticName); - - assertThat(counters.size()).isEqualTo(1); - assertThat(counterHistory1.longValue()).isEqualTo(1L); - - Builder builder2 = managementRegistry.withCapability("StatisticsCapability") - .queryStatistic(queryStatisticName) - .on(context1) - .on(context2); - ResultSet allCounters = getResultSet(builder2, context1, context2, queryStatisticName); - - assertThat(allCounters.size()).isEqualTo(2); - assertThat(allCounters.getResult(context1).size()).isEqualTo(1); - assertThat(allCounters.getResult(context2).size()).isEqualTo(1); - - assertThat(allCounters.getResult(context1).getStatistic(queryStatisticName).longValue()).isEqualTo(1L); - assertThat(allCounters.getResult(context2).getStatistic(queryStatisticName).longValue()).isEqualTo(1L); - - cacheManager1.close(); - } - - private static ResultSet getResultSet(Builder builder, Context context1, Context context2, String statisticsName) { - ResultSet counters = null; - - while(!Thread.currentThread().isInterrupted()) //wait till Counter history(s) is initialized and contains values. - { - counters = builder.build().execute(); - - ContextualStatistics statisticsContext1 = counters.getResult(context1); - Number counterContext1 = statisticsContext1.getStatistic(statisticsName); - - if(context2 != null) - { - ContextualStatistics statisticsContext2 = counters.getResult(context2); - Number counterHistoryContext2 = statisticsContext2.getStatistic(statisticsName); - - if(counterHistoryContext2.longValue() > 0 && - counterContext1.longValue() > 0) - { - break; - } - } - else - { - if(counterContext1.longValue() > 0) - { - break; - } - } - } - - return counters; - } - - @Test - public void testCall() throws ExecutionException { - CacheManager cacheManager1 = null; - try { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache1", cacheConfiguration) - .withCache("aCache2", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Context context = Context.empty() - .with("cacheManagerName", "myCM") - .with("cacheName", "aCache1"); - - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - - assertThat(cacheManager1.getCache("aCache1", Long.class, String.class).get(1L)).isEqualTo("1"); - - ContextualReturn result = managementRegistry.withCapability("ActionsCapability") - .call("clear") - .on(context) - .build() - .execute() - .getSingleResult(); - - assertThat(result.hasExecuted()).isTrue(); - assertThat(result.getValue()).isNull(); - - assertThat(cacheManager1.getCache("aCache1", Long.class, String.class).get(1L)).isNull(); - } - finally { - if(cacheManager1 != null) cacheManager1.close(); - } - - } - - @Test - public void testCallOnInexistignContext() throws ExecutionException { - CacheManager cacheManager1 = null; - try { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache1", cacheConfiguration) - .withCache("aCache2", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Context inexisting = Context.empty() - .with("cacheManagerName", "myCM2") - .with("cacheName", "aCache2"); - - ResultSet> results = managementRegistry.withCapability("ActionsCapability") - .call("clear") - .on(inexisting) - .build() - .execute(); - - assertThat(results.size()).isEqualTo(1); - assertThat(results.getSingleResult().hasExecuted()).isFalse(); - - expectedException.expect(NoSuchElementException.class); - results.getSingleResult().getValue(); - } - finally { - if(cacheManager1 != null) cacheManager1.close(); - } - - } - - @BeforeClass - public static void loadStatsUtil() throws ClassNotFoundException { - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:ExpirationCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedByteSize" , "SIZE")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:PutCount" , "COUNTER")); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:RemovalCount" , "COUNTER")); - - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", "SIZE")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedByteSize", "SIZE")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:ExpirationCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MaxMappingCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:PutCount", "COUNTER")); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:RemovalCount", "COUNTER")); - - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", "SIZE")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedByteSize", "SIZE")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:ExpirationCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:PutCount", "COUNTER")); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:RemovalCount", "COUNTER")); - - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:PutCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:RemovalCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:EvictionCount", "COUNTER")); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ExpirationCount", "COUNTER")); - } -} diff --git a/management/src/test/java/org/ehcache/management/registry/XmlConfigTest.java b/management/src/test/java/org/ehcache/management/registry/XmlConfigTest.java deleted file mode 100644 index 844c3b0caa..0000000000 --- a/management/src/test/java/org/ehcache/management/registry/XmlConfigTest.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.registry; - -import org.ehcache.CacheManager; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.xml.XmlConfiguration; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.util.Arrays; -import java.util.Collection; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; - -@RunWith(Parameterized.class) -public class XmlConfigTest { - - @Parameterized.Parameters - public static Collection data() { - return Arrays.asList(new Object[][]{ - { - "ehcache-management-1.xml", - new DefaultManagementRegistryConfiguration() - }, - { - "ehcache-management-2.xml", - new DefaultManagementRegistryConfiguration() - .setCacheManagerAlias("my-cache-manager-name") - .addTags("webapp-name", "jboss-1", "server-node-1") - }, - { - "ehcache-management-3.xml", - new DefaultManagementRegistryConfiguration() - .setCacheManagerAlias("my-cache-manager-name") - .addTags("webapp-name", "jboss-1", "server-node-1") - .setCollectorExecutorAlias("my-collectorExecutorAlias") - }, - { - "ehcache-management-4.xml", - new DefaultManagementRegistryConfiguration() - .setCacheManagerAlias("my-cache-manager-name") - .addTags("webapp-name", "jboss-1", "server-node-1") - }, - { - "ehcache-management-5.xml", - new DefaultManagementRegistryConfiguration() - .setCacheManagerAlias("my-cache-manager-name") - .addTags("webapp-name", "jboss-1", "server-node-1") - } - }); - } - - private final String xml; - private final DefaultManagementRegistryConfiguration expectedConfiguration; - - public XmlConfigTest(String xml, DefaultManagementRegistryConfiguration expectedConfiguration) { - this.xml = xml; - this.expectedConfiguration = expectedConfiguration; - } - - @Test - public void test_config_loaded() throws Exception { - CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(new XmlConfiguration(getClass().getClassLoader().getResource(xml))); - myCacheManager.init(); - try { - DefaultManagementRegistryConfiguration registryConfiguration = null; - - for (ServiceCreationConfiguration configuration : myCacheManager.getRuntimeConfiguration().getServiceCreationConfigurations()) { - if (configuration instanceof DefaultManagementRegistryConfiguration) { - registryConfiguration = (DefaultManagementRegistryConfiguration) configuration; - break; - } - } - - assertThat(registryConfiguration, is(not(nullValue()))); - - // 1st test: CM alia not set, so generated - if (xml.endsWith("-1.xml")) { - expectedConfiguration.setCacheManagerAlias(registryConfiguration.getContext().get("cacheManagerName")); - } - - assertThat(registryConfiguration, equalTo(expectedConfiguration)); - - } finally { - myCacheManager.close(); - } - } - -} diff --git a/management/src/test/resources/ehcache-management-1.xml b/management/src/test/resources/ehcache-management-1.xml deleted file mode 100644 index 6dc090cfd3..0000000000 --- a/management/src/test/resources/ehcache-management-1.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - java.lang.String - java.lang.String - 20 - - - diff --git a/management/src/test/resources/ehcache-management-2.xml b/management/src/test/resources/ehcache-management-2.xml deleted file mode 100644 index 112e77d9cc..0000000000 --- a/management/src/test/resources/ehcache-management-2.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - webapp-name - jboss-1 - server-node-1 - - - - - - java.lang.String - java.lang.String - 20 - - - diff --git a/management/src/test/resources/ehcache-management-3.xml b/management/src/test/resources/ehcache-management-3.xml deleted file mode 100644 index 15ea08acfb..0000000000 --- a/management/src/test/resources/ehcache-management-3.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - webapp-name - jboss-1 - server-node-1 - - - - - - java.lang.String - java.lang.String - 20 - - - diff --git a/management/src/test/resources/ehcache-management-4.xml b/management/src/test/resources/ehcache-management-4.xml deleted file mode 100644 index 112e77d9cc..0000000000 --- a/management/src/test/resources/ehcache-management-4.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - webapp-name - jboss-1 - server-node-1 - - - - - - java.lang.String - java.lang.String - 20 - - - diff --git a/management/src/test/resources/ehcache-management-5.xml b/management/src/test/resources/ehcache-management-5.xml deleted file mode 100644 index 112e77d9cc..0000000000 --- a/management/src/test/resources/ehcache-management-5.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - webapp-name - jboss-1 - server-node-1 - - - - - - java.lang.String - java.lang.String - 20 - - - diff --git a/management/src/test/resources/settings-capability.json b/management/src/test/resources/settings-capability.json deleted file mode 100644 index d76418cbec..0000000000 --- a/management/src/test/resources/settings-capability.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"SettingsCapability","descriptors":[{"cacheName":"cache-1","keyType":"java.lang.String","valueType":"java.lang.String","resourcePools":{"heap":{"level":10000,"persistent":false,"type":"ENTRY","size":10,"unit":"entries"},"offheap":{"level":1000,"persistent":false,"type":"MEMORY","size":1,"unit":"MB"},"disk":{"level":100,"persistent":true,"type":"MEMORY","size":2,"unit":"MB"}}},{"cacheName":"cache-2","keyType":"java.lang.String","valueType":"java.lang.String","resourcePools":{"heap":{"level":10000,"persistent":false,"type":"ENTRY","size":10,"unit":"entries"},"offheap":{"level":1000,"persistent":false,"type":"MEMORY","size":1,"unit":"MB"},"disk":{"level":100,"persistent":true,"type":"MEMORY","size":2,"unit":"MB"}}},{"cacheManagerDescription":"","status":"AVAILABLE","managementContext":{"cacheManagerName":"my-cm-1"},"tags":["baz","boo","foo"]}],"capabilityContext":{"attributes":[{"name":"cacheManagerName","required":true},{"name":"cacheName","required":true}]}} \ No newline at end of file diff --git a/osgi-test/build.gradle b/osgi-test/build.gradle index 8ceae321f2..666463b594 100644 --- a/osgi-test/build.gradle +++ b/osgi-test/build.gradle @@ -14,60 +14,121 @@ * limitations under the License. */ +plugins { + id 'org.ehcache.build.conventions.java-library' +} + +configurations { + modularOsgiModule + osgiModule + lowerBoundOsgiModule.extendsFrom osgiModule + testCompileOnly.extendsFrom osgiModule +} + dependencies { - ext { - paxExamVersion = '3.5.0' - felixVersion = '4.4.0' + api ('org.ops4j.pax.exam:pax-exam-junit4:4.12.0') { + exclude group:'org.slf4j', module:'slf4j-api' } - - testCompile project(':impl'), project(':xml'), project(':107'), - "org.apache.felix:org.apache.felix.framework:$felixVersion", - "javax.cache:cache-api:$parent.jcacheVersion" - testCompile ("org.ops4j.pax.exam:pax-exam-junit4:$paxExamVersion") { - exclude group:'junit', module:'junit' + implementation 'org.apache.felix:org.apache.felix.framework:6.0.3' + runtimeOnly ('org.ops4j.pax.exam:pax-exam-link-mvn:4.12.0') { exclude group:'org.slf4j', module:'slf4j-api' } - - testRuntime "org.slf4j:slf4j-simple:$parent.slf4jVersion", - testRuntime ("org.ops4j.pax.exam:pax-exam-container-native:$paxExamVersion") { + runtimeOnly ("org.ops4j.pax.url:pax-url-wrap:2.6.1") { exclude group:'org.slf4j', module:'slf4j-api' } - testRuntime ("org.ops4j.pax.exam:pax-exam-link-mvn:$paxExamVersion") { + runtimeOnly ('org.ops4j.pax.exam:pax-exam-container-native:4.12.0') { exclude group:'org.slf4j', module:'slf4j-api' } + + modularOsgiModule project(':ehcache-api') + modularOsgiModule project(':ehcache-core') + modularOsgiModule project(':ehcache-impl') + modularOsgiModule project(':ehcache-xml') + modularOsgiModule project(':ehcache-107') + + osgiModule project(':ehcache-transactions') + osgiModule "javax.cache:cache-api:$parent.jcacheVersion" + osgiModule ('org.codehaus.btm:btm:2.1.4') { + exclude group:'org.slf4j', module:'slf4j-api' + } + + osgiModule project(':ehcache') + + osgiModule "org.slf4j:slf4j-simple:$parent.slf4jVersion" + osgiModule 'org.apache.felix:org.apache.felix.scr:2.1.6' + + osgiModule 'com.sun.activation:javax.activation:1.2.0' + osgiModule 'org.glassfish.hk2:osgi-resource-locator:1.0.2' } configurations.all { - resolutionStrategy { - force 'org.apache.maven.wagon:wagon-provider-api:2.5' - force 'org.codehaus.plexus:plexus-utils:3.0.15' - force 'org.eclipse.aether:aether-api:0.9.0.M4' - force 'org.eclipse.aether:aether-impl:0.9.0.M4' - force 'org.eclipse.aether:aether-spi:0.9.0.M4' - force 'org.eclipse.aether:aether-util:0.9.0.M4' - force 'org.sonatype.plexus:plexus-cipher:1.7' - force 'org.sonatype.plexus:plexus-sec-dispatcher:1.4' - } -} + resolutionStrategy { + dependencySubstitution { + substitute(module('org.ops4j.pax.url:pax-url-aether:2.4.5')) + .because('https://github.com/codehaus-plexus/plexus-utils/issues/3' + + ' and https://github.com/codehaus-plexus/plexus-utils/issues/4') + .with(module('org.ops4j.pax.url:pax-url-aether:2.6.3')) + substitute(module('org.ops4j.pax.url:pax-url-classpath:2.4.5')) + .because('https://ops4j1.jira.com/browse/PAXURL-341') + .with(module('org.ops4j.pax.url:pax-url-classpath:2.6.1')) + substitute(module('org.ops4j.pax.url:pax-url-link:2.4.5')) + .because('https://ops4j1.jira.com/browse/PAXURL-341') + .with(module('org.ops4j.pax.url:pax-url-link:2.6.1')) -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] + substitute(module('biz.aQute.bnd:bndlib:2.4.0')) + .because('Java 9 Stuff') + .with(module('biz.aQute.bnd:biz.aQute.bndlib:5.2.0')) + substitute(module('junit:junit:4.12')) + .because('CVE-2020-15250') + .with(module('junit:junit:4.13.1')) + } + } } sourceSets { test { - // Needed for PaxExam which makes the dynamic bundle load content of a single dir - // matching the package of the test class + // Needed to allow PaxExam to see the test resources output.resourcesDir = java.outputDir } } +if (testJava.javaVersion.isJava9Compatible()) { + tasks.withType(Test) { + //https://issues.apache.org/jira/browse/FELIX-5727 - framework extensions in Java 9 are ugly + jvmArgs += '--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED' + } +} + test { - systemProperty 'ehcache.osgi.jar', project(':dist').jar.archivePath.getPath() - systemProperty 'ehcache.osgi.jcache.version', parent.jcacheVersion - systemProperty 'ehcache.osgi.slf4j.version', parent.slf4jVersion -}.doFirst { - if (testJava.javaVersion.isJava9Compatible()) throw new StopExecutionException("OSGi Tests Not Working in Java 9") + dependsOn configurations.osgiModule, configurations.modularOsgiModule + doFirst { + [configurations.modularOsgiModule, configurations.osgiModule]*.resolvedConfiguration*.resolvedArtifacts*.forEach({ + systemProperty "$it.moduleVersion.id.module:osgi-path", it.file + }) + } +} + +configurations { + lowerBoundOsgiModule { + resolutionStrategy.dependencySubstitution { + substitute module('org.glassfish.jaxb:jaxb-runtime') with module('com.sun.xml.bind:jaxb-osgi:2.2.8-b01') + } + } +} +dependencies { + lowerBoundOsgiModule 'javax.xml.bind:jaxb-api:2.2.9' } -test.dependsOn ':dist:jar' +tasks.register('lowerBoundTest', Test) { test -> + group = JavaBasePlugin.VERIFICATION_GROUP + dependsOn configurations.lowerBoundOsgiModule, configurations.modularOsgiModule + doFirst { + [configurations.modularOsgiModule, configurations.lowerBoundOsgiModule]*.resolvedConfiguration*.resolvedArtifacts*.forEach { + systemProperty "$it.moduleVersion.id.module:osgi-path", it.file + } + } +} + +tasks.named('check') { + dependsOn tasks.lowerBoundTest +} diff --git a/osgi-test/src/main/java/org/ehcache/osgi/OsgiTestUtils.java b/osgi-test/src/main/java/org/ehcache/osgi/OsgiTestUtils.java new file mode 100644 index 0000000000..90fc0623e5 --- /dev/null +++ b/osgi-test/src/main/java/org/ehcache/osgi/OsgiTestUtils.java @@ -0,0 +1,115 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.osgi; + +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.options.ProvisionOption; +import org.ops4j.pax.exam.options.WrappedUrlProvisionOption; +import org.osgi.framework.Constants; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Optional; +import java.util.jar.Attributes; +import java.util.jar.JarFile; +import java.util.jar.Manifest; + +import static java.lang.String.join; +import static java.lang.System.getProperty; +import static org.ops4j.pax.exam.CoreOptions.bundle; +import static org.ops4j.pax.exam.CoreOptions.cleanCaches; +import static org.ops4j.pax.exam.CoreOptions.composite; +import static org.ops4j.pax.exam.CoreOptions.junitBundles; +import static org.ops4j.pax.exam.CoreOptions.systemProperty; +import static org.ops4j.pax.exam.CoreOptions.workingDirectory; +import static org.ops4j.pax.exam.CoreOptions.wrappedBundle; +import static org.ops4j.pax.exam.options.WrappedUrlProvisionOption.OverwriteMode.MERGE; + +public class OsgiTestUtils { + + public static Option baseConfiguration(String ... path) { + return composite( + gradleBundle("org.slf4j:slf4j-api"), + gradleBundle("org.slf4j:slf4j-simple").noStart(), + gradleBundle("org.apache.felix:org.apache.felix.scr"), + systemProperty("pax.exam.osgi.unresolved.fail").value("true"), + cleanCaches(true), + workingDirectory(join(File.separator, "build", "osgi-container", join(File.separator, path))), + junitBundles() + ); + } + + public static Option jaxbConfiguration() { + return optionalGradleBundle("com.sun.xml.bind:jaxb-osgi") + .map(jaxb -> composite(jaxb, + gradleBundle("javax.xml.bind:jaxb-api"), + gradleBundle("com.sun.activation:javax.activation"), + gradleBundle("org.glassfish.hk2:osgi-resource-locator")) + ).orElseGet(() -> optionalGradleBundle("org.glassfish.jaxb:jaxb-runtime") + .map(jaxb -> composite(jaxb, + wrappedGradleBundle("javax.xml.bind:jaxb-api").instructions("-removeheaders=Require-Capability"), + gradleBundle("com.sun.istack:istack-commons-runtime"), + gradleBundle("com.sun.activation:javax.activation"), + gradleBundle("org.glassfish.hk2:osgi-resource-locator")) + ).orElseThrow(AssertionError::new)); + } + + public static Option jtaConfiguration() { + return composite( + wrappedGradleBundle("javax.transaction:jta").instructions("Fragment-Host=org.apache.felix.framework"), + gradleBundle("org.codehaus.btm:btm") + ); + } + + public static ProvisionOption gradleBundle(String module) { + return optionalGradleBundle(module).orElseThrow(() -> new IllegalArgumentException("Cannot find '" + module + "'")); + } + + private static final Attributes.Name BUNDLE_SYMBOLICNAME = new Attributes.Name(Constants.BUNDLE_SYMBOLICNAME); + + public static Optional> optionalGradleBundle(String module) { + return artifact(module).map(artifact -> { + try (JarFile jar = new JarFile(artifact.toFile())) { + Manifest manifest = jar.getManifest(); + if (manifest != null && manifest.getMainAttributes().containsKey(BUNDLE_SYMBOLICNAME)) { + return bundle(artifact.toUri().toString()); + } else { + return wrappedBundle(artifact.toUri().toString()); + } + } catch (IOException e) { + throw new IllegalArgumentException("Module '" + module + "' artifact " + artifact + " is broken?"); + } + }); + } + + public static WrappedUrlProvisionOption wrappedGradleBundle(String module) { + ProvisionOption provisionOption = gradleBundle(module); + if (provisionOption instanceof WrappedUrlProvisionOption) { + return (WrappedUrlProvisionOption) provisionOption; + } else { + return wrappedBundle(provisionOption.getURL()).overwriteManifest(MERGE); + } + } + + private static Optional artifact(String module) { + return Optional.ofNullable(getProperty(module + ":osgi-path")).map(Paths::get).filter(Files::isRegularFile); + } +} + diff --git a/osgi-test/src/test/java/org/ehcache/osgi/ByteSizedOnHeapOsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/ByteSizedOnHeapOsgiTest.java index d06cd3bc09..67591f89d3 100644 --- a/osgi-test/src/test/java/org/ehcache/osgi/ByteSizedOnHeapOsgiTest.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/ByteSizedOnHeapOsgiTest.java @@ -16,13 +16,6 @@ package org.ehcache.osgi; -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ops4j.pax.exam.CoreOptions.bundle; -import static org.ops4j.pax.exam.CoreOptions.junitBundles; -import static org.ops4j.pax.exam.CoreOptions.mavenBundle; -import static org.ops4j.pax.exam.CoreOptions.options; - import org.ehcache.Cache; import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheManagerBuilder; @@ -35,35 +28,59 @@ import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; import org.ops4j.pax.exam.spi.reactors.PerMethod; -/** - * - */ +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.osgi.OsgiTestUtils.baseConfiguration; +import static org.ehcache.osgi.OsgiTestUtils.gradleBundle; +import static org.ehcache.osgi.OsgiTestUtils.jaxbConfiguration; +import static org.ops4j.pax.exam.CoreOptions.options; + @RunWith(PaxExam.class) @ExamReactorStrategy(PerMethod.class) public class ByteSizedOnHeapOsgiTest { @Configuration - public Option[] config() { + public Option[] individualModules() { return options( - mavenBundle("org.slf4j", "slf4j-api", System.getProperty("ehcache.osgi.slf4j.version")), - mavenBundle("org.slf4j", "slf4j-simple", System.getProperty("ehcache.osgi.slf4j.version")).noStart(), - bundle("file:" + System.getProperty("ehcache.osgi.jar")), - junitBundles() + gradleBundle("org.ehcache.modules:ehcache-impl"), + gradleBundle("org.ehcache.modules:ehcache-core"), + gradleBundle("org.ehcache.modules:ehcache-api"), + + gradleBundle("org.terracotta:statistics"), + gradleBundle("org.ehcache:sizeof"), + gradleBundle("org.terracotta:offheap-store"), + gradleBundle("org.terracotta:terracotta-utilities-tools"), + + baseConfiguration("ByteSizedOnHeapOsgiTest", "individualModules") + ); + } + + @Configuration + public Option[] uberJar() { + return options( + gradleBundle("org.ehcache:ehcache"), jaxbConfiguration(), + + baseConfiguration("ByteSizedOnHeapOsgiTest", "uberJar") ); } @Test public void testByteSizedOnHeapInOsgi() { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + TestMethods.testByteSizedOnHeapInOsgi(); + } + + private static class TestMethods { + public static void testByteSizedOnHeapInOsgi() { + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(10, MemoryUnit.KB)) - .build()) + .build()) .build(true); - Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); - cache.put(42L, "I am out of heap!!"); + cache.put(42L, "I am out of heap!!"); - cache.get(42L); + cache.get(42L); + } } - } diff --git a/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java index c8cc4aad14..f96c601241 100644 --- a/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java @@ -16,7 +16,9 @@ package org.ehcache.osgi; -import org.junit.Ignore; +import org.ehcache.core.osgi.EhcacheActivator; +import org.ehcache.core.osgi.OsgiServiceLoader; +import org.ehcache.core.spi.service.ServiceFactory; import org.junit.Test; import org.junit.runner.RunWith; import org.ops4j.pax.exam.Configuration; @@ -24,16 +26,25 @@ import org.ops4j.pax.exam.junit.PaxExam; import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; import org.ops4j.pax.exam.spi.reactors.PerMethod; +import org.osgi.framework.wiring.BundleWiring; import javax.cache.Cache; import javax.cache.CacheManager; import javax.cache.Caching; import javax.cache.spi.CachingProvider; +import java.util.ServiceLoader; +import java.util.Set; +import static java.util.Spliterators.spliterator; +import static java.util.stream.Collectors.toSet; +import static java.util.stream.Stream.of; +import static java.util.stream.StreamSupport.stream; +import static org.ehcache.osgi.OsgiTestUtils.baseConfiguration; +import static org.ehcache.osgi.OsgiTestUtils.gradleBundle; +import static org.ehcache.osgi.OsgiTestUtils.jaxbConfiguration; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsCollectionContaining.hasItems; import static org.junit.Assert.assertEquals; -import static org.ops4j.pax.exam.CoreOptions.bundle; -import static org.ops4j.pax.exam.CoreOptions.junitBundles; -import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.CoreOptions.options; /** @@ -44,24 +55,67 @@ public class Jsr107OsgiTest { @Configuration - public Option[] config() { + public Option[] individualModules() { return options( - mavenBundle("org.slf4j", "slf4j-api", System.getProperty("ehcache.osgi.slf4j.version")), - mavenBundle("org.slf4j", "slf4j-simple", System.getProperty("ehcache.osgi.slf4j.version")).noStart(), - bundle("file:" + System.getProperty("ehcache.osgi.jar")), - mavenBundle("javax.cache", "cache-api", System.getProperty("ehcache.osgi.jcache.version")), - junitBundles() + gradleBundle("org.ehcache.modules:ehcache-impl"), + gradleBundle("org.ehcache.modules:ehcache-xml"), jaxbConfiguration(), + gradleBundle("org.ehcache.modules:ehcache-107"), + gradleBundle("org.ehcache.modules:ehcache-core"), + gradleBundle("org.ehcache.modules:ehcache-api"), + gradleBundle("javax.cache:cache-api"), + + gradleBundle("org.terracotta:statistics"), + gradleBundle("org.ehcache:sizeof"), + gradleBundle("org.terracotta:offheap-store"), + gradleBundle("org.terracotta:terracotta-utilities-tools"), + + baseConfiguration("Jsr107OsgiTest", "individualModules") + ); + } + + @Configuration + public Option[] uberJar() { + return options( + gradleBundle("org.ehcache:ehcache"), jaxbConfiguration(), + gradleBundle("javax.cache:cache-api"), + + baseConfiguration("Jsr107OsgiTest", "uberJar") ); } @Test - @Ignore("Needs https://github.com/jsr107/jsr107spec/issues/326 to be fixed and so will wait on javax.cache:cache-api:1.0.1 only") - @SuppressWarnings("unchecked") public void testJsr107EhcacheOsgi() throws Exception { - CachingProvider cachingProvider = Caching.getCachingProvider("org.ehcache.jsr107.EhcacheCachingProvider", getClass().getClassLoader()); - CacheManager cacheManager = cachingProvider.getCacheManager(getClass().getResource("/org/ehcache/osgi/ehcache-107-osgi.xml").toURI(), getClass().getClassLoader()); - Cache personCache = cacheManager.getCache("personCache", Long.class, Person.class); - assertEquals(Person.class, personCache.getConfiguration(javax.cache.configuration.Configuration.class).getValueType()); + TestMethods.testJsr107EhcacheOsgi(); } + @Test + public void testAllServicesAreAvailable() { + TestMethods.testAllServicesAreAvailable(); + } + + private static class TestMethods { + @SuppressWarnings("unchecked") + public static void testJsr107EhcacheOsgi() throws Exception { + CachingProvider cachingProvider = Caching.getCachingProvider("org.ehcache.jsr107.EhcacheCachingProvider", TestMethods.class.getClassLoader()); + CacheManager cacheManager = cachingProvider.getCacheManager(TestMethods.class.getResource("/org/ehcache/osgi/ehcache-107-osgi.xml").toURI(), TestMethods.class.getClassLoader()); + Cache personCache = cacheManager.getCache("personCache", Long.class, Person.class); + assertEquals(Person.class, personCache.getConfiguration(javax.cache.configuration.Configuration.class).getValueType()); + } + + public static void testAllServicesAreAvailable() { + Set osgiAvailableClasses = + stream(spliterator(OsgiServiceLoader.load(ServiceFactory.class).iterator(), Long.MAX_VALUE, 0), false) + .map(f -> f.getClass().getName()) + .collect(toSet()); + + Set jdkAvailableClasses = of(EhcacheActivator.getCoreBundle().getBundles()) + .map(b -> b.adapt(BundleWiring.class).getClassLoader()) + .flatMap(cl -> + stream(spliterator(ServiceLoader.load(ServiceFactory.class, cl).iterator(), Long.MAX_VALUE, 0), false) + .map(f -> f.getClass().getName())) + .collect(toSet()); + + assertThat(osgiAvailableClasses, hasItems(jdkAvailableClasses.toArray(new String[0]))); + } + } } diff --git a/osgi-test/src/test/java/org/ehcache/osgi/OffHeapOsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/OffHeapOsgiTest.java index 2ada979e68..4143fbc681 100644 --- a/osgi-test/src/test/java/org/ehcache/osgi/OffHeapOsgiTest.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/OffHeapOsgiTest.java @@ -33,82 +33,123 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.osgi.EhcacheActivator.OSGI_LOADING; +import static org.ehcache.osgi.OsgiTestUtils.baseConfiguration; +import static org.ehcache.osgi.OsgiTestUtils.gradleBundle; +import static org.ehcache.osgi.OsgiTestUtils.jaxbConfiguration; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.ops4j.pax.exam.CoreOptions.bundle; -import static org.ops4j.pax.exam.CoreOptions.junitBundles; -import static org.ops4j.pax.exam.CoreOptions.mavenBundle; +import static org.ops4j.pax.exam.CoreOptions.frameworkProperty; import static org.ops4j.pax.exam.CoreOptions.options; -/** - * OffHeapOsgiTest - */ @RunWith(PaxExam.class) @ExamReactorStrategy(PerMethod.class) public class OffHeapOsgiTest { @Configuration - public Option[] config() { + public Option[] individualModules() { + return options( + gradleBundle("org.ehcache.modules:ehcache-api"), + gradleBundle("org.ehcache.modules:ehcache-core"), + gradleBundle("org.ehcache.modules:ehcache-impl"), + + gradleBundle("org.terracotta:statistics"), + gradleBundle("org.ehcache:sizeof"), + gradleBundle("org.terracotta:offheap-store"), + gradleBundle("org.terracotta:terracotta-utilities-tools"), + + baseConfiguration("OffHeapOsgiTest", "individualModules") + ); + } + + @Configuration + public Option[] uberJarWithOsgiServiceLoading() { return options( - mavenBundle("org.slf4j", "slf4j-api", System.getProperty("ehcache.osgi.slf4j.version")), - mavenBundle("org.slf4j", "slf4j-simple", System.getProperty("ehcache.osgi.slf4j.version")).noStart(), - bundle("file:" + System.getProperty("ehcache.osgi.jar")), - junitBundles() + gradleBundle("org.ehcache:ehcache"), jaxbConfiguration(), + + baseConfiguration("OffHeapOsgiTest", "uberJarWithOsgiServiceLoading") + ); + } + + @Configuration + public Option[] uberJarWithJdkServiceLoading() { + return options( + frameworkProperty(OSGI_LOADING).value("false"), + + gradleBundle("org.ehcache:ehcache"), jaxbConfiguration(), + + baseConfiguration("OffHeapOsgiTest", "uberJarWithJdkServiceLoading") ); } @Test public void testOffHeapInOsgi() { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + TestMethods.testOffHeapInOsgi(); + } + + @Test + public void testOffHeapClientClass() { + TestMethods.testOffHeapClientClass(); + } + + private static class TestMethods { + + public static void testOffHeapInOsgi() { + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) - .build()) + .build()) .build(true); - Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); - cache.put(42L, "I am out of heap!!"); + cache.put(42L, "I am out of heap!!"); - cache.get(42L); - } + cache.get(42L); + } - @Test - public void testOffHeapClientClass() { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withClassLoader(getClass().getClassLoader()) + public static void testOffHeapClientClass() { + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withClassLoader(TestMethods.class.getClassLoader()) .withCache("myCache", newCacheConfigurationBuilder(Long.class, Order.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(2, MemoryUnit.MB)) - .build()) + .build()) .build(true); - Cache cache = cacheManager.getCache("myCache", Long.class, Order.class); + Cache cache = cacheManager.getCache("myCache", Long.class, Order.class); - Order order = new Order(42L); - cache.put(42L, order); + Order order = new Order(42L); + cache.put(42L, order); - assertTrue(cache.get(42L) instanceof Order); + assertTrue(cache.get(42L) instanceof Order); - cache.replace(42L, order, new Order(-1L)); + cache.replace(42L, order, new Order(-1L)); - assertEquals(-1L, cache.get(42L).id); - } + assertEquals(-1L, cache.get(42L).id); + } - private static class Order implements Serializable { - final long id; + private static class Order implements Serializable { - Order(long id) { - this.id = id; - } + private static final long serialVersionUID = 1L; - @Override - public int hashCode() { - return (int) id; - } + final long id; + + Order(long id) { + this.id = id; + } - @Override - public boolean equals(Object obj) { - if (obj instanceof Order) { - return ((Order) obj).id == this.id; + @Override + public int hashCode() { + return (int) id; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof Order) { + return ((Order) obj).id == this.id; + } + return false; } - return false; } + + } } diff --git a/osgi-test/src/test/java/org/ehcache/osgi/Person.java b/osgi-test/src/test/java/org/ehcache/osgi/Person.java index 9e45b3a194..568a9b4be5 100644 --- a/osgi-test/src/test/java/org/ehcache/osgi/Person.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/Person.java @@ -23,6 +23,8 @@ */ public class Person implements Serializable { + private static final long serialVersionUID = 1L; + final String name; Person(String name) { diff --git a/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java index e591330bf0..7498a7e22d 100644 --- a/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java @@ -19,6 +19,9 @@ import org.ehcache.Cache; import org.ehcache.CacheManager; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.core.osgi.EhcacheActivator; +import org.ehcache.core.osgi.OsgiServiceLoader; +import org.ehcache.core.spi.service.ServiceFactory; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.copy.ReadWriteCopier; import org.ehcache.impl.copy.SerializingCopier; @@ -30,89 +33,165 @@ import org.ops4j.pax.exam.junit.PaxExam; import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; import org.ops4j.pax.exam.spi.reactors.PerMethod; +import org.osgi.framework.wiring.BundleWiring; +import java.util.ServiceLoader; +import java.util.Set; + +import static java.util.Spliterators.spliterator; +import static java.util.stream.Collectors.toSet; +import static java.util.stream.Stream.of; +import static java.util.stream.StreamSupport.stream; import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.osgi.EhcacheActivator.OSGI_LOADING; +import static org.ehcache.osgi.OsgiTestUtils.baseConfiguration; +import static org.ehcache.osgi.OsgiTestUtils.gradleBundle; +import static org.ehcache.osgi.OsgiTestUtils.jaxbConfiguration; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsCollectionContaining.hasItems; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.ops4j.pax.exam.CoreOptions.bundle; -import static org.ops4j.pax.exam.CoreOptions.junitBundles; -import static org.ops4j.pax.exam.CoreOptions.mavenBundle; +import static org.ops4j.pax.exam.CoreOptions.frameworkProperty; import static org.ops4j.pax.exam.CoreOptions.options; -/** - * SimpleOsgiTest - */ @RunWith(PaxExam.class) @ExamReactorStrategy(PerMethod.class) public class SimpleOsgiTest { @Configuration - public Option[] config() { + public Option[] individualModules() { + return options( + gradleBundle("org.ehcache.modules:ehcache-api"), + gradleBundle("org.ehcache.modules:ehcache-core"), + gradleBundle("org.ehcache.modules:ehcache-impl"), + gradleBundle("org.ehcache.modules:ehcache-xml"), jaxbConfiguration(), + + gradleBundle("org.terracotta:statistics"), + gradleBundle("org.ehcache:sizeof"), + gradleBundle("org.terracotta:offheap-store"), + gradleBundle("org.terracotta:terracotta-utilities-tools"), + + baseConfiguration("SimpleOsgiTest", "individualModules") + ); + } + + @Configuration + public Option[] uberJarWithOsgiServiceLoading() { + return options( + gradleBundle("org.ehcache:ehcache"), jaxbConfiguration(), + + baseConfiguration("SimpleOsgiTest", "uberJarWithOsgiServiceLoading") + ); + } + + @Configuration + public Option[] uberJarWithJdkServiceLoading() { return options( - mavenBundle("org.slf4j", "slf4j-api", System.getProperty("ehcache.osgi.slf4j.version")), - mavenBundle("org.slf4j", "slf4j-simple", System.getProperty("ehcache.osgi.slf4j.version")).noStart(), - bundle("file:" + System.getProperty("ehcache.osgi.jar")), - junitBundles() + frameworkProperty(OSGI_LOADING).value("false"), + + gradleBundle("org.ehcache:ehcache"), jaxbConfiguration(), + + baseConfiguration("SimpleOsgiTest", "uberJarWithJdkServiceLoading") ); } @Test public void testEhcache3AsBundle() { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("myCache", newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build()) - .build(true); + TestMethods.testEhcache3AsBundle(); + } - Cache myCache = cacheManager.getCache("myCache", Long.class, String.class); + @Test + public void testEhcache3WithSerializationAndClientClass() { + TestMethods.testEhcache3WithSerializationAndClientClass(); + } - myCache.put(42L, "DaAnswer!"); - assertEquals("DaAnswer!", myCache.get(42L)); + @Test + public void testCustomCopier() { + TestMethods.testCustomCopier(); } @Test - public void testEhcache3WithSerializationAndClientClass() { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + public void testEhcacheXMLConfig() throws Exception { + TestMethods.testEhcacheXMLConfig(); + } + + @Test + public void testAllServicesAreAvailable() { + TestMethods.testAllServicesAreAvailable(); + } + + private static class TestMethods { + + public static void testEhcache3AsBundle() { + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build()) + .build(true); + + Cache myCache = cacheManager.getCache("myCache", Long.class, String.class); + + myCache.put(42L, "DaAnswer!"); + assertEquals("DaAnswer!", myCache.get(42L)); + } + + public static void testEhcache3WithSerializationAndClientClass() { + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", newCacheConfigurationBuilder(Long.class, Person.class, heap(10)) - .add(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) - .withClassLoader(getClass().getClassLoader()) - .build()) + .withService(new DefaultCopierConfiguration<>(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) + .withClassLoader(TestMethods.class.getClassLoader()) + .build()) .build(true); - Cache myCache = cacheManager.getCache("myCache", Long.class, Person.class); + Cache myCache = cacheManager.getCache("myCache", Long.class, Person.class); - myCache.put(42L, new Person("Arthur")); - assertTrue(myCache.get(42L) instanceof Person); - } + myCache.put(42L, new Person("Arthur")); + assertTrue(myCache.get(42L) instanceof Person); + } - @Test - public void testCustomCopier() { - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + public static void testCustomCopier() { + CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .add(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) - .withClassLoader(getClass().getClassLoader()) - .build()) + .withService(new DefaultCopierConfiguration<>(StringCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .withClassLoader(TestMethods.class.getClassLoader()) + .build()) .build(true); - Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); - cache.put(42L, "What's the question again?"); - cache.get(42L); - } + cache.put(42L, "What's the question again?"); + cache.get(42L); + } - @Test - public void testEhcacheXMLConfig() throws Exception { - XmlConfiguration configuration = new XmlConfiguration(getClass().getResource("/org/ehcache/osgi/ehcache-osgi.xml").toURI().toURL(), getClass().getClassLoader()); + public static void testEhcacheXMLConfig() throws Exception { + XmlConfiguration configuration = new XmlConfiguration(TestMethods.class.getResource("/org/ehcache/osgi/ehcache-osgi.xml").toURI().toURL(), TestMethods.class.getClassLoader()); - assertEquals(Person.class, configuration.getCacheConfigurations().get("bar").getValueType()); - } + assertEquals(Person.class, configuration.getCacheConfigurations().get("bar").getValueType()); + } - public static class StringCopier extends ReadWriteCopier { + public static void testAllServicesAreAvailable() { + Set osgiAvailableClasses = + stream(spliterator(OsgiServiceLoader.load(ServiceFactory.class).iterator(), Long.MAX_VALUE, 0), false) + .map(f -> f.getClass().getName()) + .collect(toSet()); - @Override - public String copy(String obj) { - return new String(obj); + Set jdkAvailableClasses = of(EhcacheActivator.getCoreBundle().getBundles()) + .map(b -> b.adapt(BundleWiring.class).getClassLoader()) + .flatMap(cl -> + stream(spliterator(ServiceLoader.load(ServiceFactory.class, cl).iterator(), Long.MAX_VALUE, 0), false) + .map(f -> f.getClass().getName())) + .collect(toSet()); + + assertThat(osgiAvailableClasses, hasItems(jdkAvailableClasses.toArray(new String[0]))); } - } + public static class StringCopier extends ReadWriteCopier { + + @Override + public String copy(String obj) { + return new String(obj); + } + } + + } } diff --git a/osgi-test/src/test/java/org/ehcache/osgi/TransactionalOsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/TransactionalOsgiTest.java new file mode 100644 index 0000000000..e5966016f8 --- /dev/null +++ b/osgi-test/src/test/java/org/ehcache/osgi/TransactionalOsgiTest.java @@ -0,0 +1,147 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.osgi; + +import bitronix.tm.BitronixTransactionManager; +import bitronix.tm.TransactionManagerServices; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; +import org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup; +import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; +import org.ehcache.xml.XmlConfiguration; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; + +import static bitronix.tm.TransactionManagerServices.getTransactionManager; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManager; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.osgi.OsgiTestUtils.baseConfiguration; +import static org.ehcache.osgi.OsgiTestUtils.gradleBundle; +import static org.ehcache.osgi.OsgiTestUtils.jaxbConfiguration; +import static org.ehcache.osgi.OsgiTestUtils.jtaConfiguration; +import static org.ops4j.pax.exam.CoreOptions.options; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +public class TransactionalOsgiTest { + + @Configuration + public Option[] individualModules() { + return options( + gradleBundle("org.ehcache.modules:ehcache-api"), + gradleBundle("org.ehcache.modules:ehcache-core"), + gradleBundle("org.ehcache.modules:ehcache-impl"), + gradleBundle("org.ehcache.modules:ehcache-xml"), jaxbConfiguration(), + gradleBundle("org.ehcache:ehcache-transactions"), jtaConfiguration(), + + gradleBundle("org.terracotta:statistics"), + gradleBundle("org.ehcache:sizeof"), + gradleBundle("org.terracotta:offheap-store"), + gradleBundle("org.terracotta:terracotta-utilities-tools"), + + baseConfiguration("TransactionalOsgiTest", "individualModules") + ); + } + + @Configuration + public Option[] uberJar() { + return options( + gradleBundle("org.ehcache:ehcache"), jaxbConfiguration(), + gradleBundle("org.ehcache:ehcache-transactions"), jtaConfiguration(), + + baseConfiguration("TransactionalOsgiTest", "uberJar") + ); + } + + @Before + public void setUp() throws Exception { + TransactionManagerServices.getConfiguration().setJournal("null").setServerId(getClass().getSimpleName()); + } + + @After + public void tearDown() throws Exception { + if (TransactionManagerServices.isTransactionManagerRunning()) { + TransactionManagerServices.getTransactionManager().shutdown(); + } + } + + @Test + public void testProgrammaticConfiguration() throws Exception { + TestMethods.testProgrammaticConfiguration(); + } + + @Test + public void testXmlConfiguration() throws Exception { + TestMethods.testXmlConfiguration(); + } + + private static class TestMethods { + + public static void testProgrammaticConfiguration() throws Exception { + BitronixTransactionManager transactionManager = getTransactionManager(); + + try (CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withClassLoader(TestMethods.class.getClassLoader()) + .using(new LookupTransactionManagerProviderConfiguration(BitronixTransactionManagerLookup.class)) + .withCache("xaCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .withService(new XAStoreConfiguration("xaCache")).build()).build(true)) { + + Cache xaCache = cacheManager.getCache("xaCache", Long.class, String.class); + + transactionManager.begin(); + try { + xaCache.put(1L, "one"); + } catch (Throwable t) { + transactionManager.rollback(); + } + transactionManager.commit(); + } + transactionManager.shutdown(); + } + + public static void testXmlConfiguration() throws Exception { + BitronixTransactionManager transactionManager = getTransactionManager(); + + try (CacheManager cacheManager = newCacheManager( + new XmlConfiguration(TestMethods.class.getResource("ehcache-xa-osgi.xml"), TestMethods.class.getClassLoader()) + )) { + cacheManager.init(); + + Cache xaCache = cacheManager.getCache("xaCache", Long.class, String.class); + + transactionManager.begin(); + try { + xaCache.put(1L, "one"); + } catch (Throwable t) { + transactionManager.rollback(); + } + transactionManager.commit(); + } + transactionManager.shutdown(); + } + } +} diff --git a/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-107-osgi.xml b/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-107-osgi.xml index 4a6c31aec9..5c757fccc5 100644 --- a/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-107-osgi.xml +++ b/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-107-osgi.xml @@ -15,11 +15,8 @@ --> + xmlns:jsr107='http://www.ehcache.org/v3/jsr107'> @@ -40,4 +37,4 @@ 20 - \ No newline at end of file + diff --git a/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-osgi.xml b/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-osgi.xml index 704738aa26..8155f7476e 100644 --- a/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-osgi.xml +++ b/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-osgi.xml @@ -30,9 +30,7 @@ ~ limitations under the License. --> - + java.lang.String org.ehcache.osgi.Person diff --git a/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-xa-osgi.xml b/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-xa-osgi.xml new file mode 100644 index 0000000000..97624b7556 --- /dev/null +++ b/osgi-test/src/test/resources/org/ehcache/osgi/ehcache-xa-osgi.xml @@ -0,0 +1,31 @@ + + + + + + + + + java.lang.Long + java.lang.String + 10 + + + + diff --git a/settings.gradle b/settings.gradle index 3d830fcd1e..1117a15add 100644 --- a/settings.gradle +++ b/settings.gradle @@ -14,6 +14,23 @@ * limitations under the License. */ -include "api", "spi-tester", "core", "core-spi-test", "impl", "management", "transactions", "107", "xml", - "clustered", "clustered:common", "clustered:client", "clustered:server", "clustered:integration-test", "clustered:clustered-dist", "clustered:ops-tool", - "integration-test", "dist", "osgi-test", "demos", "demos:00-NoCache", "demos:01-CacheAside", "docs" +pluginManagement { + plugins { + id 'io.codearte.nexus-staging' version '0.30.0' + id 'org.owasp.dependencycheck' version '6.2.2' + id 'org.gretty' version '3.0.6' + id 'org.asciidoctor.jvm.base' version '3.3.2' + id 'org.unbroken-dome.xjc' version '2.0.0' + } + + includeBuild 'build-logic' +} + +include "ehcache-api", "ehcache-core", "ehcache-impl", "ehcache-107", "ehcache-xml", + "ehcache-management", "ehcache-transactions", "ehcache", + "spi-tester", "core-spi-test", + "clustered:ehcache-common-api", "clustered:ehcache-common", + "clustered:server:ehcache-service-api", "clustered:server:ehcache-service", "clustered:server:ehcache-entity", + "clustered:ehcache-client", "clustered:ehcache-clustered", "clustered:ops-tool", + "clustered:test-utils", "clustered:integration-test", + "integration-test", "osgi-test", "clustered:osgi-test", "demos", "demos:00-NoCache", "demos:01-CacheAside", "docs" diff --git a/spi-tester/.gitignore b/spi-tester/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/spi-tester/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/spi-tester/build.gradle b/spi-tester/build.gradle new file mode 100644 index 0000000000..f119623938 --- /dev/null +++ b/spi-tester/build.gradle @@ -0,0 +1,3 @@ +plugins { + id 'org.ehcache.build.conventions.java-library' +} diff --git a/spi-tester/gradle.properties b/spi-tester/gradle.properties deleted file mode 100644 index 607a9322c8..0000000000 --- a/spi-tester/gradle.properties +++ /dev/null @@ -1,2 +0,0 @@ -subPomName = Ehcache 3 SPI Tester module -subPomDesc = SPI Tester diff --git a/spi-tester/src/main/java/org/ehcache/spi/test/After.java b/spi-tester/src/main/java/org/ehcache/spi/test/After.java index 59b30f5bce..3a124e163a 100644 --- a/spi-tester/src/main/java/org/ehcache/spi/test/After.java +++ b/spi-tester/src/main/java/org/ehcache/spi/test/After.java @@ -16,16 +16,15 @@ package org.ehcache.spi.test; -/** - * Indicates the annotated method has to be run after each SPI Test - * Created by rism on 19-02-2015. - */ - import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +/** + * Indicates the annotated method has to be run after each SPI Test + * Created by rism on 19-02-2015. + */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) public @interface After { diff --git a/spi-tester/src/main/java/org/ehcache/spi/test/Before.java b/spi-tester/src/main/java/org/ehcache/spi/test/Before.java index e3fe2f34ab..53acbc80d1 100644 --- a/spi-tester/src/main/java/org/ehcache/spi/test/Before.java +++ b/spi-tester/src/main/java/org/ehcache/spi/test/Before.java @@ -16,16 +16,15 @@ package org.ehcache.spi.test; -/** - * Indicates the annotated method has to be run before each SPI Test - * Created by rism on 19-02-2015. - */ - import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +/** + * Indicates the annotated method has to be run before each SPI Test + * Created by rism on 19-02-2015. + */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) public @interface Before { diff --git a/spi-tester/src/main/java/org/ehcache/spi/test/LegalSPITesterException.java b/spi-tester/src/main/java/org/ehcache/spi/test/LegalSPITesterException.java index 7093c922ea..585bff6b0c 100644 --- a/spi-tester/src/main/java/org/ehcache/spi/test/LegalSPITesterException.java +++ b/spi-tester/src/main/java/org/ehcache/spi/test/LegalSPITesterException.java @@ -23,6 +23,8 @@ */ public class LegalSPITesterException extends Exception { + private static final long serialVersionUID = -8258017920644785486L; + /** * Creates a new exception wrapping the {@link Throwable cause} passed in. * diff --git a/spi-tester/src/main/java/org/ehcache/spi/test/Result.java b/spi-tester/src/main/java/org/ehcache/spi/test/Result.java index 763883b319..fee7f4db9e 100755 --- a/spi-tester/src/main/java/org/ehcache/spi/test/Result.java +++ b/spi-tester/src/main/java/org/ehcache/spi/test/Result.java @@ -24,7 +24,6 @@ */ public class Result { private long startTime; - private long endTime; private long runTime; private int runCount; private final List failedTests = new ArrayList<>(); @@ -48,7 +47,7 @@ public void testRunStarted() { } public void testRunFinished() { - endTime = System.nanoTime(); + long endTime = System.nanoTime(); runTime = (endTime - startTime) / 1000L; } diff --git a/start_next_version.sh b/start_next_version.sh new file mode 100755 index 0000000000..325a68cc63 --- /dev/null +++ b/start_next_version.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash + +### +# Call this script when starting a new major or minor version. +# It will create the branch for the previous version and update all the required files to start the new version. +# +# See https://github.com/ehcache/ehcache3/wiki/dev.release for details. +# +### + +# to exit in case of error +set -e +# to see what's going on +set -v + +function pause { + echo + read -p "Press [enter] to continue" +} + +echo 'Welcome to the Ehcache next version wizard' +echo +echo 'This wizard will guide you through moving from a major.minor version to another. Some steps will be performed automatically, some will require your help' +echo + +read -e -p "Enter the next version (x.y): " version + +short_version=${version//[.]/} + +echo "Upgrading gradle.properties to ${version}" +sed -i '' "s/ehcacheVersion = .*/ehcacheVersion = ${version}-SNAPSHOT/" gradle.properties + +echo "Update docs sourcedir to sourcedir${short_version}" +find docs -type f -name '*.adoc' -exec sed -i '' "s/sourcedir[0-9][0-9]/sourcedir${short_version}/g" {} \; + +echo "Update version in site content to ${version}" +find docs -type f -name '*.adoc' -exec sed -i '' "s/version: [0-9]\.[0-9]/version: ${version}/" {} \; + +echo "Add new XSDs for ${version}" +sed -i '' "s/\/\/ needle_for_core_xsd/** Location for ${version}: \`http:\/\/www.ehcache.org\/schema\/ehcache-core-${version}.xsd\`\\ +\/\/ needle_for_core_xsd/" docs/src/docs/asciidoc/user/xsds.adoc +sed -i '' "s/\/\/ needle_for_107_xsd/** Location for ${version}: \`http:\/\/www.ehcache.org\/schema\/ehcache-107-ext-${version}.xsd\`\\ +\/\/ needle_for_107_xsd/" docs/src/docs/asciidoc/user/xsds.adoc +sed -i '' "s/\/\/ needle_for_transactions_xsd/** Location for ${version}: \`http:\/\/www.ehcache.org\/schema\/ehcache-tx-ext-${version}.xsd\`\\ +\/\/ needle_for_transactions_xsd/" docs/src/docs/asciidoc/user/xsds.adoc +sed -i '' "s/\/\/ needle_for_clustered_xsd/** Location for ${version}: \`http:\/\/www.ehcache.org\/schema\/ehcache-clustered-ext-${version}.xsd\`\\ +\/\/ needle_for_clustered_xsd/" docs/src/docs/asciidoc/user/xsds.adoc diff --git a/transactions/build.gradle b/transactions/build.gradle deleted file mode 100644 index 90fbf51d10..0000000000 --- a/transactions/build.gradle +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -group = 'org.ehcache' - -apply plugin: EhOsgi -apply plugin: EhPomMangle - -dependencies { - compile project(':impl'), project(':xml') - compile group: 'javax.transaction', name: 'jta', version: '1.1' - testCompile project(path: ':core-spi-test') - compile (group: 'org.codehaus.btm', name: 'btm', version: '2.1.4') { - exclude group:'org.slf4j', module:'slf4j-api' - } -} - -// For EhPomMangle -dependencies { - pomOnlyCompile "org.ehcache:ehcache:$parent.baseVersion" - pomOnlyProvided 'javax.transaction:jta:1.1', 'org.codehaus.btm:btm:2.1.4' -} - -test { - if (testJava.javaVersion.isJava9Compatible()) { - jvmArgs += ['--add-modules', 'java.xml.bind'] - } -} - -tasks.withType(JavaCompile) { - options.compilerArgs += ['-Werror'] -} - -project.signing { - required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") } - sign project.configurations.getByName('archives') -} diff --git a/transactions/gradle.properties b/transactions/gradle.properties deleted file mode 100644 index e685b39fac..0000000000 --- a/transactions/gradle.properties +++ /dev/null @@ -1,5 +0,0 @@ -subPomName = Ehcache 3 transactions module -subPomDesc = The transactions module of Ehcache 3 -osgi = {"Export-Package" : ["!org.ehcache.transactions.xa.internal.*"],\ - "Import-Package" : ["bitronix.tm.*;resolution:=optional", "javax.transaction.*;resolution:=optional",\ - "!sun.misc.*", "!sun.security.action.*", "!org.ehcache.transactions.*"]} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/XAStoreAccessException.java b/transactions/src/main/java/org/ehcache/transactions/xa/XAStoreAccessException.java deleted file mode 100644 index f2d9a34b6e..0000000000 --- a/transactions/src/main/java/org/ehcache/transactions/xa/XAStoreAccessException.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.transactions.xa; - -import org.ehcache.core.internal.resilience.ResilienceStrategy; -import org.ehcache.core.internal.resilience.RethrowingStoreAccessException; -import org.ehcache.core.spi.store.StoreAccessException; -import org.ehcache.transactions.xa.internal.XAStore; - -/** - * A {@link StoreAccessException} thrown by the {@link XAStore} that is not handled by the - * {@link ResilienceStrategy} but used to throw a {@link RuntimeException} to the user of the cache. - * - * @author Ludovic Orban - */ -public class XAStoreAccessException extends RethrowingStoreAccessException { - public XAStoreAccessException(RuntimeException cause) { - super(cause); - } -} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java deleted file mode 100644 index 0321f8d2ef..0000000000 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions.xa.internal; - -import org.ehcache.core.spi.store.AbstractValueHolder; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.serialization.Serializer; -import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; - -import java.io.ObjectStreamException; -import java.io.Serializable; -import java.nio.ByteBuffer; -import java.util.concurrent.TimeUnit; - -/** - * The {@link XAStore} {@link Store.ValueHolder} implementation. - * - * @author Ludovic Orban - */ -@FindbugsSuppressWarnings("SE_NO_SUITABLE_CONSTRUCTOR") -public class XAValueHolder extends AbstractValueHolder implements Serializable { - - static final TimeUnit NATIVE_TIME_UNIT = TimeUnit.MILLISECONDS; - - private final V value; - private final byte[] valueSerialized; - - public XAValueHolder(Store.ValueHolder> valueHolder, V value) { - super(-1, valueHolder.creationTime(TimeUnit.MILLISECONDS), valueHolder.expirationTime(TimeUnit.MILLISECONDS)); - this.value = value; - this.valueSerialized = null; - } - - public XAValueHolder(V value, long creationTime) { - super(-1, creationTime, NO_EXPIRE); - if (value == null) { - throw new NullPointerException("null value"); - } - this.value = value; - this.valueSerialized = null; - } - - private XAValueHolder(XAValueHolder valueHolder, ByteBuffer serializedValue) { - super(-1, valueHolder.creationTime(TimeUnit.MILLISECONDS), valueHolder.expirationTime(TimeUnit.MILLISECONDS)); - this.value = null; - this.valueSerialized = new byte[serializedValue.remaining()]; - serializedValue.get(this.valueSerialized); - } - - public XAValueHolder(XAValueHolder valueHolder, V value) { - super(-1, valueHolder.creationTime(TimeUnit.MILLISECONDS), valueHolder.expirationTime(TimeUnit.MILLISECONDS)); - this.value = value; - this.valueSerialized = null; - } - - private XAValueHolder(long id, long creationTime, long lastAccessTime, long expirationTime, long hits, V value, byte[] valueSerialized) { - super(id, creationTime, expirationTime); - setLastAccessTime(lastAccessTime, NATIVE_TIME_UNIT); - setHits(hits); - this.value = value; - this.valueSerialized = valueSerialized; - } - - protected XAValueHolder copyForSerialization(Serializer valueSerializer) { - ByteBuffer serializedValue = valueSerializer.serialize(value); - return new XAValueHolder<>(this, serializedValue); - } - - protected XAValueHolder copyAfterDeserialization(Serializer valueSerializer) throws ClassNotFoundException { - return new XAValueHolder<>(this, valueSerializer.read(ByteBuffer.wrap(valueSerialized))); - } - - @Override - protected TimeUnit nativeTimeUnit() { - return NATIVE_TIME_UNIT; - } - - @Override - public V value() { - return value; - } - - @Override - public int hashCode() { - int result = 1; - result = 31 * result + value.hashCode(); - result = 31 * result + super.hashCode(); - return result; - } - - @Override - public boolean equals(Object other) { - if (this == other) return true; - if (other == null || getClass() != other.getClass()) return false; - - @SuppressWarnings("unchecked") - XAValueHolder that = (XAValueHolder) other; - - if (!super.equals(that)) return false; - return value.equals(that.value); - } - - private Object writeReplace() throws ObjectStreamException { - return new SerializedXAValueHolder<>(getId(), creationTime(NATIVE_TIME_UNIT), lastAccessTime(NATIVE_TIME_UNIT), expirationTime(NATIVE_TIME_UNIT), - hits(), value(), valueSerialized); - } - - /** - * Synthetic type used as serialized form of XAValueHolder - * - * @param the value type - */ - private static class SerializedXAValueHolder implements Serializable { - private final long id; - private final long creationTime; - private final long lastAccessTime; - private final long expirationTime; - private final long hits; - private final V value; - private final byte[] valueSerialized; - - SerializedXAValueHolder(long id, long creationTime, long lastAccessTime, long expirationTime, long hits, V value, byte[] valueSerialized) { - this.id = id; - this.creationTime = creationTime; - this.lastAccessTime = lastAccessTime; - this.expirationTime = expirationTime; - this.hits = hits; - this.value = value; - this.valueSerialized = valueSerialized; - } - - private Object readResolve() throws ObjectStreamException { - return new XAValueHolder<>(id, creationTime, lastAccessTime, expirationTime, hits, value, valueSerialized); - } - } - -} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java deleted file mode 100644 index 8024a7b418..0000000000 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions.xa.internal.journal; - -import org.ehcache.CachePersistenceException; -import org.ehcache.core.spi.service.DiskResourceService; -import org.ehcache.spi.persistence.PersistableResourceService; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.spi.service.Service; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * @author Ludovic Orban - */ -@ServiceDependencies(DiskResourceService.class) -public class DefaultJournalProvider implements JournalProvider { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultJournalProvider.class); - - private volatile DiskResourceService diskResourceService; - - @Override - public void start(ServiceProvider serviceProvider) { - this.diskResourceService = serviceProvider.getService(DiskResourceService.class); - } - - @Override - public void stop() { - this.diskResourceService = null; - } - - @Override - public Journal getJournal(PersistableResourceService.PersistenceSpaceIdentifier persistentSpaceId, Serializer keySerializer) { - if (persistentSpaceId == null) { - LOGGER.info("Using transient XAStore journal"); - return new TransientJournal<>(); - } - - try { - LOGGER.info("Using persistent XAStore journal"); - FileBasedPersistenceContext persistenceContext = diskResourceService.createPersistenceContextWithin(persistentSpaceId, "XAJournal"); - return new PersistentJournal<>(persistenceContext.getDirectory(), keySerializer); - } catch (CachePersistenceException cpe) { - throw new RuntimeException(cpe); - } - } -} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java deleted file mode 100644 index fcbbd82636..0000000000 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions.xa.internal.xml; - -import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; -import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerLookup; -import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider; -import org.ehcache.xml.CacheManagerServiceConfigurationParser; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.w3c.dom.Element; - -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; -import java.io.IOException; -import java.net.URI; -import java.net.URL; - -/** - * @author Ludovic Orban - */ -public class TxCacheManagerServiceConfigurationParser implements CacheManagerServiceConfigurationParser { - - private static final URI NAMESPACE = URI.create("http://www.ehcache.org/v3/tx"); - private static final URL XML_SCHEMA = TxCacheManagerServiceConfigurationParser.class.getResource("/ehcache-tx-ext.xsd"); - - @Override - public Source getXmlSchema() throws IOException { - return new StreamSource(XML_SCHEMA.openStream()); - } - - @Override - public URI getNamespace() { - return NAMESPACE; - } - - @Override - public ServiceCreationConfiguration parseServiceCreationConfiguration(Element fragment) { - String localName = fragment.getLocalName(); - if ("jta-tm".equals(localName)) { - String transactionManagerProviderConfigurationClassName = fragment.getAttribute("transaction-manager-lookup-class"); - try { - ClassLoader defaultClassLoader = ClassLoading.getDefaultClassLoader(); - Class aClass = Class.forName(transactionManagerProviderConfigurationClassName, true, defaultClassLoader); - @SuppressWarnings("unchecked") - Class clazz = (Class) aClass; - return new LookupTransactionManagerProviderConfiguration(clazz); - } catch (Exception e) { - throw new XmlConfigurationException("Error configuring XA transaction manager", e); - } - } else { - throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", - fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); - } - } -} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParser.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParser.java deleted file mode 100644 index f611c8cd69..0000000000 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheServiceConfigurationParser.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions.xa.internal.xml; - -import org.ehcache.xml.CacheServiceConfigurationParser; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.transactions.xa.internal.XAStore; -import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.w3c.dom.Element; - -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; -import java.io.IOException; -import java.net.URI; -import java.net.URL; - -/** - * @author Ludovic Orban - */ -public class TxCacheServiceConfigurationParser implements CacheServiceConfigurationParser { - - private static final URI NAMESPACE = URI.create("http://www.ehcache.org/v3/tx"); - private static final URL XML_SCHEMA = TxCacheManagerServiceConfigurationParser.class.getResource("/ehcache-tx-ext.xsd"); - - @Override - public Source getXmlSchema() throws IOException { - return new StreamSource(XML_SCHEMA.openStream()); - } - - @Override - public URI getNamespace() { - return NAMESPACE; - } - - @Override - public ServiceConfiguration parseServiceConfiguration(Element fragment) { - String localName = fragment.getLocalName(); - if ("xa-store".equals(localName)) { - String uniqueXAResourceId = fragment.getAttribute("unique-XAResource-id"); - return new XAStoreConfiguration(uniqueXAResourceId); - } else { - throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported", - fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName()))); - } - } -} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java deleted file mode 100644 index 1b7ea5217e..0000000000 --- a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions.xa.txmgr.provider; - -import org.ehcache.spi.service.ServiceCreationConfiguration; - -/** - * Specialized {@link ServiceCreationConfiguration} for the {@link LookupTransactionManagerProvider}. - */ -public class LookupTransactionManagerProviderConfiguration implements ServiceCreationConfiguration { - - private final Class lookupClass; - - @SuppressWarnings("unchecked") - public LookupTransactionManagerProviderConfiguration(String className) throws ClassNotFoundException { - this.lookupClass = (Class) Class.forName(className); - } - - public LookupTransactionManagerProviderConfiguration(Class clazz) { - this.lookupClass = clazz; - } - - /** - * Returns the class to be used for transaction manager lookup. - * - * @return the transaction manager lookup class - */ - Class getTransactionManagerLookup() { - return lookupClass; - } - - @Override - public Class getServiceType() { - return TransactionManagerProvider.class; - } -} diff --git a/transactions/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java b/transactions/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java deleted file mode 100644 index 781e18cad6..0000000000 --- a/transactions/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreLifecycleHelper.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.offheap; - -/** - * @author Ludovic Orban - */ -public class OffHeapStoreLifecycleHelper { - - private OffHeapStoreLifecycleHelper() { - } - - public static void init(OffHeapStore offHeapStore) { - OffHeapStore.Provider.init(offHeapStore); - } - - public static void close(OffHeapStore offHeapStore) { - OffHeapStore.Provider.close(offHeapStore); - } - -} diff --git a/transactions/src/test/java/org/ehcache/transactions/NonXACacheTest.java b/transactions/src/test/java/org/ehcache/transactions/NonXACacheTest.java deleted file mode 100644 index 66163bd1ea..0000000000 --- a/transactions/src/test/java/org/ehcache/transactions/NonXACacheTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions; - -import org.ehcache.CacheManager; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.core.spi.service.ServiceFactory; -import org.ehcache.transactions.xa.internal.XAStore; -import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider; -import org.hamcrest.Matchers; -import org.junit.Test; - -import java.util.HashSet; -import java.util.Set; - -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -/** - * Ensures that a non-XA {@code CacheManager} can be created when XA classes are - * available in classpath. - */ -public class NonXACacheTest { - - @Test - public void testNonXA() throws Exception { - - /* - * Ensure the XA provider classes are loadable through the ServiceLoader mechanism. - */ - Set> targetProviders = new HashSet<>(); - targetProviders.add(XAStore.Provider.class); - targetProviders.add(TransactionManagerProvider.class); - for (ServiceFactory factory : ClassLoading.libraryServiceLoaderFor(ServiceFactory.class)) { - if (targetProviders.remove(factory.getServiceType())) { - if (targetProviders.isEmpty()) { - break; - } - } - } - assertThat(targetProviders, is(Matchers.empty())); - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder( - String.class, - String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .offheap(1, MemoryUnit.MB) - .build()) - .build(); - - - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true); - - cacheManager.createCache("cache-1", cacheConfiguration); - cacheManager.createCache("cache-2", cacheConfiguration); - - cacheManager.close(); - } -} diff --git a/transactions/src/test/java/org/ehcache/transactions/XmlConfigTest.java b/transactions/src/test/java/org/ehcache/transactions/XmlConfigTest.java deleted file mode 100644 index 46912b8fff..0000000000 --- a/transactions/src/test/java/org/ehcache/transactions/XmlConfigTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.transactions; - -import bitronix.tm.BitronixTransactionManager; -import bitronix.tm.TransactionManagerServices; -import org.ehcache.CacheManager; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.Configuration; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.core.spi.service.ServiceUtils; -import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; -import org.ehcache.xml.XmlConfiguration; -import org.junit.Test; - -import java.net.URL; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -/** - * @author Ludovic Orban - */ -public class XmlConfigTest { - - @Test - public void testSimpleConfig() throws Exception { - TransactionManagerServices.getConfiguration().setJournal("null").setServerId("XmlConfigTest"); - BitronixTransactionManager transactionManager = TransactionManagerServices.getTransactionManager(); - - final URL myUrl = this.getClass().getResource("/configs/simple-xa.xml"); - Configuration xmlConfig = new XmlConfiguration(myUrl); - CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(xmlConfig); - myCacheManager.init(); - - myCacheManager.close(); - transactionManager.shutdown(); - } - - @Test - public void testTemplateConfigOverride() throws Exception { - final URL myUrl = this.getClass().getResource("/configs/template-xa.xml"); - Configuration xmlConfig = new XmlConfiguration(myUrl); - CacheConfiguration cacheConfiguration = xmlConfig.getCacheConfigurations().get("xaCache1"); - XAStoreConfiguration xaStoreConfiguration = ServiceUtils.findSingletonAmongst(XAStoreConfiguration.class, cacheConfiguration - .getServiceConfigurations()); - - assertThat(xaStoreConfiguration.getUniqueXAResourceId(), is("xaCache1")); - } -} diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java deleted file mode 100644 index 85ca31621c..0000000000 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java +++ /dev/null @@ -1,1570 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions.xa.internal; - -import org.ehcache.Cache; -import org.ehcache.ValueSupplier; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePool; -import org.ehcache.config.ResourceType; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.core.events.StoreEventDispatcher; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.service.DiskResourceService; -import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; -import org.ehcache.core.events.NullStoreEventDispatcher; -import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; -import org.ehcache.impl.internal.spi.copy.DefaultCopyProvider; -import org.ehcache.impl.internal.store.heap.OnHeapStore; -import org.ehcache.impl.internal.store.offheap.MemorySizeParser; -import org.ehcache.impl.internal.store.offheap.OffHeapStore; -import org.ehcache.impl.internal.store.offheap.OffHeapStoreLifecycleHelper; -import org.ehcache.impl.internal.store.tiering.TieredStore; -import org.ehcache.internal.TestTimeSource; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.copy.CopyProvider; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.transactions.xa.XACacheException; -import org.ehcache.transactions.xa.configuration.XAStoreConfiguration; -import org.ehcache.transactions.xa.internal.journal.Journal; -import org.ehcache.transactions.xa.internal.journal.TransientJournal; -import org.ehcache.transactions.xa.internal.txmgr.NullXAResourceRegistry; -import org.ehcache.transactions.xa.txmgr.TransactionManagerWrapper; -import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider; -import org.ehcache.transactions.xa.utils.JavaSerializer; -import org.ehcache.transactions.xa.utils.TestXid; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestName; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.Supplier; - -import javax.transaction.HeuristicMixedException; -import javax.transaction.HeuristicRollbackException; -import javax.transaction.InvalidTransactionException; -import javax.transaction.NotSupportedException; -import javax.transaction.RollbackException; -import javax.transaction.Status; -import javax.transaction.Synchronization; -import javax.transaction.SystemException; -import javax.transaction.Transaction; -import javax.transaction.TransactionManager; -import javax.transaction.xa.XAException; -import javax.transaction.xa.XAResource; - -import static java.util.Collections.emptySet; -import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; -import static org.ehcache.expiry.Duration.of; -import static org.ehcache.expiry.Expirations.timeToLiveExpiration; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.startsWith; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; - -/** - * Tests for {@link XAStore} and {@link org.ehcache.transactions.xa.internal.XAStore.Provider XAStore.Provider}. - */ -public class XAStoreTest { - - @Rule - public TestName testName = new TestName(); - - @SuppressWarnings("unchecked") - private final Class> valueClass = (Class) SoftLock.class; - private final TestTransactionManager testTransactionManager = new TestTransactionManager(); - private TransactionManagerWrapper transactionManagerWrapper; - private OnHeapStore> onHeapStore; - private Journal journal; - private TestTimeSource testTimeSource; - private ClassLoader classLoader; - private Serializer keySerializer; - private Serializer> valueSerializer; - private StoreEventDispatcher> eventDispatcher; - private final Expiry expiry = timeToLiveExpiration(of(1, TimeUnit.SECONDS)); - private Copier keyCopier; - private Copier> valueCopier; - - @Before - public void setUp() { - transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - classLoader = ClassLoader.getSystemClassLoader(); - keySerializer = new JavaSerializer<>(classLoader); - valueSerializer = new JavaSerializer<>(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - valueCopier = copyProvider.createValueCopier(valueClass, valueSerializer); - Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .build(), - 0, keySerializer, valueSerializer); - testTimeSource = new TestTimeSource(); - eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - journal = new TransientJournal<>(); - } - - @Test - public void testXAStoreProviderFailsToRankWhenNoTMProviderConfigured() throws Exception { - XAStore.Provider provider = new XAStore.Provider(); - provider.start(new ServiceProvider() { - @Override - public U getService(Class serviceType) { - return null; - } - - @Override - public Collection getServicesOfType(Class serviceType) { - return emptySet(); - } - }); - try { - Set> resources = emptySet(); - provider.rank(resources, Collections.>singleton(mock(XAStoreConfiguration.class))); - fail("Expected exception"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), containsString("TransactionManagerProvider")); - } - } - - @Test - public void testSimpleGetPutRemove() throws Exception { - XAStore xaStore = getXAStore(onHeapStore); - - testTransactionManager.begin(); - { - assertThat(xaStore.remove(1L), equalTo(false)); - assertThat(xaStore.get(1L), is(nullValue())); - assertThat(xaStore.put(1L, "1"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.get(1L).value(), equalTo("one")); - } - testTransactionManager.rollback(); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - assertThat(xaStore.get(1L), is(nullValue())); - assertThat(xaStore.put(1L, "1"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.get(1L).value(), equalTo("one")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "one"); - - testTransactionManager.begin(); - { - assertThat(xaStore.remove(1L), equalTo(true)); - assertThat(xaStore.remove(1L), equalTo(false)); - assertThat(xaStore.get(1L), is(nullValue())); - assertThat(xaStore.put(1L, "1"), equalTo(Store.PutStatus.PUT)); - } - testTransactionManager.rollback(); - - assertMapping(xaStore, 1L, "one"); - - testTransactionManager.begin(); - { - assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.remove(1L), equalTo(true)); - assertThat(xaStore.remove(1L), equalTo(false)); - assertThat(xaStore.get(1L), is(nullValue())); - assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.get(1L).value(), equalTo("un")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "un"); - } - - @Test - public void testConflictingGetPutRemove() throws Exception { - final XAStore xaStore = getXAStore(onHeapStore); - final AtomicReference exception = new AtomicReference<>(); - - testTransactionManager.begin(); - { - xaStore.put(1L, "one"); - } - testTransactionManager.commit(); - - testTransactionManager.begin(); - { - assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); - - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - assertThat(xaStore.put(1L, "uno"), equalTo(Store.PutStatus.NOOP)); - testTransactionManager.commit(); - return null; - }); - - assertThat(xaStore.put(1L, "eins"), equalTo(Store.PutStatus.PUT)); - } - testTransactionManager.commit(); - assertThat(exception.get(), is(nullValue())); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - xaStore.put(1L, "one"); - } - testTransactionManager.commit(); - - testTransactionManager.begin(); - { - assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); - - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.remove(1L), is(false)); - - testTransactionManager.commit(); - return null; - }); - - assertThat(xaStore.put(1L, "een"), equalTo(Store.PutStatus.PUT)); - } - testTransactionManager.commit(); - - assertThat(exception.get(), is(nullValue())); - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - xaStore.put(1L, "one"); - } - testTransactionManager.commit(); - - testTransactionManager.begin(); - { - assertThat(xaStore.put(1L, "un"), equalTo(Store.PutStatus.PUT)); - - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.get(1L), is(nullValue())); - - testTransactionManager.commit(); - return null; - }); - - assertThat(xaStore.put(1L, "yksi"), equalTo(Store.PutStatus.PUT)); - } - testTransactionManager.commit(); - - assertThat(exception.get(), is(nullValue())); - assertMapping(xaStore, 1L, null); - } - - private void executeWhileIn2PC(final AtomicReference exception, final Callable callable) { - testTransactionManager.getCurrentTransaction().registerTwoPcListener(() -> { - try { - Thread t = new Thread() { - @Override - public void run() { - try { - // this runs while the committing TX is in-doubt - callable.call(); - } catch (Throwable t) { - exception.set(t); - } - } - }; - t.start(); - t.join(); - } catch (Throwable e) { - exception.set(e); - } - }); - } - - @Test - public void testIterate() throws Exception { - XAStore xaStore = getXAStore(onHeapStore); - - testTransactionManager.begin(); - { - xaStore.put(1L, "one"); - xaStore.put(2L, "two"); - xaStore.put(3L, "three"); - } - testTransactionManager.commit(); - - testTransactionManager.begin(); - { - xaStore.put(0L, "zero"); - xaStore.put(1L, "un"); - xaStore.put(2L, "two"); - xaStore.remove(3L); - - Map iterated = new HashMap<>(); - Store.Iterator>> iterator = xaStore.iterator(); - while (iterator.hasNext()) { - Cache.Entry> next = iterator.next(); - iterated.put(next.getKey(), next.getValue().value()); - } - assertThat(iterated.size(), is(3)); - assertThat(iterated.get(0L), equalTo("zero")); - assertThat(iterated.get(1L), equalTo("un")); - assertThat(iterated.get(2L), equalTo("two")); - } - testTransactionManager.commit(); - - testTransactionManager.begin(); - { - Map iterated = new HashMap<>(); - Store.Iterator>> iterator = xaStore.iterator(); - while (iterator.hasNext()) { - Cache.Entry> next = iterator.next(); - iterated.put(next.getKey(), next.getValue().value()); - } - assertThat(iterated.size(), is(3)); - assertThat(iterated.get(0L), equalTo("zero")); - assertThat(iterated.get(1L), equalTo("un")); - assertThat(iterated.get(2L), equalTo("two")); - } - testTransactionManager.commit(); - - Store.Iterator>> iterator; - testTransactionManager.begin(); - { - iterator = xaStore.iterator(); - iterator.next(); - } - testTransactionManager.commit(); - - // cannot use iterator outside of tx context - try { - iterator.hasNext(); - fail(); - } catch (XACacheException e) { - // expected - } - try { - iterator.next(); - fail(); - } catch (XACacheException e) { - // expected - } - - // cannot use iterator outside of original tx context - testTransactionManager.begin(); - { - try { - iterator.hasNext(); - fail(); - } catch (IllegalStateException e) { - // expected - } - try { - iterator.next(); - fail(); - } catch (IllegalStateException e) { - // expected - } - } - testTransactionManager.commit(); - } - - @Test - public void testPutIfAbsent() throws Exception { - final XAStore xaStore = getXAStore(onHeapStore); - final AtomicReference exception = new AtomicReference<>(); - - testTransactionManager.begin(); - { - assertThat(xaStore.putIfAbsent(1L, "one"), is(nullValue())); - assertThat(xaStore.get(1L).value(), equalTo("one")); - assertThat(xaStore.putIfAbsent(1L, "un").value(), equalTo("one")); - assertThat(xaStore.get(1L).value(), equalTo("one")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "one"); - - testTransactionManager.begin(); - { - assertThat(xaStore.putIfAbsent(1L, "un").value(), equalTo("one")); - assertThat(xaStore.get(1L).value(), equalTo("one")); - assertThat(xaStore.remove(1L), equalTo(true)); - assertThat(xaStore.putIfAbsent(1L, "uno"), is(nullValue())); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "uno"); - - testTransactionManager.begin(); - { - xaStore.put(1L, "eins"); - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.putIfAbsent(1L, "un"), is(nullValue())); - - testTransactionManager.commit(); - return null; - }); - } - testTransactionManager.commit(); - assertThat(exception.get(), is(nullValue())); - - assertMapping(xaStore, 1L, null); - } - - @Test - public void testRemove2Args() throws Exception { - final XAStore xaStore = getXAStore(onHeapStore); - final AtomicReference exception = new AtomicReference<>(); - - testTransactionManager.begin(); - { - assertThat(xaStore.remove(1L, "one"), equalTo(Store.RemoveStatus.KEY_MISSING)); - assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.remove(1L, "un"), equalTo(Store.RemoveStatus.KEY_PRESENT)); - assertThat(xaStore.remove(1L, "one"), equalTo(Store.RemoveStatus.REMOVED)); - assertThat(xaStore.remove(1L, "eins"), equalTo(Store.RemoveStatus.KEY_MISSING)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "one"); - - testTransactionManager.begin(); - { - assertThat(xaStore.remove(1L, "een"), equalTo(Store.RemoveStatus.KEY_PRESENT)); - assertThat(xaStore.remove(1L, "one"), equalTo(Store.RemoveStatus.REMOVED)); - assertThat(xaStore.remove(1L, "eins"), equalTo(Store.RemoveStatus.KEY_MISSING)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - xaStore.put(1L, "eins"); - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.remove(1L, "un"), equalTo(Store.RemoveStatus.KEY_MISSING)); - - testTransactionManager.commit(); - return null; - }); - } - testTransactionManager.commit(); - assertThat(exception.get(), is(nullValue())); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "one"); - - testTransactionManager.begin(); - { - xaStore.put(1L, "eins"); - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.remove(1L, "un"), equalTo(Store.RemoveStatus.KEY_MISSING)); - - testTransactionManager.commit(); - return null; - }); - } - testTransactionManager.commit(); - assertThat(exception.get(), is(nullValue())); - - assertMapping(xaStore, 1L, null); - } - - @Test - public void testReplace2Args() throws Exception { - final XAStore xaStore = getXAStore(onHeapStore); - final AtomicReference exception = new AtomicReference<>(); - - testTransactionManager.begin(); - { - assertThat(xaStore.replace(1L, "one"), is(nullValue())); - assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.replace(1L, "un").value(), equalTo("one")); - assertThat(xaStore.replace(1L, "uno").value(), equalTo("un")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "uno"); - - testTransactionManager.begin(); - { - assertThat(xaStore.replace(1L, "een").value(), equalTo("uno")); - assertThat(xaStore.replace(1L, "eins").value(), equalTo("een")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "eins"); - - testTransactionManager.begin(); - { - assertThat(xaStore.remove(1L), is(true)); - assertThat(xaStore.replace(1L, "yksi"), is(nullValue())); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - xaStore.put(1L, "eins"); - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.replace(1L, "un"), is(nullValue())); - - testTransactionManager.commit(); - return null; - }); - } - testTransactionManager.commit(); - assertThat(exception.get(), is(nullValue())); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - assertThat(xaStore.put(1L, "one"), is(Store.PutStatus.PUT)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "one"); - - testTransactionManager.begin(); - { - xaStore.put(1L, "eins"); - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.replace(1L, "un"), is(nullValue())); - - testTransactionManager.commit(); - return null; - }); - } - testTransactionManager.commit(); - assertThat(exception.get(), is(nullValue())); - - assertMapping(xaStore, 1L, null); - } - - @Test - public void testReplace3Args() throws Exception { - final XAStore xaStore = getXAStore(onHeapStore); - final AtomicReference exception = new AtomicReference<>(); - - testTransactionManager.begin(); - { - assertThat(xaStore.replace(1L, "one", "un"), equalTo(Store.ReplaceStatus.MISS_NOT_PRESENT)); - assertThat(xaStore.put(1L, "one"), equalTo(Store.PutStatus.PUT)); - assertThat(xaStore.replace(1L, "eins", "un"), equalTo(Store.ReplaceStatus.MISS_PRESENT)); - assertThat(xaStore.replace(1L, "one", "un"), equalTo(Store.ReplaceStatus.HIT)); - assertThat(xaStore.get(1L).value(), equalTo("un")); - assertThat(xaStore.replace(1L, "eins", "een"), equalTo(Store.ReplaceStatus.MISS_PRESENT)); - assertThat(xaStore.replace(1L, "un", "uno"), equalTo(Store.ReplaceStatus.HIT)); - assertThat(xaStore.get(1L).value(), equalTo("uno")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "uno"); - - testTransactionManager.begin(); - { - assertThat(xaStore.replace(1L, "one", "uno"), equalTo(Store.ReplaceStatus.MISS_PRESENT)); - assertThat(xaStore.replace(1L, "uno", "un"), equalTo(Store.ReplaceStatus.HIT)); - assertThat(xaStore.get(1L).value(), equalTo("un")); - assertThat(xaStore.remove(1L), equalTo(true)); - assertThat(xaStore.replace(1L, "un", "eins"), equalTo(Store.ReplaceStatus.MISS_NOT_PRESENT)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - xaStore.put(1L, "eins"); - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.replace(1L, "eins", "one"), is(Store.ReplaceStatus.MISS_NOT_PRESENT)); - - testTransactionManager.commit(); - return null; - }); - } - testTransactionManager.commit(); - assertThat(exception.get(), is(nullValue())); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - assertThat(xaStore.put(1L, "one"), is(Store.PutStatus.PUT)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "one"); - - testTransactionManager.begin(); - { - xaStore.put(1L, "eins"); - executeWhileIn2PC(exception, () -> { - testTransactionManager.begin(); - - assertThat(xaStore.replace(1L, "one", "un"), is(Store.ReplaceStatus.MISS_NOT_PRESENT)); - - testTransactionManager.commit(); - return null; - }); - } - testTransactionManager.commit(); - assertThat(exception.get(), is(nullValue())); - - assertMapping(xaStore, 1L, null); - } - - @Test - public void testCompute() throws Exception { - Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() - .offheap(10, MemoryUnit.MB) - .build(), - 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser - .parse("10M")); - OffHeapStoreLifecycleHelper.init(offHeapStore); - TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); - - XAStore xaStore = getXAStore(tieredStore); - - testTransactionManager.begin(); - { - Store.ValueHolder computed1 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, is(nullValue())); - return "one"; - }); - assertThat(computed1.value(), equalTo("one")); - Store.ValueHolder computed2 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, equalTo("one")); - return "un"; - }); - assertThat(computed2.value(), equalTo("un")); - Store.ValueHolder computed3 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, equalTo("un")); - return null; - }); - assertThat(computed3, is(nullValue())); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - Store.ValueHolder computed1 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, is(nullValue())); - return "one"; - }, () -> Boolean.FALSE); - assertThat(computed1.value(), equalTo("one")); - Store.ValueHolder computed2 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, equalTo("one")); - return null; - }, () -> Boolean.FALSE); - assertThat(computed2, is(nullValue())); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - Store.ValueHolder computed1 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, is(nullValue())); - return "one"; - }); - assertThat(computed1.value(), equalTo("one")); - Store.ValueHolder computed2 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, equalTo("one")); - return null; - }); - assertThat(computed2, is(nullValue())); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, null); - - testTransactionManager.begin(); - { - Store.ValueHolder computed1 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, is(nullValue())); - return "one"; - }); - assertThat(computed1.value(), equalTo("one")); - Store.ValueHolder computed2 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, equalTo("one")); - return "un"; - }); - assertThat(computed2.value(), equalTo("un")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "un"); - - testTransactionManager.begin(); - { - Store.ValueHolder computed = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, equalTo("un")); - return "eins"; - }); - assertThat(computed.value(), equalTo("eins")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "eins"); - - testTransactionManager.begin(); - { - Store.ValueHolder computed = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, equalTo("eins")); - return null; - }); - assertThat(computed, is(nullValue())); - } - testTransactionManager.rollback(); - - assertMapping(xaStore, 1L, "eins"); - - testTransactionManager.begin(); - { - Store.ValueHolder computed1 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, equalTo("eins")); - return null; - }); - assertThat(computed1, is(nullValue())); - Store.ValueHolder computed2 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, is(nullValue())); - return null; - }); - assertThat(computed2, is(nullValue())); - Store.ValueHolder computed3 = xaStore.compute(1L, (aLong, s) -> { - assertThat(aLong, is(1L)); - assertThat(s, is(nullValue())); - return "uno"; - }); - assertThat(computed3.value(), equalTo("uno")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "uno"); - - testTransactionManager.begin(); - { - xaStore.remove(1L); - } - testTransactionManager.commit(); - - testTransactionManager.begin(); - { - assertThat(xaStore.containsKey(1L), is(false)); - xaStore.put(1L, "uno"); - assertThat(xaStore.containsKey(1L), is(true)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "uno"); - - testTransactionManager.begin(); - { - assertThat(xaStore.containsKey(1L), is(true)); - xaStore.remove(1L); - assertThat(xaStore.containsKey(1L), is(false)); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, null); - - OffHeapStoreLifecycleHelper.close(offHeapStore); - } - - @Test - public void testComputeIfAbsent() throws Exception { - Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() - .offheap(10, MemoryUnit.MB) - .build(), - 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser - .parse("10M")); - OffHeapStoreLifecycleHelper.init(offHeapStore); - TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); - - XAStore xaStore = getXAStore(tieredStore); - - testTransactionManager.begin(); - { - Store.ValueHolder computed1 = xaStore.computeIfAbsent(1L, aLong -> { - assertThat(aLong, is(1L)); - return "one"; - }); - assertThat(computed1.value(), equalTo("one")); - Store.ValueHolder computed2 = xaStore.computeIfAbsent(1L, aLong -> { - fail("should not be absent"); - throw new AssertionError(); - }); - assertThat(computed2.value(), equalTo("one")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "one"); - - testTransactionManager.begin(); - { - Store.ValueHolder computed1 = xaStore.computeIfAbsent(1L, aLong -> { - fail("should not be absent"); - throw new AssertionError(); - }); - assertThat(computed1.value(), equalTo("one")); - - xaStore.remove(1L); - - Store.ValueHolder computed2 = xaStore.computeIfAbsent(1L, aLong -> { - assertThat(aLong, is(1L)); - return "un"; - }); - assertThat(computed2.value(), equalTo("un")); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "un"); - - OffHeapStoreLifecycleHelper.close(offHeapStore); - } - - @Test - public void testExpiry() throws Exception { - Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, - null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), - 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser - .parse("10M")); - OffHeapStoreLifecycleHelper.init(offHeapStore); - TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); - - XAStore xaStore = getXAStore(tieredStore); - - testTransactionManager.begin(); - { - xaStore.put(1L, "one"); - } - testTransactionManager.commit(); - - assertMapping(xaStore, 1L, "one"); - - testTimeSource.advanceTime(2000); - - assertMapping(xaStore, 1L, null); - - OffHeapStoreLifecycleHelper.close(offHeapStore); - } - - @Test - public void testExpiryCreateException() throws Exception { - Expiry expiry = new Expiry() { - - @Override - public Duration getExpiryForCreation(Object key, Object value) { - throw new RuntimeException(); - } - - @Override - public Duration getExpiryForAccess(Object key, ValueSupplier value) { - throw new AssertionError(); - } - - @Override - public Duration getExpiryForUpdate(Object key, ValueSupplier oldValue, Object newValue) { - throw new AssertionError(); - } - }; - Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), - 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser - .parse("10M")); - OffHeapStoreLifecycleHelper.init(offHeapStore); - TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); - - XAStore xaStore = getXAStore(tieredStore); - - testTransactionManager.begin(); - xaStore.put(1L, "one"); - testTransactionManager.commit(); - assertMapping(xaStore, 1L, null); - } - - @Test - public void testExpiryAccessException() throws Exception { - String uniqueXAResourceId = "testExpiryAccessException"; - Expiry expiry = new Expiry() { - - @Override - public Duration getExpiryForCreation(Object key, Object value) { - return Duration.INFINITE; - } - - @Override - public Duration getExpiryForAccess(Object key, ValueSupplier value) { - if (testTimeSource.getTimeMillis() > 0) { - throw new RuntimeException(); - } - return Duration.INFINITE; - } - - @Override - public Duration getExpiryForUpdate(Object key, ValueSupplier oldValue, Object newValue) { - return Duration.INFINITE; - } - }; - Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), - 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser - .parse("10M")); - OffHeapStoreLifecycleHelper.init(offHeapStore); - TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); - - XAStore xaStore = getXAStore(tieredStore); - - testTransactionManager.begin(); - xaStore.put(1L, "one"); - testTransactionManager.commit(); - - testTimeSource.advanceTime(1000); - testTransactionManager.begin(); - assertThat(xaStore.get(1L).value(), is("one")); - testTransactionManager.commit(); - - testTransactionManager.begin(); - assertThat(xaStore.get(1L), nullValue()); - testTransactionManager.commit(); - } - - @Test - public void testExpiryUpdateException() throws Exception{ - Expiry expiry = new Expiry() { - - @Override - public Duration getExpiryForCreation(Object key, Object value) { - return Duration.INFINITE; - } - - @Override - public Duration getExpiryForAccess(Object key, ValueSupplier value) { - return Duration.INFINITE; - } - - @Override - public Duration getExpiryForUpdate(Object key, ValueSupplier oldValue, Object newValue) { - if (testTimeSource.getTimeMillis() > 0) { - throw new RuntimeException(); - } - return Duration.INFINITE; - } - }; - Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), - 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser - .parse("10M")); - OffHeapStoreLifecycleHelper.init(offHeapStore); - TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); - - XAStore xaStore = getXAStore(tieredStore); - - testTransactionManager.begin(); - xaStore.put(1L, "one"); - xaStore.get(1L); - testTransactionManager.commit(); - - testTimeSource.advanceTime(1000); - testTransactionManager.begin(); - xaStore.put(1L, "two"); - testTransactionManager.commit(); - assertMapping(xaStore, 1L, null); - } - - @Test - public void testBulkCompute() throws Exception { - String uniqueXAResourceId = "testBulkCompute"; - Expiry expiry = Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)); - Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), - 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser - .parse("10M")); - OffHeapStoreLifecycleHelper.init(offHeapStore); - TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); - - XAStore xaStore = getXAStore(tieredStore); - - testTransactionManager.begin(); - { - Map> computedMap = xaStore.bulkCompute(asSet(1L, 2L, 3L), entries -> { - Map result = new HashMap<>(); - for (Map.Entry entry : entries) { - Long key = entry.getKey(); - String value = entry.getValue(); - assertThat(value, is(nullValue())); - result.put(key, "stuff#" + key); - } - return result.entrySet(); - }); - - assertThat(computedMap.size(), is(3)); - assertThat(computedMap.get(1L).value(), equalTo("stuff#1")); - assertThat(computedMap.get(2L).value(), equalTo("stuff#2")); - assertThat(computedMap.get(3L).value(), equalTo("stuff#3")); - - - computedMap = xaStore.bulkCompute(asSet(0L, 1L, 3L), entries -> { - Map result = new HashMap<>(); - for (Map.Entry entry : entries) { - Long key = entry.getKey(); - String value = entry.getValue(); - - switch (key.intValue()) { - case 0: - assertThat(value, is(nullValue())); - break; - case 1: - case 3: - assertThat(value, equalTo("stuff#" + key)); - break; - } - - if (key != 3L) { - result.put(key, "otherStuff#" + key); - } else { - result.put(key, null); - } - } - return result.entrySet(); - }); - - assertThat(computedMap.size(), is(3)); - assertThat(computedMap.get(0L).value(), equalTo("otherStuff#0")); - assertThat(computedMap.get(1L).value(), equalTo("otherStuff#1")); - assertThat(computedMap.get(3L), is(nullValue())); - } - testTransactionManager.commit(); - - assertSize(xaStore, 3); - assertMapping(xaStore, 0L, "otherStuff#0"); - assertMapping(xaStore, 1L, "otherStuff#1"); - assertMapping(xaStore, 2L, "stuff#2"); - - OffHeapStoreLifecycleHelper.close(offHeapStore); - } - - @Test - public void testBulkComputeIfAbsent() throws Exception { - Expiry expiry = Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)); - Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration> offHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), - 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = new OffHeapStore<>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser - .parse("10M")); - OffHeapStoreLifecycleHelper.init(offHeapStore); - TieredStore> tieredStore = new TieredStore<>(onHeapStore, offHeapStore); - - XAStore xaStore = getXAStore(tieredStore); - - testTransactionManager.begin(); - { - Map> computedMap = xaStore.bulkComputeIfAbsent(asSet(1L, 2L, 3L), keys -> { - Map result = new HashMap<>(); - for (Long key : keys) { - result.put(key, "stuff#" + key); - } - return result.entrySet(); - }); - - assertThat(computedMap.size(), is(3)); - assertThat(computedMap.get(1L).value(), equalTo("stuff#1")); - assertThat(computedMap.get(2L).value(), equalTo("stuff#2")); - assertThat(computedMap.get(3L).value(), equalTo("stuff#3")); - - computedMap = xaStore.bulkComputeIfAbsent(asSet(0L, 1L, 3L), keys -> { - Map result = new HashMap<>(); - for (Long key : keys) { - switch (key.intValue()) { - case 0: - result.put(key, "otherStuff#" + key); - break; - case 1: - case 3: - fail("key " + key + " should not be absent"); - break; - } - } - return result.entrySet(); - }); - - assertThat(computedMap.size(), is(3)); - assertThat(computedMap.get(0L).value(), equalTo("otherStuff#0")); - assertThat(computedMap.get(1L).value(), equalTo("stuff#1")); - assertThat(computedMap.get(3L).value(), equalTo("stuff#3")); - } - testTransactionManager.commit(); - - assertSize(xaStore, 4); - assertMapping(xaStore, 0L, "otherStuff#0"); - assertMapping(xaStore, 1L, "stuff#1"); - assertMapping(xaStore, 2L, "stuff#2"); - assertMapping(xaStore, 3L, "stuff#3"); - - OffHeapStoreLifecycleHelper.close(offHeapStore); - } - - @Test - public void testCustomEvictionAdvisor() throws Exception { - final AtomicBoolean invoked = new AtomicBoolean(); - - EvictionAdvisor evictionAdvisor = (key, value) -> { - invoked.set(true); - return false; - }; - Store.Configuration> onHeapConfig = new StoreConfigurationImpl<>(Long.class, valueClass, - evictionAdvisor, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .build(), - 0, keySerializer, valueSerializer); - OnHeapStore> onHeapStore = new OnHeapStore<>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - - final XAStore xaStore = getXAStore(onHeapStore); - - testTransactionManager.begin(); - { - xaStore.put(1L, "1"); - } - testTransactionManager.rollback(); - assertThat(invoked.get(), is(false)); - - testTransactionManager.begin(); - { - xaStore.put(1L, "1"); - } - testTransactionManager.commit(); - assertThat(invoked.get(), is(true)); - } - - @Test - public void testRank() throws Exception { - XAStore.Provider provider = new XAStore.Provider(); - XAStoreConfiguration configuration = new XAStoreConfiguration("testXAResourceId"); - ServiceLocator serviceLocator = dependencySet() - .with(provider) - .with(Store.Provider.class) - .with(mock(DiskResourceService.class)) - .with(mock(TransactionManagerProvider.class)).build(); - - serviceLocator.startAllServices(); - - final Set> xaStoreConfigs = Collections.>singleton(configuration); - assertRank(provider, 1001, xaStoreConfigs, ResourceType.Core.HEAP); - assertRank(provider, 1001, xaStoreConfigs, ResourceType.Core.OFFHEAP); - assertRank(provider, 1001, xaStoreConfigs, ResourceType.Core.DISK); - assertRank(provider, 1002, xaStoreConfigs, ResourceType.Core.OFFHEAP, ResourceType.Core.HEAP); - assertRank(provider, -1, xaStoreConfigs, ResourceType.Core.DISK, ResourceType.Core.OFFHEAP); - assertRank(provider, 1002, xaStoreConfigs, ResourceType.Core.DISK, ResourceType.Core.HEAP); - assertRank(provider, 1003, xaStoreConfigs, ResourceType.Core.DISK, ResourceType.Core.OFFHEAP, ResourceType.Core.HEAP); - - final Set> emptyConfigs = emptySet(); - assertRank(provider, 0, emptyConfigs, ResourceType.Core.DISK, ResourceType.Core.OFFHEAP, ResourceType.Core.HEAP); - - final ResourceType unmatchedResourceType = new ResourceType() { - @Override - public Class getResourcePoolClass() { - return ResourcePool.class; - } - @Override - public boolean isPersistable() { - return true; - } - @Override - public boolean requiresSerialization() { - return true; - } - @Override - public int getTierHeight() { - return 10; - } - }; - - assertRank(provider, -1, xaStoreConfigs, unmatchedResourceType); - assertRank(provider, -1, xaStoreConfigs, ResourceType.Core.DISK, ResourceType.Core.OFFHEAP, ResourceType.Core.HEAP, unmatchedResourceType); - } - - private void assertRank(final Store.Provider provider, final int expectedRank, - final Collection> serviceConfigs, final ResourceType... resources) { - if (expectedRank == -1) { - try { - provider.rank(new HashSet<>(Arrays.asList(resources)), serviceConfigs); - fail(); - } catch (IllegalStateException e) { - // Expected - assertThat(e.getMessage(), startsWith("No Store.Provider ")); - } - } else { - assertThat(provider.rank(new HashSet<>(Arrays.asList(resources)), serviceConfigs), is(expectedRank)); - } - } - - private Set asSet(Long... longs) { - return new HashSet<>(Arrays.asList(longs)); - } - - private void assertMapping(XAStore xaStore, long key, String value) throws Exception { - testTransactionManager.begin(); - - Store.ValueHolder valueHolder = xaStore.get(key); - if (value != null) { - assertThat(valueHolder.value(), equalTo(value)); - } else { - assertThat(valueHolder, is(nullValue())); - } - - testTransactionManager.commit(); - } - - private void assertSize(XAStore xaStore, int expectedSize) throws Exception { - testTransactionManager.begin(); - - int counter = 0; - Store.Iterator>> iterator = xaStore.iterator(); - while (iterator.hasNext()) { - Cache.Entry> next = iterator.next(); - counter++; - } - assertThat(counter, is(expectedSize)); - - testTransactionManager.commit(); - } - - private XAStore getXAStore(Store> store) { - return new XAStore<>(Long.class, String.class, store, transactionManagerWrapper, testTimeSource, journal, testName.getMethodName()); - } - - static class TestTransactionManager implements TransactionManager { - - volatile TestTransaction currentTransaction; - final AtomicLong gtridGenerator = new AtomicLong(); - - public TestTransaction getCurrentTransaction() { - return currentTransaction; - } - - @Override - public void begin() throws NotSupportedException, SystemException { - currentTransaction = new TestTransaction(gtridGenerator.incrementAndGet()); - } - - @Override - public void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException, SecurityException, IllegalStateException, SystemException { - currentTransaction.commit(); - currentTransaction = null; - } - - @Override - public int getStatus() throws SystemException { - return 0; - } - - @Override - public Transaction getTransaction() throws SystemException { - return currentTransaction; - } - - @Override - public void resume(Transaction tobj) throws InvalidTransactionException, IllegalStateException, SystemException { - - } - - @Override - public void rollback() throws IllegalStateException, SecurityException, SystemException { - currentTransaction.rollback(); - currentTransaction = null; - } - - @Override - public void setRollbackOnly() throws IllegalStateException, SystemException { - - } - - @Override - public void setTransactionTimeout(int seconds) throws SystemException { - - } - - @Override - public Transaction suspend() throws SystemException { - return null; - } - } - - static class TestTransaction implements Transaction { - - final long gtrid; - final Map xids = new IdentityHashMap<>(); - final AtomicLong bqualGenerator = new AtomicLong(); - final List synchronizations = new CopyOnWriteArrayList<>(); - final List twoPcListeners = new CopyOnWriteArrayList<>(); - - public TestTransaction(long gtrid) { - this.gtrid = gtrid; - } - - @Override - public void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException, SecurityException, IllegalStateException, SystemException { - try { - Set> entries = xids.entrySet(); - - // delist - for (Map.Entry entry : entries) { - try { - entry.getKey().end(entry.getValue(), XAResource.TMSUCCESS); - } catch (XAException e) { - throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); - } - } - - fireBeforeCompletion(); - - Set preparedResources = new HashSet<>(); - - // prepare - for (Map.Entry entry : entries) { - try { - int prepareStatus = entry.getKey().prepare(entry.getValue()); - if (prepareStatus != XAResource.XA_RDONLY) { - preparedResources.add(entry.getKey()); - } - } catch (XAException e) { - throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); - } - } - - fireInMiddleOf2PC(); - - // commit - for (Map.Entry entry : entries) { - try { - if (preparedResources.contains(entry.getKey())) { - entry.getKey().commit(entry.getValue(), false); - } - } catch (XAException e) { - throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); - } - } - } finally { - fireAfterCompletion(Status.STATUS_COMMITTED); - } - } - - @Override - public boolean delistResource(XAResource xaRes, int flag) throws IllegalStateException, SystemException { - return true; - } - - @Override - public boolean enlistResource(XAResource xaRes) throws RollbackException, IllegalStateException, SystemException { - TestXid testXid = xids.get(xaRes); - if (testXid == null) { - testXid = new TestXid(gtrid, bqualGenerator.incrementAndGet()); - xids.put(xaRes, testXid); - } - - try { - xaRes.start(testXid, XAResource.TMNOFLAGS); - } catch (XAException e) { - throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); - } - return true; - } - - @Override - public int getStatus() throws SystemException { - return 0; - } - - public void registerTwoPcListener(TwoPcListener listener) { - twoPcListeners.add(listener); - } - - @Override - public void registerSynchronization(Synchronization sync) throws RollbackException, IllegalStateException, SystemException { - synchronizations.add(sync); - } - - @Override - public void rollback() throws IllegalStateException, SystemException { - try { - Set> entries = xids.entrySet(); - - // delist - for (Map.Entry entry : entries) { - try { - entry.getKey().end(entry.getValue(), XAResource.TMSUCCESS); - } catch (XAException e) { - throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); - } - } - - // rollback - for (Map.Entry entry : entries) { - try { - entry.getKey().rollback(entry.getValue()); - } catch (XAException e) { - throw (SystemException) new SystemException(XAException.XAER_RMERR).initCause(e); - } - } - } finally { - fireAfterCompletion(Status.STATUS_ROLLEDBACK); - } - } - - private void fireBeforeCompletion() { - for (Synchronization synchronization : synchronizations) { - synchronization.beforeCompletion(); - } - } - - private void fireAfterCompletion(int status) { - for (Synchronization synchronization : synchronizations) { - synchronization.afterCompletion(status); - } - } - - private void fireInMiddleOf2PC() { - for (TwoPcListener twoPcListener : twoPcListeners) { - twoPcListener.inMiddleOf2PC(); - } - } - - @Override - public void setRollbackOnly() throws IllegalStateException, SystemException { - - } - } - - interface TwoPcListener { - void inMiddleOf2PC(); - } - -} diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java deleted file mode 100644 index 1434f6edd3..0000000000 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java +++ /dev/null @@ -1,662 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions.xa.internal; - -import org.ehcache.internal.TestTimeSource; -import org.ehcache.core.spi.store.AbstractValueHolder; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.store.Store.RemoveStatus; -import org.ehcache.transactions.xa.internal.commands.StoreEvictCommand; -import org.ehcache.transactions.xa.internal.commands.StorePutCommand; -import org.ehcache.transactions.xa.internal.commands.StoreRemoveCommand; -import org.ehcache.transactions.xa.internal.journal.Journal; -import org.ehcache.core.spi.store.Store.ReplaceStatus; -import org.ehcache.transactions.xa.utils.TestXid; -import org.hamcrest.BaseMatcher; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyCollection; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isA; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -/** - * @author Ludovic Orban - */ -public class XATransactionContextTest { - - @Mock - private Store> underlyingStore; - @Mock - private Journal journal; - - @Before - public void setUp() { - MockitoAnnotations.initMocks(this); - } - - @Test - public void testSimpleCommands() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), null, null, timeSource, timeSource - .getTimeMillis() + 30000); - - assertThat(xaTransactionContext.touched(1L), is(false)); - assertThat(xaTransactionContext.removed(1L), is(false)); - assertThat(xaTransactionContext.updated(1L), is(false)); - assertThat(xaTransactionContext.evicted(1L), is(false)); - assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); - assertThat(xaTransactionContext.oldValueOf(1L), is(nullValue())); - assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("old", new XAValueHolder<>("new", timeSource.getTimeMillis()))); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(false)); - assertThat(xaTransactionContext.updated(1L), is(true)); - assertThat(xaTransactionContext.evicted(1L), is(false)); - assertThat(xaTransactionContext.newValueHolderOf(1L).value(), equalTo("new")); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); - assertThat(xaTransactionContext.newValueOf(1L), equalTo("new")); - - xaTransactionContext.addCommand(1L, new StoreRemoveCommand<>("old")); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(true)); - assertThat(xaTransactionContext.updated(1L), is(false)); - assertThat(xaTransactionContext.evicted(1L), is(false)); - assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); - assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); - - xaTransactionContext.addCommand(1L, new StoreEvictCommand<>("old")); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(false)); - assertThat(xaTransactionContext.updated(1L), is(false)); - assertThat(xaTransactionContext.evicted(1L), is(true)); - assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); - assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); - } - - @Test - public void testCommandsOverrideEachOther() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), null, null, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("old", new XAValueHolder<>("new", timeSource.getTimeMillis()))); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(false)); - assertThat(xaTransactionContext.updated(1L), is(true)); - assertThat(xaTransactionContext.evicted(1L), is(false)); - assertThat(xaTransactionContext.newValueHolderOf(1L).value(), equalTo("new")); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); - assertThat(xaTransactionContext.newValueOf(1L), equalTo("new")); - - xaTransactionContext.addCommand(1L, new StoreRemoveCommand<>("old")); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(true)); - assertThat(xaTransactionContext.updated(1L), is(false)); - assertThat(xaTransactionContext.evicted(1L), is(false)); - assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); - assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); - - xaTransactionContext.addCommand(1L, new StoreRemoveCommand<>("old2")); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(true)); - assertThat(xaTransactionContext.updated(1L), is(false)); - assertThat(xaTransactionContext.evicted(1L), is(false)); - assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old2")); - assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("old2", new XAValueHolder<>("new2", timeSource.getTimeMillis()))); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(false)); - assertThat(xaTransactionContext.updated(1L), is(true)); - assertThat(xaTransactionContext.evicted(1L), is(false)); - assertThat(xaTransactionContext.newValueHolderOf(1L).value(), equalTo("new2")); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old2")); - assertThat(xaTransactionContext.newValueOf(1L), equalTo("new2")); - } - - @Test - public void testEvictCommandCannotBeOverridden() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), null, null, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("old", new XAValueHolder<>("new", timeSource.getTimeMillis()))); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(false)); - assertThat(xaTransactionContext.updated(1L), is(true)); - assertThat(xaTransactionContext.evicted(1L), is(false)); - assertThat(xaTransactionContext.newValueHolderOf(1L).value(), equalTo("new")); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); - assertThat(xaTransactionContext.newValueOf(1L), equalTo("new")); - - xaTransactionContext.addCommand(1L, new StoreEvictCommand<>("old")); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(false)); - assertThat(xaTransactionContext.updated(1L), is(false)); - assertThat(xaTransactionContext.evicted(1L), is(true)); - assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); - assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("old2", new XAValueHolder<>("new2", timeSource.getTimeMillis()))); - assertThat(xaTransactionContext.touched(1L), is(true)); - assertThat(xaTransactionContext.removed(1L), is(false)); - assertThat(xaTransactionContext.updated(1L), is(false)); - assertThat(xaTransactionContext.evicted(1L), is(true)); - assertThat(xaTransactionContext.newValueHolderOf(1L), is(nullValue())); - assertThat(xaTransactionContext.oldValueOf(1L), equalTo("old")); - assertThat(xaTransactionContext.newValueOf(1L), is(nullValue())); - } - - @Test - public void testHasTimedOut() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), null, null, timeSource, timeSource - .getTimeMillis() + 30000); - - assertThat(xaTransactionContext.hasTimedOut(), is(false)); - timeSource.advanceTime(30000); - assertThat(xaTransactionContext.hasTimedOut(), is(true)); - } - - @Test - public void testPrepareReadOnly() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - assertThat(xaTransactionContext.prepare(), is(0)); - - verify(journal, times(1)).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), eq(Collections.emptySet())); - verify(journal, times(0)).saveCommitted(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); - verify(journal, times(1)).saveRolledBack(eq(new TransactionId(new TestXid(0, 0))), eq(false)); - } - - @Test - @SuppressWarnings("unchecked") - public void testPrepare() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>(null, new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); - xaTransactionContext.addCommand(3L, new StoreEvictCommand<>("three")); - - Store.ValueHolder> mockValueHolder = mock(Store.ValueHolder.class); - when(mockValueHolder.value()).thenReturn(new SoftLock<>(null, "two", null)); - when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder); - when(underlyingStore.replace(eq(2L), eq(new SoftLock<>(null, "two", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null)))).thenReturn(ReplaceStatus.HIT); - - final AtomicReference> savedInDoubt = new AtomicReference<>(); - // doAnswer is required to make a copy of the keys collection because xaTransactionContext.prepare() clears it before the verify(journal, times(1)).saveInDoubt(...) assertion can be made. - // See: http://stackoverflow.com/questions/17027368/mockito-what-if-argument-passed-to-mock-is-modified - doAnswer(invocation -> { - Collection o = (Collection) invocation.getArguments()[1]; - savedInDoubt.set(new HashSet<>(o)); - return null; - }).when(journal).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); - - assertThat(xaTransactionContext.prepare(), is(3)); - - Assert.assertThat(savedInDoubt.get(), containsInAnyOrder(1L, 2L, 3L)); - - verify(journal, times(1)).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); - verify(journal, times(0)).saveCommitted(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); - verify(journal, times(0)).saveRolledBack(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); - - verify(underlyingStore, times(0)).get(1L); - verify(underlyingStore, times(1)).putIfAbsent(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), null, new XAValueHolder<>("un", timeSource - .getTimeMillis())))); - verify(underlyingStore, times(0)).get(2L); - verify(underlyingStore, times(1)).replace(eq(2L), eq(new SoftLock<>(null, "two", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); - verify(underlyingStore, times(0)).get(3L); - verify(underlyingStore, times(1)).remove(eq(3L)); - } - - @Test - public void testCommitNotPreparedInFlightThrows() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StorePutCommand<>("two", new XAValueHolder<>("deux", timeSource.getTimeMillis()))); - - @SuppressWarnings("unchecked") - Store.ValueHolder> mockValueHolder = mock(Store.ValueHolder.class); - when(mockValueHolder.value()).thenReturn(new SoftLock<>(null, "two", null)); - when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder); - - try { - xaTransactionContext.commit(false); - fail("expected IllegalArgumentException"); - } catch (IllegalArgumentException ise) { - // expected - } - } - - @Test - @SuppressWarnings("unchecked") - public void testCommit() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); - xaTransactionContext.addCommand(3L, new StoreEvictCommand<>("three")); - - Store.ValueHolder> mockValueHolder1 = mock(Store.ValueHolder.class); - when(mockValueHolder1.value()).thenReturn(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource - .getTimeMillis()))); - when(underlyingStore.get(eq(1L))).thenReturn(mockValueHolder1); - Store.ValueHolder> mockValueHolder2 = mock(Store.ValueHolder.class); - when(mockValueHolder2.value()).thenReturn(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null)); - when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder2); - Store.ValueHolder> mockValueHolder3 = mock(Store.ValueHolder.class); - when(mockValueHolder3.value()).thenReturn(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "three", null)); - when(underlyingStore.get(eq(3L))).thenReturn(mockValueHolder3); - - when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); - when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(Arrays.asList(1L, 2L, 3L)); - - when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.MISS_NOT_PRESENT); - when(underlyingStore.remove(any(Long.class), any(SoftLock.class))).thenReturn(RemoveStatus.KEY_MISSING); - - xaTransactionContext.commit(false); - verify(journal, times(1)).saveCommitted(eq(new TransactionId(new TestXid(0, 0))), eq(false)); - verify(journal, times(0)).saveRolledBack(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); - verify(journal, times(0)).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); - - verify(underlyingStore, times(1)).get(1L); - verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource - .getTimeMillis()))), eq(new SoftLock<>(null, "un", null))); - verify(underlyingStore, times(1)).get(2L); - verify(underlyingStore, times(1)).remove(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); - verify(underlyingStore, times(1)).get(3L); - verify(underlyingStore, times(1)).remove(eq(3L)); - } - - @Test - public void testCommitInOnePhasePreparedThrows() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); - - try { - xaTransactionContext.commitInOnePhase(); - fail("expected IllegalStateException"); - } catch (IllegalStateException ise) { - // expected - } - } - - @Test - @SuppressWarnings("unchecked") - public void testCommitInOnePhase() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>(null, new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); - xaTransactionContext.addCommand(3L, new StoreEvictCommand<>("three")); - - Store.ValueHolder> mockValueHolder = mock(Store.ValueHolder.class); - when(mockValueHolder.value()).thenReturn(new SoftLock<>(null, "two", null)); - when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder); - - final AtomicReference> savedInDoubtCollectionRef = new AtomicReference<>(); - doAnswer(invocation -> { - savedInDoubtCollectionRef.set(new HashSet<>((Collection) invocation.getArguments()[1])); - return null; - }).when(journal).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); - when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).then(invocation -> savedInDoubtCollectionRef.get() != null); - when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).then(invocation -> savedInDoubtCollectionRef.get()); - final AtomicReference softLock1Ref = new AtomicReference<>(); - when(underlyingStore.get(eq(1L))).then(invocation -> softLock1Ref.get() == null ? null : new AbstractValueHolder(-1, -1) { - @Override - public Object value() { - return softLock1Ref.get(); - } - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - }); - when(underlyingStore.putIfAbsent(eq(1L), isA(SoftLock.class))).then(invocation -> { - softLock1Ref.set((SoftLock) invocation.getArguments()[1]); - return null; - }); - when(underlyingStore.replace(eq(1L), isA(SoftLock.class), isA(SoftLock.class))).then(invocation -> { - if (softLock1Ref.get() != null) { - return ReplaceStatus.HIT; - } - return ReplaceStatus.MISS_PRESENT; - }); - final AtomicReference softLock2Ref = new AtomicReference<>(new SoftLock(null, "two", null)); - when(underlyingStore.get(eq(2L))).then(invocation -> softLock2Ref.get() == null ? null : new AbstractValueHolder(-1, -1) { - @Override - public Object value() { - return softLock2Ref.get(); - } - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - }); - when(underlyingStore.replace(eq(2L), isA(SoftLock.class), isA(SoftLock.class))).then(invocation -> { - softLock2Ref.set((SoftLock) invocation.getArguments()[2]); - return ReplaceStatus.HIT; - }); - - when(underlyingStore.remove(any(Long.class), any(SoftLock.class))).thenReturn(RemoveStatus.REMOVED); - - xaTransactionContext.commitInOnePhase(); - - Assert.assertThat(savedInDoubtCollectionRef.get(), containsInAnyOrder(1L, 2L, 3L)); - - verify(journal, times(1)).saveCommitted(eq(new TransactionId(new TestXid(0, 0))), eq(false)); - verify(journal, times(0)).saveRolledBack(eq(new TransactionId(new TestXid(0, 0))), anyBoolean()); - verify(journal, times(1)).saveInDoubt(eq(new TransactionId(new TestXid(0, 0))), any(Collection.class)); - - verify(underlyingStore, times(1)).putIfAbsent(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), null, new XAValueHolder<>("un", timeSource - .getTimeMillis())))); - verify(underlyingStore, times(1)).replace(eq(2L), eq(new SoftLock<>(null, "two", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); - verify(underlyingStore, times(1)).remove(eq(3L)); - - verify(underlyingStore, times(1)).get(1L); - verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), null, new XAValueHolder<>("un", timeSource - .getTimeMillis()))), eq(new SoftLock<>(null, "un", null))); - verify(underlyingStore, times(1)).get(2L); - verify(underlyingStore, times(1)).remove(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); - verify(underlyingStore, times(1)).get(3L); - verify(underlyingStore, times(1)).remove(eq(3L)); - } - - @Test - public void testRollbackPhase1() throws Exception { - TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); - - xaTransactionContext.rollback(false); - - verifyNoMoreInteractions(underlyingStore); - } - - @Test - @SuppressWarnings("unchecked") - public void testRollbackPhase2() throws Exception { - final TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); - - when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); - when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(Arrays.asList(1L, 2L)); - - when(underlyingStore.get(1L)).thenReturn(new AbstractValueHolder>(-1, -1) { - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - @Override - public SoftLock value() { - return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource.getTimeMillis())); - } - }); - when(underlyingStore.get(2L)).thenReturn(new AbstractValueHolder>(-1, -1) { - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - @Override - public SoftLock value() { - return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null); - } - }); - - when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.HIT); - xaTransactionContext.rollback(false); - - verify(underlyingStore, times(1)).get(1L); - verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource - .getTimeMillis()))), eq(new SoftLock<>(null, "one", null))); - verify(underlyingStore, times(1)).get(2L); - verify(underlyingStore, times(1)).replace(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null)), eq(new SoftLock<>(null, "two", null))); - } - - @Test - public void testCommitInOnePhaseTimeout() throws Exception { - final TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); - - timeSource.advanceTime(30000); - - try { - xaTransactionContext.commitInOnePhase(); - fail("expected TransactionTimeoutException"); - } catch (XATransactionContext.TransactionTimeoutException tte) { - // expected - } - } - - @Test - public void testPrepareTimeout() throws Exception { - final TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); - - timeSource.advanceTime(30000); - - try { - xaTransactionContext.prepare(); - fail("expected TransactionTimeoutException"); - } catch (XATransactionContext.TransactionTimeoutException tte) { - // expected - } - } - - @Test - @SuppressWarnings("unchecked") - public void testCommitConflictsEvicts() throws Exception { - final TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); - when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(Arrays.asList(1L, 2L)); - when(underlyingStore.get(eq(1L))).thenReturn(new AbstractValueHolder>(-1, -1) { - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - @Override - public SoftLock value() { - return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old1", new XAValueHolder<>("new1", timeSource - .getTimeMillis())); - } - }); - when(underlyingStore.get(eq(2L))).thenReturn(new AbstractValueHolder>(-1, -1) { - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - @Override - public SoftLock value() { - return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old2", null); - } - }); - - when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.MISS_NOT_PRESENT); - when(underlyingStore.remove(any(Long.class), any(SoftLock.class))).thenReturn(RemoveStatus.KEY_MISSING); - - xaTransactionContext.commit(false); - - verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old1", new XAValueHolder<>("new1", timeSource - .getTimeMillis()))), eq(new SoftLock<>(null, "new1", null))); - verify(underlyingStore, times(1)).remove(eq(1L)); - verify(underlyingStore, times(1)).remove(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old2", null))); - verify(underlyingStore, times(1)).remove(eq(2L)); - } - - @Test - @SuppressWarnings("unchecked") - public void testPrepareConflictsEvicts() throws Exception { - final TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - xaTransactionContext.addCommand(1L, new StorePutCommand<>("one", new XAValueHolder<>("un", timeSource.getTimeMillis()))); - xaTransactionContext.addCommand(2L, new StoreRemoveCommand<>("two")); - - when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.MISS_NOT_PRESENT); - - xaTransactionContext.prepare(); - - verify(underlyingStore).replace(eq(1L), eq(new SoftLock<>(null, "one", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "one", new XAValueHolder<>("un", timeSource - .getTimeMillis())))); - verify(underlyingStore).remove(1L); - verify(underlyingStore).replace(eq(2L), eq(new SoftLock<>(null, "two", null)), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "two", null))); - verify(underlyingStore).remove(2L); - } - - @Test - @SuppressWarnings("unchecked") - public void testRollbackConflictsEvicts() throws Exception { - final TestTimeSource timeSource = new TestTimeSource(); - - XATransactionContext xaTransactionContext = new XATransactionContext<>(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource - .getTimeMillis() + 30000); - - when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); - when(journal.getInDoubtKeys(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(Arrays.asList(1L, 2L)); - when(underlyingStore.get(eq(1L))).thenReturn(new AbstractValueHolder>(-1, -1) { - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - @Override - public SoftLock value() { - return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old1", new XAValueHolder<>("new1", timeSource - .getTimeMillis())); - } - }); - when(underlyingStore.get(eq(2L))).thenReturn(new AbstractValueHolder>(-1, -1) { - @Override - protected TimeUnit nativeTimeUnit() { - return TimeUnit.MILLISECONDS; - } - @Override - public SoftLock value() { - return new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old2", null); - } - }); - - when(underlyingStore.replace(any(Long.class), any(SoftLock.class), any(SoftLock.class))).thenReturn(ReplaceStatus.MISS_NOT_PRESENT); - when(underlyingStore.remove(any(Long.class), any(SoftLock.class))).thenReturn(RemoveStatus.KEY_MISSING); - - xaTransactionContext.rollback(false); - - verify(underlyingStore, times(1)).replace(eq(1L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old1", new XAValueHolder<>("new1", timeSource - .getTimeMillis()))), eq(new SoftLock<>(null, "old1", null))); - verify(underlyingStore, times(1)).remove(eq(1L)); - verify(underlyingStore, times(1)).replace(eq(2L), eq(new SoftLock<>(new TransactionId(new TestXid(0, 0)), "old2", null)), eq(new SoftLock<>(null, "old2", null))); - verify(underlyingStore, times(1)).remove(eq(2L)); - } - - - private static Matcher> isACollectionThat( - final Matcher> matcher) { - return new BaseMatcher>() { - @Override public boolean matches(Object item) { - return matcher.matches(item); - } - - @Override public void describeTo(Description description) { - matcher.describeTo(description); - } - }; - } - -} diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java deleted file mode 100644 index b19a4a985e..0000000000 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.transactions.xa.internal; - -import org.ehcache.expiry.Duration; -import org.junit.Test; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -/** - * XAValueHolderTest - */ -public class XAValueHolderTest { - - @Test - public void testSerialization() throws Exception { - - long now = System.currentTimeMillis(); - XAValueHolder valueHolder = new XAValueHolder<>("value", now - 1000); - valueHolder.accessed(now, new Duration(100, TimeUnit.SECONDS)); - - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - ObjectOutputStream outputStream = new ObjectOutputStream(baos); - outputStream.writeObject(valueHolder); - outputStream.close(); - - @SuppressWarnings("unchecked") - XAValueHolder result = (XAValueHolder) new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray())).readObject(); - - assertThat(result.getId(), is(valueHolder.getId())); - assertThat(result.creationTime(TimeUnit.MILLISECONDS), is(valueHolder.creationTime(TimeUnit.MILLISECONDS))); - assertThat(result.lastAccessTime(TimeUnit.MILLISECONDS), is(valueHolder.lastAccessTime(TimeUnit.MILLISECONDS))); - assertThat(result.expirationTime(TimeUnit.MILLISECONDS), is(valueHolder.expirationTime(TimeUnit.MILLISECONDS))); - assertThat(result.value(), is(valueHolder.value())); - assertThat(result.hits(), is(valueHolder.hits())); - } -} diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/utils/JavaSerializer.java b/transactions/src/test/java/org/ehcache/transactions/xa/utils/JavaSerializer.java deleted file mode 100644 index 24c455443c..0000000000 --- a/transactions/src/test/java/org/ehcache/transactions/xa/utils/JavaSerializer.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.transactions.xa.utils; - -import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.impl.internal.util.ByteBufferInputStream; -import org.ehcache.spi.serialization.Serializer; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.ObjectStreamClass; -import java.lang.reflect.Proxy; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; - -/** - * - * @author cdennis - */ -public class JavaSerializer implements Serializer { - - private final ClassLoader classLoader; - - public JavaSerializer(ClassLoader classLoader) { - this.classLoader = classLoader; - } - - @Override - public ByteBuffer serialize(T object) { - ByteArrayOutputStream bout = new ByteArrayOutputStream(); - try { - ObjectOutputStream oout = new ObjectOutputStream(bout); - oout.writeObject(object); - } catch (IOException e) { - throw new SerializerException(e); - } finally { - try { - bout.close(); - } catch (IOException e) { - throw new AssertionError(e); - } - } - return ByteBuffer.wrap(bout.toByteArray()); - } - - @SuppressWarnings("unchecked") - @Override - public T read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { - ByteBufferInputStream bin = new ByteBufferInputStream(entry); - try { - try (OIS ois = new OIS(bin, classLoader)) { - return (T) ois.readObject(); - } - } catch (IOException e) { - throw new SerializerException(e); - } finally { - try { - bin.close(); - } catch (IOException e) { - throw new AssertionError(e); - } - } - } - - @Override - public boolean equals(T object, ByteBuffer binary) throws SerializerException, ClassNotFoundException { - return object.equals(read(binary)); - } - - private static class OIS extends ObjectInputStream { - - private final ClassLoader classLoader; - - public OIS(InputStream in, ClassLoader classLoader) throws IOException { - super(in); - this.classLoader = classLoader; - } - - @Override - protected Class resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException { - try { - return Class.forName(desc.getName(), false, classLoader); - } catch (ClassNotFoundException cnfe) { - Class primitive = primitiveClasses.get(desc.getName()); - if (primitive != null) { - return primitive; - } - throw cnfe; - } - } - - @Override - protected Class resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { - Class[] interfaceClasses = new Class[interfaces.length]; - for (int i = 0; i < interfaces.length; i++) { - interfaceClasses[i] = Class.forName(interfaces[i], false, classLoader); - } - - return Proxy.getProxyClass(classLoader, interfaceClasses); - } - - private static final Map> primitiveClasses = new HashMap<>(); - static { - primitiveClasses.put("boolean", boolean.class); - primitiveClasses.put("byte", byte.class); - primitiveClasses.put("char", char.class); - primitiveClasses.put("double", double.class); - primitiveClasses.put("float", float.class); - primitiveClasses.put("int", int.class); - primitiveClasses.put("long", long.class); - primitiveClasses.put("short", short.class); - primitiveClasses.put("void", void.class); - } - } - -} diff --git a/xml/.gitignore b/xml/.gitignore deleted file mode 100755 index ae3c172604..0000000000 --- a/xml/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/xml/build.gradle b/xml/build.gradle deleted file mode 100644 index 28e4c2d809..0000000000 --- a/xml/build.gradle +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -plugins { - id 'com.github.jacobono.jaxb' version '1.3.5' -} - -apply plugin: EhDeploy - -dependencies { - compile project(':api'), project(':core'), project(':impl') - - // Constrained to JAXB 2.1 due to JDK 1.6 compatibility requirements - jaxb 'com.sun.xml.bind:jaxb-xjc:2.1.17' - jaxb 'com.sun.xml.bind:jaxb-impl:2.1.17' - jaxb 'javax.xml:jaxb-api:2.1' -} - -def generatedSources = "src/generated/java" - -sourceSets.main.java { - srcDir generatedSources -} - -test { - if (testJava.javaVersion.isJava9Compatible()) { - jvmArgs += ['--add-modules', 'java.xml.bind'] - } -} - -jaxb { - xsdDir = "$name/src/main/resources" - episodesDir = "$name/build/xsd/episodes" - bindingsDir = "$name/build/xsd/bindings" - xjc { - destinationDir = generatedSources - generatePackage = "org.ehcache.xml.model" - } - - compileJava.dependsOn "xjc" -} - -clean { - delete generatedSources -} diff --git a/xml/config/checkstyle-suppressions.xml b/xml/config/checkstyle-suppressions.xml deleted file mode 100644 index b156d0e18d..0000000000 --- a/xml/config/checkstyle-suppressions.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/xml/gradle.properties b/xml/gradle.properties deleted file mode 100644 index ce44fae982..0000000000 --- a/xml/gradle.properties +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright Terracotta, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -subPomName = Ehcache 3 XML Parsing module -subPomDesc = The module containing all XML parsing logic Ehcache 3 -osgi = {"Export-Package" : ["!org.ehcache.xml.model.*"],\ - "Import-Package" : ["!sun.misc.*", "!sun.security.action.*"]} diff --git a/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java b/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java deleted file mode 100644 index 93005f1c8b..0000000000 --- a/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java +++ /dev/null @@ -1,1060 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.xml; - -import org.ehcache.config.ResourcePool; -import org.ehcache.config.ResourceUnit; -import org.ehcache.config.SizedResourcePool; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.config.SizedResourcePoolImpl; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.ehcache.xml.model.BaseCacheType; -import org.ehcache.xml.model.CacheLoaderWriterType; -import org.ehcache.xml.model.CacheTemplateType; -import org.ehcache.xml.model.CacheType; -import org.ehcache.xml.model.ConfigType; -import org.ehcache.xml.model.CopierType; -import org.ehcache.xml.model.Disk; -import org.ehcache.xml.model.DiskStoreSettingsType; -import org.ehcache.xml.model.EventFiringType; -import org.ehcache.xml.model.EventOrderingType; -import org.ehcache.xml.model.EventType; -import org.ehcache.xml.model.ExpiryType; -import org.ehcache.xml.model.Heap; -import org.ehcache.xml.model.ListenersType; -import org.ehcache.xml.model.MemoryType; -import org.ehcache.xml.model.ObjectFactory; -import org.ehcache.xml.model.Offheap; -import org.ehcache.xml.model.PersistableMemoryType; -import org.ehcache.xml.model.PersistenceType; -import org.ehcache.xml.model.ResourceType; -import org.ehcache.xml.model.ResourcesType; -import org.ehcache.xml.model.SerializerType; -import org.ehcache.xml.model.ServiceType; -import org.ehcache.xml.model.SizeofType; -import org.ehcache.xml.model.TimeType; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.internal.util.ClassLoading; -import org.w3c.dom.Element; -import org.w3c.dom.NamedNodeMap; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; -import org.xml.sax.ErrorHandler; -import org.xml.sax.SAXException; -import org.xml.sax.SAXParseException; - -import javax.xml.XMLConstants; -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBException; -import javax.xml.bind.Unmarshaller; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; -import javax.xml.validation.Schema; -import javax.xml.validation.SchemaFactory; - -import java.io.IOException; -import java.math.BigInteger; -import java.net.URI; -import java.net.URL; -import java.time.temporal.TemporalUnit; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.Stack; -import java.util.concurrent.TimeUnit; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.ehcache.xml.model.ThreadPoolReferenceType; -import org.ehcache.xml.model.ThreadPoolsType; - -import static java.util.Collections.emptySet; -import static java.util.Collections.singleton; - -/** - * Provides support for parsing a cache configuration expressed in XML. - */ -class ConfigurationParser { - - private static final Pattern SYSPROP = Pattern.compile("\\$\\{([^}]+)\\}"); - private static final SchemaFactory XSD_SCHEMA_FACTORY = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); - private static Schema newSchema(Source[] schemas) throws SAXException { - synchronized (XSD_SCHEMA_FACTORY) { - return XSD_SCHEMA_FACTORY.newSchema(schemas); - } - } - - private static final URL CORE_SCHEMA_URL = XmlConfiguration.class.getResource("/ehcache-core.xsd"); - private static final String CORE_SCHEMA_NAMESPACE = "http://www.ehcache.org/v3"; - private static final String CORE_SCHEMA_ROOT_ELEMENT = "config"; - private static final String CORE_SCHEMA_JAXB_MODEL_PACKAGE = ConfigType.class.getPackage().getName(); - - private final Map> xmlParsers = new HashMap<>(); - private final Map> cacheXmlParsers = new HashMap<>(); - private final Unmarshaller unmarshaller; - private final Map resourceXmlParsers = new HashMap<>(); - private final ConfigType config; - - static String replaceProperties(String originalValue, final Properties properties) { - Matcher matcher = SYSPROP.matcher(originalValue); - - StringBuffer sb = new StringBuffer(); - while (matcher.find()) { - final String property = matcher.group(1); - final String value = properties.getProperty(property); - if (value == null) { - throw new IllegalStateException(String.format("Replacement for ${%s} not found!", property)); - } - matcher.appendReplacement(sb, Matcher.quoteReplacement(value)); - } - matcher.appendTail(sb); - final String resolvedValue = sb.toString(); - return resolvedValue.equals(originalValue) ? null : resolvedValue; - } - - public ConfigurationParser(String xml) throws IOException, SAXException, JAXBException, ParserConfigurationException { - Collection schemaSources = new ArrayList<>(); - schemaSources.add(new StreamSource(CORE_SCHEMA_URL.openStream())); - - for (CacheManagerServiceConfigurationParser parser : ClassLoading.libraryServiceLoaderFor(CacheManagerServiceConfigurationParser.class)) { - schemaSources.add(parser.getXmlSchema()); - xmlParsers.put(parser.getNamespace(), parser); - } - for (CacheServiceConfigurationParser parser : ClassLoading.libraryServiceLoaderFor(CacheServiceConfigurationParser.class)) { - schemaSources.add(parser.getXmlSchema()); - cacheXmlParsers.put(parser.getNamespace(), parser); - } - // Parsers for /config/cache/resources extensions - for (CacheResourceConfigurationParser parser : ClassLoading.libraryServiceLoaderFor(CacheResourceConfigurationParser.class)) { - schemaSources.add(parser.getXmlSchema()); - resourceXmlParsers.put(parser.getNamespace(), parser); - } - - DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); - factory.setNamespaceAware(true); - factory.setIgnoringComments(true); - factory.setIgnoringElementContentWhitespace(true); - factory.setSchema(newSchema(schemaSources.toArray(new Source[schemaSources.size()]))); - - DocumentBuilder domBuilder = factory.newDocumentBuilder(); - domBuilder.setErrorHandler(new FatalErrorHandler()); - Element dom = domBuilder.parse(xml).getDocumentElement(); - - substituteSystemProperties(dom); - - if (!CORE_SCHEMA_ROOT_ELEMENT.equals(dom.getLocalName()) || !CORE_SCHEMA_NAMESPACE.equals(dom.getNamespaceURI())) { - throw new XmlConfigurationException("Expecting {" + CORE_SCHEMA_NAMESPACE + "}" + CORE_SCHEMA_ROOT_ELEMENT - + " element; found {" + dom.getNamespaceURI() + "}" + dom.getLocalName()); - } - - Class configTypeClass = ConfigType.class; - JAXBContext jc = JAXBContext.newInstance(CORE_SCHEMA_JAXB_MODEL_PACKAGE, configTypeClass.getClassLoader()); - this.unmarshaller = jc.createUnmarshaller(); - this.config = unmarshaller.unmarshal(dom, configTypeClass).getValue(); - } - - private void substituteSystemProperties(final Element dom) { - final Properties properties = System.getProperties(); - Stack nodeLists = new Stack<>(); - nodeLists.push(dom.getChildNodes()); - while (!nodeLists.isEmpty()) { - NodeList nodeList = nodeLists.pop(); - for (int i = 0; i < nodeList.getLength(); ++i) { - Node currentNode = nodeList.item(i); - if (currentNode.hasChildNodes()) { - nodeLists.push(currentNode.getChildNodes()); - } - final NamedNodeMap attributes = currentNode.getAttributes(); - if (attributes != null) { - for (int j = 0; j < attributes.getLength(); ++j) { - final Node attributeNode = attributes.item(j); - final String newValue = replaceProperties(attributeNode.getNodeValue(), properties); - if (newValue != null) { - attributeNode.setNodeValue(newValue); - } - } - } - if (currentNode.getNodeType() == Node.TEXT_NODE) { - final String newValue = replaceProperties(currentNode.getNodeValue(), properties); - if (newValue != null) { - currentNode.setNodeValue(newValue); - } - } - } - } - } - - public Iterable getServiceElements() { - return config.getService(); - } - - public SerializerType getDefaultSerializers() { - return config.getDefaultSerializers(); - } - - public CopierType getDefaultCopiers() { - return config.getDefaultCopiers(); - } - - public PersistenceType getPersistence() { - return config.getPersistence(); - } - - public ThreadPoolReferenceType getEventDispatch() { - return config.getEventDispatch(); - } - - public ThreadPoolReferenceType getWriteBehind() { - return config.getWriteBehind(); - } - - public ThreadPoolReferenceType getDiskStore() { - return config.getDiskStore(); - } - - public ThreadPoolsType getThreadPools() { - return config.getThreadPools(); - } - - public SizeOfEngineLimits getHeapStore() { - SizeofType type = config.getHeapStore(); - return type == null ? null : new XmlSizeOfEngineLimits(type); - } - - public Iterable getCacheElements() { - List cacheCfgs = new ArrayList<>(); - final List cacheOrCacheTemplate = config.getCacheOrCacheTemplate(); - for (BaseCacheType baseCacheType : cacheOrCacheTemplate) { - if(baseCacheType instanceof CacheType) { - final CacheType cacheType = (CacheType)baseCacheType; - - final BaseCacheType[] sources; - if(cacheType.getUsesTemplate() != null) { - sources = new BaseCacheType[2]; - sources[0] = cacheType; - sources[1] = (BaseCacheType) cacheType.getUsesTemplate(); - } else { - sources = new BaseCacheType[1]; - sources[0] = cacheType; - } - - cacheCfgs.add(new CacheDefinition() { - @Override - public String id() { - return cacheType.getAlias(); - } - - @Override - public String keyType() { - String value = null; - for (BaseCacheType source : sources) { - value = source.getKeyType() != null ? source.getKeyType().getValue() : null; - if (value != null) break; - } - if (value == null) { - for (BaseCacheType source : sources) { - value = JaxbHelper.findDefaultValue(source, "keyType"); - if (value != null) break; - } - } - return value; - } - - @Override - public String keySerializer() { - String value = null; - for (BaseCacheType source : sources) { - value = source.getKeyType() != null ? source.getKeyType().getSerializer() : null; - if (value != null) break; - } - return value; - } - - @Override - public String keyCopier() { - String value = null; - for (BaseCacheType source : sources) { - value = source.getKeyType() != null ? source.getKeyType().getCopier() : null; - if (value != null) break; - } - return value; - } - - @Override - public String valueType() { - String value = null; - for (BaseCacheType source : sources) { - value = source.getValueType() != null ? source.getValueType().getValue() : null; - if (value != null) break; - } - if (value == null) { - for (BaseCacheType source : sources) { - value = JaxbHelper.findDefaultValue(source, "valueType"); - if (value != null) break; - } - } - return value; - } - - @Override - public String valueSerializer() { - String value = null; - for (BaseCacheType source : sources) { - value = source.getValueType() != null ? source.getValueType().getSerializer() : null; - if (value != null) break; - } - return value; - } - - @Override - public String valueCopier() { - String value = null; - for (BaseCacheType source : sources) { - value = source.getValueType() != null ? source.getValueType().getCopier() : null; - if (value != null) break; - } - return value; - } - - @Override - public String evictionAdvisor() { - String value = null; - for (BaseCacheType source : sources) { - value = source.getEvictionAdvisor(); - if (value != null) break; - } - return value; - } - - @Override - public Expiry expiry() { - ExpiryType value = null; - for (BaseCacheType source : sources) { - value = source.getExpiry(); - if (value != null) break; - } - if (value != null) { - return new XmlExpiry(value); - } else { - return null; - } - } - - @Override - public String loaderWriter() { - String configClass = null; - for (BaseCacheType source : sources) { - final CacheLoaderWriterType loaderWriter = source.getLoaderWriter(); - if (loaderWriter != null) { - configClass = loaderWriter.getClazz(); - break; - } - } - return configClass; - } - - @Override - public ListenersConfig listenersConfig() { - ListenersType base = null; - ArrayList additionals = new ArrayList<>(); - for (BaseCacheType source : sources) { - if (source.getListeners() != null) { - if (base == null) { - base = source.getListeners(); - } else { - additionals.add(source.getListeners()); - } - } - } - return base != null ? new XmlListenersConfig(base, additionals.toArray(new ListenersType[0])) : null; - } - - - @Override - public Iterable> serviceConfigs() { - Map, ServiceConfiguration> configsMap = - new HashMap<>(); - for (BaseCacheType source : sources) { - for (Element child : source.getServiceConfiguration()) { - ServiceConfiguration serviceConfiguration = parseCacheExtension(child); - if (!configsMap.containsKey(serviceConfiguration.getClass())) { - configsMap.put(serviceConfiguration.getClass(), serviceConfiguration); - } - } - } - return configsMap.values(); - } - - @Override - public Collection resourcePools() { - for (BaseCacheType source : sources) { - Heap heapResource = source.getHeap(); - if (heapResource != null) { - return singleton(parseResource(heapResource)); - } else { - ResourcesType resources = source.getResources(); - if (resources != null) { - return parseResources(resources); - } - } - } - return emptySet(); - } - - @Override - public WriteBehind writeBehind() { - for (BaseCacheType source : sources) { - final CacheLoaderWriterType loaderWriter = source.getLoaderWriter(); - final CacheLoaderWriterType.WriteBehind writebehind = loaderWriter != null ? loaderWriter.getWriteBehind() : null; - if (writebehind != null) { - return new XmlWriteBehind(writebehind); - } - } - return null; - } - - @Override - public DiskStoreSettings diskStoreSettings() { - DiskStoreSettingsType value = null; - for (BaseCacheType source : sources) { - value = source.getDiskStoreSettings(); - if (value != null) break; - } - if (value != null) { - return new XmlDiskStoreSettings(value); - } else { - return null; - } - } - - @Override - public SizeOfEngineLimits heapStoreSettings() { - SizeofType sizeofType = null; - for (BaseCacheType source : sources) { - sizeofType = source.getHeapStoreSettings(); - if (sizeofType != null) break; - } - return sizeofType != null ? new XmlSizeOfEngineLimits(sizeofType) : null; - } - }); - } - } - - return Collections.unmodifiableList(cacheCfgs); - } - - public Map getTemplates() { - final Map templates = new HashMap<>(); - final List cacheOrCacheTemplate = config.getCacheOrCacheTemplate(); - for (BaseCacheType baseCacheType : cacheOrCacheTemplate) { - if (baseCacheType instanceof CacheTemplateType) { - final CacheTemplateType cacheTemplate = (CacheTemplateType)baseCacheType; - templates.put(cacheTemplate.getName(), new CacheTemplate() { - - @Override - public String keyType() { - String keyType = cacheTemplate.getKeyType() != null ? cacheTemplate.getKeyType().getValue() : null; - if (keyType == null) { - keyType = JaxbHelper.findDefaultValue(cacheTemplate, "keyType"); - } - return keyType; - } - - @Override - public String keySerializer() { - return cacheTemplate.getKeyType() != null ? cacheTemplate.getKeyType().getSerializer() : null; - } - - @Override - public String keyCopier() { - return cacheTemplate.getKeyType() != null ? cacheTemplate.getKeyType().getCopier() : null; - } - - @Override - public String valueType() { - String valueType = cacheTemplate.getValueType() != null ? cacheTemplate.getValueType().getValue() : null; - if (valueType == null) { - valueType = JaxbHelper.findDefaultValue(cacheTemplate, "valueType"); - } - return valueType; - } - - @Override - public String valueSerializer() { - return cacheTemplate.getValueType() != null ? cacheTemplate.getValueType().getSerializer() : null; - } - - @Override - public String valueCopier() { - return cacheTemplate.getValueType() != null ? cacheTemplate.getValueType().getCopier() : null; - } - - @Override - public String evictionAdvisor() { - return cacheTemplate.getEvictionAdvisor(); - } - - @Override - public Expiry expiry() { - ExpiryType cacheTemplateExpiry = cacheTemplate.getExpiry(); - if (cacheTemplateExpiry != null) { - return new XmlExpiry(cacheTemplateExpiry); - } else { - return null; - } - } - - @Override - public ListenersConfig listenersConfig() { - final ListenersType integration = cacheTemplate.getListeners(); - return integration != null ? new XmlListenersConfig(integration) : null; - } - - @Override - public String loaderWriter() { - final CacheLoaderWriterType loaderWriter = cacheTemplate.getLoaderWriter(); - return loaderWriter != null ? loaderWriter.getClazz() : null; - } - - @Override - public Iterable> serviceConfigs() { - Collection> configs = new ArrayList<>(); - for (Element child : cacheTemplate.getServiceConfiguration()) { - configs.add(parseCacheExtension(child)); - } - return configs; - } - - @Override - public Collection resourcePools() { - Heap heapResource = cacheTemplate.getHeap(); - if (heapResource != null) { - return singleton(parseResource(heapResource)); - } else { - ResourcesType resources = cacheTemplate.getResources(); - if (resources != null) { - return parseResources(resources); - } - } - - return emptySet(); - } - - @Override - public WriteBehind writeBehind() { - final CacheLoaderWriterType loaderWriter = cacheTemplate.getLoaderWriter(); - final CacheLoaderWriterType.WriteBehind writebehind = loaderWriter != null ? loaderWriter.getWriteBehind(): null; - return writebehind != null ? new XmlWriteBehind(writebehind) : null; - } - - @Override - public DiskStoreSettings diskStoreSettings() { - final DiskStoreSettingsType diskStoreSettings = cacheTemplate.getDiskStoreSettings(); - return diskStoreSettings == null ? null : new XmlDiskStoreSettings(diskStoreSettings); - } - - @Override - public SizeOfEngineLimits heapStoreSettings() { - SizeofType type = cacheTemplate.getHeapStoreSettings(); - return type == null ? null : new XmlSizeOfEngineLimits(type); - } - }); - } - } - return Collections.unmodifiableMap(templates); - } - - private Collection parseResources(ResourcesType resources) { - Collection resourcePools = new ArrayList<>(); - for (Element resource : resources.getResource()) { - resourcePools.add(parseResource(resource)); - } - return resourcePools; - } - - private ResourcePool parseResource(Heap resource) { - ResourceType heapResource = resource.getValue(); - return new SizedResourcePoolImpl<>(org.ehcache.config.ResourceType.Core.HEAP, - heapResource.getValue().longValue(), parseUnit(heapResource), false); - } - - private ResourcePool parseResource(Element element) { - if (!CORE_SCHEMA_NAMESPACE.equals(element.getNamespaceURI())) { - return parseResourceExtension(element); - } - try { - Object resource = unmarshaller.unmarshal(element); - if (resource instanceof Heap) { - ResourceType heapResource = ((Heap) resource).getValue(); - return new SizedResourcePoolImpl<>(org.ehcache.config.ResourceType.Core.HEAP, - heapResource.getValue().longValue(), parseUnit(heapResource), false); - } else if (resource instanceof Offheap) { - MemoryType offheapResource = ((Offheap) resource).getValue(); - return new SizedResourcePoolImpl<>(org.ehcache.config.ResourceType.Core.OFFHEAP, - offheapResource.getValue().longValue(), parseMemory(offheapResource), false); - } else if (resource instanceof Disk) { - PersistableMemoryType diskResource = ((Disk) resource).getValue(); - return new SizedResourcePoolImpl<>(org.ehcache.config.ResourceType.Core.DISK, - diskResource.getValue().longValue(), parseMemory(diskResource), diskResource.isPersistent()); - } else { - // Someone updated the core resources without updating *this* code ... - throw new AssertionError("Unrecognized resource: " + element + " / " + resource.getClass().getName()); - } - } catch (JAXBException e) { - throw new IllegalArgumentException("Can't find parser for resource: " + element, e); - } - } - - private static ResourceUnit parseUnit(ResourceType resourceType) { - if (resourceType.getUnit().value().equalsIgnoreCase("entries")) { - return EntryUnit.ENTRIES; - } else { - return MemoryUnit.valueOf(resourceType.getUnit().value().toUpperCase()); - } - } - - private static MemoryUnit parseMemory(MemoryType memoryType) { - return MemoryUnit.valueOf(memoryType.getUnit().value().toUpperCase()); - } - - ServiceCreationConfiguration parseExtension(final Element element) { - URI namespace = URI.create(element.getNamespaceURI()); - final CacheManagerServiceConfigurationParser cacheManagerServiceConfigurationParser = xmlParsers.get(namespace); - if(cacheManagerServiceConfigurationParser == null) { - throw new IllegalArgumentException("Can't find parser for namespace: " + namespace); - } - return cacheManagerServiceConfigurationParser.parseServiceCreationConfiguration(element); - } - - ServiceConfiguration parseCacheExtension(final Element element) { - URI namespace = URI.create(element.getNamespaceURI()); - final CacheServiceConfigurationParser xmlConfigurationParser = cacheXmlParsers.get(namespace); - if(xmlConfigurationParser == null) { - throw new IllegalArgumentException("Can't find parser for namespace: " + namespace); - } - return xmlConfigurationParser.parseServiceConfiguration(element); - } - - ResourcePool parseResourceExtension(final Element element) { - URI namespace = URI.create(element.getNamespaceURI()); - final CacheResourceConfigurationParser xmlConfigurationParser = resourceXmlParsers.get(namespace); - if (xmlConfigurationParser == null) { - throw new XmlConfigurationException("Can't find parser for namespace: " + namespace); - } - return xmlConfigurationParser.parseResourceConfiguration(element); - } - - static class FatalErrorHandler implements ErrorHandler { - - @Override - public void warning(SAXParseException exception) throws SAXException { - throw exception; - } - - @Override - public void error(SAXParseException exception) throws SAXException { - throw exception; - } - - @Override - public void fatalError(SAXParseException exception) throws SAXException { - throw exception; - } - } - - interface CacheTemplate { - - String keyType(); - - String keySerializer(); - - String keyCopier(); - - String valueType(); - - String valueSerializer(); - - String valueCopier(); - - String evictionAdvisor(); - - Expiry expiry(); - - String loaderWriter(); - - ListenersConfig listenersConfig(); - - Iterable> serviceConfigs(); - - Collection resourcePools(); - - WriteBehind writeBehind(); - - DiskStoreSettings diskStoreSettings(); - - SizeOfEngineLimits heapStoreSettings(); - - } - - interface CacheDefinition extends CacheTemplate { - - String id(); - - } - - interface ListenersConfig { - - int dispatcherConcurrency(); - - String threadPool(); - - Iterable listeners(); - } - - interface Listener { - - String className(); - - EventFiringType eventFiring(); - - EventOrderingType eventOrdering(); - - List fireOn(); - - } - - interface Expiry { - - boolean isUserDef(); - - boolean isTTI(); - - boolean isTTL(); - - String type(); - - long value(); - - TimeUnit unit(); - - } - - interface WriteBehind { - - int maxQueueSize(); - - int concurrency(); - - String threadPool(); - - Batching batching(); - } - - interface Batching { - - boolean isCoalesced(); - - int batchSize(); - - long maxDelay(); - - TimeUnit maxDelayUnit(); - } - - interface DiskStoreSettings { - - int writerConcurrency(); - - String threadPool(); - - int diskSegments(); - } - - - interface SizeOfEngineLimits { - - long getMaxObjectGraphSize(); - - long getMaxObjectSize(); - - MemoryUnit getUnit(); - } - - private static class XmlListenersConfig implements ListenersConfig { - - final int dispatcherConcurrency; - final String threadPool; - final Iterable listeners; - - private XmlListenersConfig(final ListenersType type, final ListenersType... others) { - this.dispatcherConcurrency = type.getDispatcherConcurrency().intValue(); - String threadPool = type.getDispatcherThreadPool(); - Set listenerSet = new HashSet<>(); - final List xmlListeners = type.getListener(); - extractListeners(listenerSet, xmlListeners); - - for (ListenersType other : others) { - if (threadPool == null && other.getDispatcherThreadPool() != null) { - threadPool = other.getDispatcherThreadPool(); - } - extractListeners(listenerSet, other.getListener()); - } - - this.threadPool = threadPool; - this.listeners = !listenerSet.isEmpty() ? listenerSet : null; - } - - private void extractListeners(Set listenerSet, List xmlListeners) { - if(xmlListeners != null) { - for(final ListenersType.Listener listener : xmlListeners) { - listenerSet.add(new Listener() { - @Override - public String className() { - return listener.getClazz(); - } - - @Override - public EventFiringType eventFiring() { - return listener.getEventFiringMode(); - } - - @Override - public EventOrderingType eventOrdering() { - return listener.getEventOrderingMode(); - } - - @Override - public List fireOn() { - return listener.getEventsToFireOn(); - } - }); - } - } - } - - @Override - public int dispatcherConcurrency() { - return dispatcherConcurrency; - } - - @Override - public String threadPool() { - return threadPool; - } - - @Override - public Iterable listeners() { - return listeners; - } - - } - - private static class XmlExpiry implements Expiry { - - final ExpiryType type; - - private XmlExpiry(final ExpiryType type) { - this.type = type; - } - - @Override - public boolean isUserDef() { - return type != null && type.getClazz() != null; - } - - @Override - public boolean isTTI() { - return type != null && type.getTti() != null; - } - - @Override - public boolean isTTL() { - return type != null && type.getTtl() != null; - } - - @Override - public String type() { - return type.getClazz(); - } - - @Override - public long value() { - final TimeType time; - if(isTTI()) { - time = type.getTti(); - } else { - time = type.getTtl(); - } - return time == null ? 0L : time.getValue().longValue(); - } - - @Override - public TimeUnit unit() { - final TimeType time; - if(isTTI()) { - time = type.getTti(); - } else { - time = type.getTtl(); - } - if(time != null) { - return XmlModel.convertToJUCTimeUnit(time.getUnit()); - } - return null; - } - } - - private static class XmlSizeOfEngineLimits implements SizeOfEngineLimits { - - private final SizeofType sizeoflimits; - - private XmlSizeOfEngineLimits(SizeofType sizeoflimits) { - this.sizeoflimits = sizeoflimits; - } - - @Override - public long getMaxObjectGraphSize() { - SizeofType.MaxObjectGraphSize value = sizeoflimits.getMaxObjectGraphSize(); - if (value == null) { - return new BigInteger(JaxbHelper.findDefaultValue(sizeoflimits, "maxObjectGraphSize")).longValue(); - } else { - return value.getValue().longValue(); - } - } - - @Override - public long getMaxObjectSize() { - MemoryType value = sizeoflimits.getMaxObjectSize(); - if (value == null) { - return new BigInteger(JaxbHelper.findDefaultValue(sizeoflimits, "maxObjectSize")).longValue(); - } else { - return value.getValue().longValue(); - } - } - - @Override - public MemoryUnit getUnit() { - MemoryType value = sizeoflimits.getMaxObjectSize(); - if (value == null) { - return MemoryUnit.valueOf(new ObjectFactory().createMemoryType().getUnit().value().toUpperCase()); - } else { - return MemoryUnit.valueOf(value.getUnit().value().toUpperCase()); - } - } - - } - - private static class XmlWriteBehind implements WriteBehind { - - private final CacheLoaderWriterType.WriteBehind writebehind; - - private XmlWriteBehind(CacheLoaderWriterType.WriteBehind writebehind) { - this.writebehind = writebehind; - } - - @Override - public int maxQueueSize() { - return this.writebehind.getSize().intValue(); - } - - @Override - public int concurrency() { - return this.writebehind.getConcurrency().intValue() ; - } - - @Override - public String threadPool() { - return this.writebehind.getThreadPool(); - } - - @Override - public Batching batching() { - CacheLoaderWriterType.WriteBehind.Batching batching = writebehind.getBatching(); - if (batching == null) { - return null; - } else { - return new XmlBatching(batching); - } - } - - } - - private static class XmlBatching implements Batching { - - private final CacheLoaderWriterType.WriteBehind.Batching batching; - - private XmlBatching(CacheLoaderWriterType.WriteBehind.Batching batching) { - this.batching = batching; - } - - @Override - public boolean isCoalesced() { - return this.batching.isCoalesce(); - } - - @Override - public int batchSize() { - return this.batching.getBatchSize().intValue(); - } - - @Override - public long maxDelay() { - return this.batching.getMaxWriteDelay().getValue().longValue(); - } - - @Override - public TimeUnit maxDelayUnit() { - return XmlModel.convertToJUCTimeUnit(this.batching.getMaxWriteDelay().getUnit()); - } - - } - - private static class XmlDiskStoreSettings implements DiskStoreSettings { - - private final DiskStoreSettingsType diskStoreSettings; - - private XmlDiskStoreSettings(DiskStoreSettingsType diskStoreSettings) { - this.diskStoreSettings = diskStoreSettings; - } - - @Override - public int writerConcurrency() { - return this.diskStoreSettings.getWriterConcurrency().intValue(); - } - - @Override - public String threadPool() { - return this.diskStoreSettings.getThreadPool(); - } - - @Override - public int diskSegments() { - return this.diskStoreSettings.getDiskSegments().intValue(); - } - } - -} diff --git a/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java b/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java deleted file mode 100644 index 6dcc1f9f3b..0000000000 --- a/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java +++ /dev/null @@ -1,611 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.xml; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.Configuration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePool; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.Builder; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.builders.WriteBehindConfigurationBuilder; -import org.ehcache.config.builders.WriteBehindConfigurationBuilder.BatchedWriteBehindConfigurationBuilder; -import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.event.CacheEventListener; -import org.ehcache.event.EventFiring; -import org.ehcache.event.EventOrdering; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.config.copy.DefaultCopierConfiguration; -import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; -import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration; -import org.ehcache.impl.config.event.DefaultCacheEventDispatcherConfiguration; -import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; -import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; -import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; -import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; -import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; -import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; -import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; -import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; -import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.xml.ConfigurationParser.Batching; -import org.ehcache.xml.ConfigurationParser.WriteBehind; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.ehcache.xml.model.CopierType; -import org.ehcache.xml.model.EventType; -import org.ehcache.xml.model.SerializerType; -import org.ehcache.xml.model.ServiceType; -import org.ehcache.xml.model.ThreadPoolReferenceType; -import org.ehcache.xml.model.ThreadPoolsType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.xml.sax.SAXException; - -import javax.xml.bind.JAXBException; -import javax.xml.parsers.ParserConfigurationException; -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; - -/** - * Exposes {@link org.ehcache.config.Configuration} and {@link CacheConfigurationBuilder} expressed - * in a XML file that obeys the core Ehcache schema. - *

                                      - * Instances of this class are not thread-safe. - */ -public class XmlConfiguration implements Configuration { - - private static final Logger LOGGER = LoggerFactory.getLogger(XmlConfiguration.class); - - private final URL xml; - private final ClassLoader classLoader; - private final Map cacheClassLoaders; - - private final Collection> serviceConfigurations = new ArrayList<>(); - private final Map> cacheConfigurations = new HashMap<>(); - private final Map templates = new HashMap<>(); - - /** - * Constructs an instance of XmlConfiguration mapping to the XML file located at {@code url} - *

                                      - * Parses the XML file at the {@code url} provided. - * - * @param url URL pointing to the XML file's location - * - * @throws XmlConfigurationException if anything went wrong parsing the XML - */ - public XmlConfiguration(URL url) - throws XmlConfigurationException { - this(url, ClassLoading.getDefaultClassLoader()); - } - - /** - * Constructs an instance of XmlConfiguration mapping to the XML file located at {@code url} and using the provided - * {@code classLoader} to load user types (e.g. key and value Class instances). - *

                                      - * Parses the XML file at the {@code url} provided. - * - * @param url URL pointing to the XML file's location - * @param classLoader ClassLoader to use to load user types. - * - * @throws XmlConfigurationException if anything went wrong parsing the XML - */ - public XmlConfiguration(URL url, final ClassLoader classLoader) - throws XmlConfigurationException { - this(url, classLoader, Collections.emptyMap()); - } - - /** - * Constructs an instance of XmlConfiguration mapping to the XML file located at {@code url} and using the provided - * {@code classLoader} to load user types (e.g. key and value Class instances). The {@code cacheClassLoaders} will - * let you specify a different {@link java.lang.ClassLoader} to use for each {@link org.ehcache.Cache} managed by - * the {@link org.ehcache.CacheManager} configured using this {@link org.ehcache.xml.XmlConfiguration} - *

                                      - * Parses the XML file at the {@code url} provided. - * - * @param url URL pointing to the XML file's location - * @param classLoader ClassLoader to use to load user types. - * @param cacheClassLoaders the map with mappings between cache names and the corresponding class loaders - * - * @throws XmlConfigurationException if anything went wrong parsing the XML - */ - public XmlConfiguration(URL url, final ClassLoader classLoader, final Map cacheClassLoaders) - throws XmlConfigurationException { - if(url == null) { - throw new NullPointerException("The url can not be null"); - } - if(classLoader == null) { - throw new NullPointerException("The classLoader can not be null"); - } - if(cacheClassLoaders == null) { - throw new NullPointerException("The cacheClassLoaders map can not be null"); - } - this.xml = url; - this.classLoader = classLoader; - this.cacheClassLoaders = new HashMap<>(cacheClassLoaders); - try { - parseConfiguration(); - } catch (XmlConfigurationException e) { - throw e; - } catch (Exception e) { - throw new XmlConfigurationException("Error parsing XML configuration at " + url, e); - } - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - private void parseConfiguration() - throws ClassNotFoundException, IOException, SAXException, InstantiationException, IllegalAccessException, JAXBException, ParserConfigurationException { - LOGGER.info("Loading Ehcache XML configuration from {}.", xml.getPath()); - ConfigurationParser configurationParser = new ConfigurationParser(xml.toExternalForm()); - - final ArrayList> serviceConfigs = new ArrayList<>(); - - for (ServiceType serviceType : configurationParser.getServiceElements()) { - final ServiceCreationConfiguration serviceConfiguration = configurationParser.parseExtension(serviceType.getServiceCreationConfiguration()); - serviceConfigs.add(serviceConfiguration); - } - - if (configurationParser.getDefaultSerializers() != null) { - DefaultSerializationProviderConfiguration configuration = new DefaultSerializationProviderConfiguration(); - - for (SerializerType.Serializer serializer : configurationParser.getDefaultSerializers().getSerializer()) { - configuration.addSerializerFor(getClassForName(serializer.getType(), classLoader), (Class) getClassForName(serializer.getValue(), classLoader)); - } - serviceConfigs.add(configuration); - } - if (configurationParser.getDefaultCopiers() != null) { - DefaultCopyProviderConfiguration configuration = new DefaultCopyProviderConfiguration(); - - for (CopierType.Copier copier : configurationParser.getDefaultCopiers().getCopier()) { - configuration.addCopierFor(getClassForName(copier.getType(), classLoader), (Class)getClassForName(copier.getValue(), classLoader)); - } - serviceConfigs.add(configuration); - } - if (configurationParser.getHeapStore() != null) { - DefaultSizeOfEngineProviderConfiguration configuration = new DefaultSizeOfEngineProviderConfiguration( - configurationParser.getHeapStore().getMaxObjectSize(), configurationParser.getHeapStore().getUnit(), - configurationParser.getHeapStore().getMaxObjectGraphSize()); - serviceConfigs.add(configuration); - } - if (configurationParser.getPersistence() != null) { - serviceConfigs.add(new CacheManagerPersistenceConfiguration(new File(configurationParser.getPersistence().getDirectory()))); - } - if (configurationParser.getThreadPools() != null) { - PooledExecutionServiceConfiguration poolsConfiguration = new PooledExecutionServiceConfiguration(); - for (ThreadPoolsType.ThreadPool pool : configurationParser.getThreadPools().getThreadPool()) { - if (pool.isDefault()) { - poolsConfiguration.addDefaultPool(pool.getAlias(), pool.getMinSize().intValue(), pool.getMaxSize().intValue()); - } else { - poolsConfiguration.addPool(pool.getAlias(), pool.getMinSize().intValue(), pool.getMaxSize().intValue()); - } - } - serviceConfigs.add(poolsConfiguration); - } - if (configurationParser.getEventDispatch() != null) { - ThreadPoolReferenceType eventDispatchThreading = configurationParser.getEventDispatch(); - serviceConfigs.add(new CacheEventDispatcherFactoryConfiguration(eventDispatchThreading.getThreadPool())); - } - if (configurationParser.getWriteBehind() != null) { - ThreadPoolReferenceType writeBehindThreading = configurationParser.getWriteBehind(); - serviceConfigs.add(new WriteBehindProviderConfiguration(writeBehindThreading.getThreadPool())); - } - if (configurationParser.getDiskStore() != null) { - ThreadPoolReferenceType diskStoreThreading = configurationParser.getDiskStore(); - serviceConfigs.add(new OffHeapDiskStoreProviderConfiguration(diskStoreThreading.getThreadPool())); - } - - for (ServiceCreationConfiguration serviceConfiguration : Collections.unmodifiableList(serviceConfigs)) { - serviceConfigurations.add(serviceConfiguration); - } - - for (ConfigurationParser.CacheDefinition cacheDefinition : configurationParser.getCacheElements()) { - String alias = cacheDefinition.id(); - if(cacheConfigurations.containsKey(alias)) { - throw new XmlConfigurationException("Two caches defined with the same alias: " + alias); - } - - ClassLoader cacheClassLoader = cacheClassLoaders.get(alias); - boolean classLoaderConfigured = false; - if (cacheClassLoader != null) { - classLoaderConfigured = true; - } - - if (cacheClassLoader == null) { - if (classLoader != null) { - cacheClassLoader = classLoader; - } else { - cacheClassLoader = ClassLoading.getDefaultClassLoader(); - } - } - - Class keyType = getClassForName(cacheDefinition.keyType(), cacheClassLoader); - Class valueType = getClassForName(cacheDefinition.valueType(), cacheClassLoader); - ResourcePoolsBuilder resourcePoolsBuilder = newResourcePoolsBuilder(); - for (ResourcePool resourcePool : cacheDefinition.resourcePools()) { - resourcePoolsBuilder = resourcePoolsBuilder.with(resourcePool); - } - CacheConfigurationBuilder builder = newCacheConfigurationBuilder(keyType, valueType, resourcePoolsBuilder); - if (classLoaderConfigured) { - builder = builder.withClassLoader(cacheClassLoader); - } - - if (cacheDefinition.keySerializer() != null) { - Class keySerializer = getClassForName(cacheDefinition.keySerializer(), cacheClassLoader); - builder = builder.add(new DefaultSerializerConfiguration(keySerializer, DefaultSerializerConfiguration.Type.KEY)); - } - if (cacheDefinition.keyCopier() != null) { - Class keyCopier = getClassForName(cacheDefinition.keyCopier(), cacheClassLoader); - builder = builder.add(new DefaultCopierConfiguration(keyCopier, DefaultCopierConfiguration.Type.KEY)); - } - if (cacheDefinition.valueSerializer() != null) { - Class valueSerializer = getClassForName(cacheDefinition.valueSerializer(), cacheClassLoader); - builder = builder.add(new DefaultSerializerConfiguration(valueSerializer, DefaultSerializerConfiguration.Type.VALUE)); - } - if (cacheDefinition.valueCopier() != null) { - Class valueCopier = getClassForName(cacheDefinition.valueCopier(), cacheClassLoader); - builder = builder.add(new DefaultCopierConfiguration(valueCopier, DefaultCopierConfiguration.Type.VALUE)); - } - if (cacheDefinition.heapStoreSettings() != null) { - builder = builder.add(new DefaultSizeOfEngineConfiguration(cacheDefinition.heapStoreSettings().getMaxObjectSize(), cacheDefinition.heapStoreSettings().getUnit(), - cacheDefinition.heapStoreSettings().getMaxObjectGraphSize())); - } - EvictionAdvisor evictionAdvisor = getInstanceOfName(cacheDefinition.evictionAdvisor(), cacheClassLoader, EvictionAdvisor.class); - builder = builder.withEvictionAdvisor(evictionAdvisor); - final ConfigurationParser.Expiry parsedExpiry = cacheDefinition.expiry(); - if (parsedExpiry != null) { - builder = builder.withExpiry(getExpiry(cacheClassLoader, parsedExpiry)); - } - final ConfigurationParser.DiskStoreSettings parsedDiskStoreSettings = cacheDefinition.diskStoreSettings(); - if (parsedDiskStoreSettings != null) { - builder = builder.add(new OffHeapDiskStoreConfiguration(parsedDiskStoreSettings.threadPool(), parsedDiskStoreSettings.writerConcurrency(), parsedDiskStoreSettings.diskSegments())); - } - for (ServiceConfiguration serviceConfig : cacheDefinition.serviceConfigs()) { - builder = builder.add(serviceConfig); - } - if(cacheDefinition.loaderWriter()!= null) { - final Class> cacheLoaderWriterClass = (Class>)getClassForName(cacheDefinition.loaderWriter(), cacheClassLoader); - builder = builder.add(new DefaultCacheLoaderWriterConfiguration(cacheLoaderWriterClass)); - if(cacheDefinition.writeBehind() != null) { - WriteBehind writeBehind = cacheDefinition.writeBehind(); - WriteBehindConfigurationBuilder writeBehindConfigurationBuilder; - if (writeBehind.batching() == null) { - writeBehindConfigurationBuilder = WriteBehindConfigurationBuilder.newUnBatchedWriteBehindConfiguration(); - } else { - Batching batching = writeBehind.batching(); - writeBehindConfigurationBuilder = WriteBehindConfigurationBuilder - .newBatchedWriteBehindConfiguration(batching.maxDelay(), batching.maxDelayUnit(), batching.batchSize()); - if (batching.isCoalesced()) { - writeBehindConfigurationBuilder = ((BatchedWriteBehindConfigurationBuilder) writeBehindConfigurationBuilder).enableCoalescing(); - } - } - builder = builder.add(writeBehindConfigurationBuilder - .useThreadPool(writeBehind.threadPool()) - .concurrencyLevel(writeBehind.concurrency()) - .queueSize(writeBehind.maxQueueSize())); - } - } - builder = handleListenersConfig(cacheDefinition.listenersConfig(), cacheClassLoader, builder); - final CacheConfiguration config = builder.build(); - cacheConfigurations.put(alias, config); - } - - templates.putAll(configurationParser.getTemplates()); - } - - @SuppressWarnings("unchecked") - private Expiry getExpiry(ClassLoader cacheClassLoader, ConfigurationParser.Expiry parsedExpiry) - throws ClassNotFoundException, InstantiationException, IllegalAccessException { - final Expiry expiry; - if (parsedExpiry.isUserDef()) { - expiry = getInstanceOfName(parsedExpiry.type(), cacheClassLoader, Expiry.class); - } else if (parsedExpiry.isTTL()) { - expiry = Expirations.timeToLiveExpiration(new Duration(parsedExpiry.value(), parsedExpiry.unit())); - } else if (parsedExpiry.isTTI()) { - expiry = Expirations.timeToIdleExpiration(new Duration(parsedExpiry.value(), parsedExpiry.unit())); - } else { - expiry = Expirations.noExpiration(); - } - return expiry; - } - - private static T getInstanceOfName(String name, ClassLoader classLoader, Class type) throws ClassNotFoundException, InstantiationException, IllegalAccessException { - if (name == null) { - return null; - } - Class klazz = getClassForName(name, classLoader); - return klazz.asSubclass(type).newInstance(); - } - - private static Class getClassForName(String name, ClassLoader classLoader) throws ClassNotFoundException { - return Class.forName(name, true, classLoader); - } - - /** - * Exposes the URL where the XML file parsed or yet to be parsed was or will be sourced from. - * @return The URL provided at object instantiation - */ - public URL getURL() { - return xml; - } - - /** - * Creates a new {@link CacheConfigurationBuilder} seeded with the cache-template configuration - * by the given {@code name} in the XML configuration parsed using {@link #parseConfiguration()}. - *

                                      - * Note that this version does not specify resources, which are mandatory to create a - * {@link CacheConfigurationBuilder}. So if the template does not define resources, this will throw. - * - * @param name the unique name identifying the cache-template element in the XML - * @param keyType the type of keys for the {@link CacheConfigurationBuilder} to use, must - * match the {@code key-type} declared in the template if declared in XML - * @param valueType the type of values for the {@link CacheConfigurationBuilder} to use, must - * match the {@code value-type} declared in the template if declared in XML - * @param type of keys - * @param type of values - * - * @return the preconfigured {@link CacheConfigurationBuilder} - * or {@code null} if no cache-template for the provided {@code name} - * - * @throws IllegalStateException if {@link #parseConfiguration()} hasn't yet been successfully invoked or the template - * does not configure resources. - * @throws IllegalArgumentException if {@code keyType} or {@code valueType} don't match the declared type(s) of the template - * @throws ClassNotFoundException if a {@link java.lang.Class} declared in the XML couldn't be found - * @throws InstantiationException if a user provided {@link java.lang.Class} couldn't get instantiated - * @throws IllegalAccessException if a method (including constructor) couldn't be invoked on a user provided type - */ - @SuppressWarnings("unchecked") - public CacheConfigurationBuilder newCacheConfigurationBuilderFromTemplate(final String name, - final Class keyType, - final Class valueType) - throws InstantiationException, IllegalAccessException, ClassNotFoundException { - return internalCacheConfigurationBuilderFromTemplate(name, keyType, valueType, null); - } - - /** - * Creates a new {@link CacheConfigurationBuilder} seeded with the cache-template configuration - * by the given {@code name} in the XML configuration parsed using {@link #parseConfiguration()}. - * - * @param name the unique name identifying the cache-template element in the XML - * @param keyType the type of keys for the {@link CacheConfigurationBuilder} to use, must - * match the {@code key-type} declared in the template if declared in XML - * @param valueType the type of values for the {@link CacheConfigurationBuilder} to use, must - * match the {@code value-type} declared in the template if declared in XML - * @param resourcePools Resources definitions that will be used - * @param type of keys - * @param type of values - * - * @return the preconfigured {@link CacheConfigurationBuilder} - * or {@code null} if no cache-template for the provided {@code name} - * - * @throws IllegalStateException if {@link #parseConfiguration()} hasn't yet been successfully invoked - * @throws IllegalArgumentException if {@code keyType} or {@code valueType} don't match the declared type(s) of the template - * @throws ClassNotFoundException if a {@link java.lang.Class} declared in the XML couldn't be found - * @throws InstantiationException if a user provided {@link java.lang.Class} couldn't get instantiated - * @throws IllegalAccessException if a method (including constructor) couldn't be invoked on a user provided type - */ - @SuppressWarnings("unchecked") - public CacheConfigurationBuilder newCacheConfigurationBuilderFromTemplate(final String name, - final Class keyType, - final Class valueType, - final ResourcePools resourcePools) - throws InstantiationException, IllegalAccessException, ClassNotFoundException { - if (resourcePools == null || resourcePools.getResourceTypeSet().isEmpty()) { - throw new IllegalArgumentException("ResourcePools parameter must define at least one resource"); - } - return internalCacheConfigurationBuilderFromTemplate(name, keyType, valueType, resourcePools); - } - - /** - * Creates a new {@link CacheConfigurationBuilder} seeded with the cache-template configuration - * by the given {@code name} in the XML configuration parsed using {@link #parseConfiguration()}. - * - * @param name the unique name identifying the cache-template element in the XML - * @param keyType the type of keys for the {@link CacheConfigurationBuilder} to use, must - * match the {@code key-type} declared in the template if declared in XML - * @param valueType the type of values for the {@link CacheConfigurationBuilder} to use, must - * match the {@code value-type} declared in the template if declared in XML - * @param resourcePoolsBuilder Resources definitions that will be used - * @param type of keys - * @param type of values - * - * @return the preconfigured {@link CacheConfigurationBuilder} - * or {@code null} if no cache-template for the provided {@code name} - * - * @throws IllegalStateException if {@link #parseConfiguration()} hasn't yet been successfully invoked - * @throws IllegalArgumentException if {@code keyType} or {@code valueType} don't match the declared type(s) of the template - * @throws ClassNotFoundException if a {@link java.lang.Class} declared in the XML couldn't be found - * @throws InstantiationException if a user provided {@link java.lang.Class} couldn't get instantiated - * @throws IllegalAccessException if a method (including constructor) couldn't be invoked on a user provided type - */ - @SuppressWarnings("unchecked") - public CacheConfigurationBuilder newCacheConfigurationBuilderFromTemplate(final String name, - final Class keyType, - final Class valueType, - final Builder resourcePoolsBuilder) - throws InstantiationException, IllegalAccessException, ClassNotFoundException { - return internalCacheConfigurationBuilderFromTemplate(name, keyType, valueType, resourcePoolsBuilder.build()); - } - - @SuppressWarnings("unchecked") - private CacheConfigurationBuilder internalCacheConfigurationBuilderFromTemplate(final String name, - final Class keyType, - final Class valueType, - final ResourcePools resourcePools) - throws InstantiationException, IllegalAccessException, ClassNotFoundException { - - final ConfigurationParser.CacheTemplate cacheTemplate = templates.get(name); - if (cacheTemplate == null) { - return null; - } - final ClassLoader defaultClassLoader = ClassLoading.getDefaultClassLoader(); - Class keyClass = getClassForName(cacheTemplate.keyType(), defaultClassLoader); - Class valueClass = getClassForName(cacheTemplate.valueType(), defaultClassLoader); - if(keyType != null && cacheTemplate.keyType() != null && !keyClass.isAssignableFrom(keyType)) { - throw new IllegalArgumentException("CacheTemplate '" + name + "' declares key type of " + cacheTemplate.keyType()); - } - if(valueType != null && cacheTemplate.valueType() != null && !valueClass.isAssignableFrom(valueType)) { - throw new IllegalArgumentException("CacheTemplate '" + name + "' declares value type of " + cacheTemplate.valueType()); - } - - if ((resourcePools == null || resourcePools.getResourceTypeSet().isEmpty()) && cacheTemplate.resourcePools().isEmpty()) { - throw new IllegalStateException("Template defines no resources, and none were provided"); - } - CacheConfigurationBuilder builder; - if (resourcePools != null) { - builder = newCacheConfigurationBuilder(keyType, valueType, resourcePools); - } else { - ResourcePoolsBuilder resourcePoolsBuilder = newResourcePoolsBuilder(); - for (ResourcePool resourcePool : cacheTemplate.resourcePools()) { - resourcePoolsBuilder = resourcePoolsBuilder.with(resourcePool); - } - builder = newCacheConfigurationBuilder(keyType, valueType, resourcePoolsBuilder); - } - builder = builder - .withEvictionAdvisor(getInstanceOfName(cacheTemplate.evictionAdvisor(), defaultClassLoader, EvictionAdvisor.class)); - final ConfigurationParser.Expiry parsedExpiry = cacheTemplate.expiry(); - if (parsedExpiry != null) { - builder = builder.withExpiry(getExpiry(defaultClassLoader, parsedExpiry)); - } - - if (cacheTemplate.keySerializer() != null) { - final Class> keySerializer = (Class>) getClassForName(cacheTemplate.keySerializer(), defaultClassLoader); - builder = builder.add(new DefaultSerializerConfiguration(keySerializer, DefaultSerializerConfiguration.Type.KEY)); - } - if (cacheTemplate.keyCopier() != null) { - final Class> keyCopier = (Class>) getClassForName(cacheTemplate.keyCopier(), defaultClassLoader); - builder = builder.add(new DefaultCopierConfiguration(keyCopier, DefaultCopierConfiguration.Type.KEY)); - } - if (cacheTemplate.valueSerializer() != null) { - final Class> valueSerializer = (Class>) getClassForName(cacheTemplate.valueSerializer(), defaultClassLoader); - builder = builder.add(new DefaultSerializerConfiguration(valueSerializer, DefaultSerializerConfiguration.Type.VALUE)); - } - if (cacheTemplate.valueCopier() != null) { - final Class> valueCopier = (Class>) getClassForName(cacheTemplate.valueCopier(), defaultClassLoader); - builder = builder.add(new DefaultCopierConfiguration(valueCopier, DefaultCopierConfiguration.Type.VALUE)); - } - if (cacheTemplate.heapStoreSettings() != null) { - builder = builder.add(new DefaultSizeOfEngineConfiguration(cacheTemplate.heapStoreSettings().getMaxObjectSize(), cacheTemplate.heapStoreSettings().getUnit(), - cacheTemplate.heapStoreSettings().getMaxObjectGraphSize())); - } - final String loaderWriter = cacheTemplate.loaderWriter(); - if(loaderWriter!= null) { - final Class> cacheLoaderWriterClass = (Class>)getClassForName(loaderWriter, defaultClassLoader); - builder = builder.add(new DefaultCacheLoaderWriterConfiguration(cacheLoaderWriterClass)); - if(cacheTemplate.writeBehind() != null) { - WriteBehind writeBehind = cacheTemplate.writeBehind(); - WriteBehindConfigurationBuilder writeBehindConfigurationBuilder; - if (writeBehind.batching() == null) { - writeBehindConfigurationBuilder = WriteBehindConfigurationBuilder.newUnBatchedWriteBehindConfiguration(); - } else { - Batching batching = writeBehind.batching(); - writeBehindConfigurationBuilder = WriteBehindConfigurationBuilder.newBatchedWriteBehindConfiguration(batching.maxDelay(), batching.maxDelayUnit(), batching.batchSize()); - if (batching.isCoalesced()) { - writeBehindConfigurationBuilder = ((BatchedWriteBehindConfigurationBuilder) writeBehindConfigurationBuilder).enableCoalescing(); - } - } - builder = builder.add(writeBehindConfigurationBuilder - .concurrencyLevel(writeBehind.concurrency()) - .queueSize(writeBehind.maxQueueSize())); - } - } - builder = handleListenersConfig(cacheTemplate.listenersConfig(), defaultClassLoader, builder); - for (ServiceConfiguration serviceConfiguration : cacheTemplate.serviceConfigs()) { - builder = builder.add(serviceConfiguration); - } - return builder; - } - - private CacheConfigurationBuilder handleListenersConfig(ConfigurationParser.ListenersConfig listenersConfig, ClassLoader defaultClassLoader, CacheConfigurationBuilder builder) throws ClassNotFoundException { - if(listenersConfig != null) { - if (listenersConfig.threadPool() != null) { - builder = builder.add(new DefaultCacheEventDispatcherConfiguration(listenersConfig.threadPool())); - } - if (listenersConfig.listeners() != null) { - for (ConfigurationParser.Listener listener : listenersConfig.listeners()) { - @SuppressWarnings("unchecked") - final Class> cacheEventListenerClass = (Class>)getClassForName(listener.className(), defaultClassLoader); - final List eventListToFireOn = listener.fireOn(); - Set eventSetToFireOn = new HashSet<>(); - for (EventType events : eventListToFireOn) { - switch (events) { - case CREATED: - eventSetToFireOn.add(org.ehcache.event.EventType.CREATED); - break; - case EVICTED: - eventSetToFireOn.add(org.ehcache.event.EventType.EVICTED); - break; - case EXPIRED: - eventSetToFireOn.add(org.ehcache.event.EventType.EXPIRED); - break; - case UPDATED: - eventSetToFireOn.add(org.ehcache.event.EventType.UPDATED); - break; - case REMOVED: - eventSetToFireOn.add(org.ehcache.event.EventType.REMOVED); - break; - default: - throw new IllegalArgumentException("Invalid Event Type provided"); - } - } - CacheEventListenerConfigurationBuilder listenerBuilder = CacheEventListenerConfigurationBuilder - .newEventListenerConfiguration(cacheEventListenerClass, eventSetToFireOn) - .firingMode(EventFiring.valueOf(listener.eventFiring().value())) - .eventOrdering(EventOrdering.valueOf(listener.eventOrdering().value())); - builder = builder.add(listenerBuilder); - } - } - } - return builder; - } - - @Override - public Map> getCacheConfigurations() { - return cacheConfigurations; - } - - @Override - public Collection> getServiceCreationConfigurations() { - return serviceConfigurations; - } - - @Override - public ClassLoader getClassLoader() { - return classLoader; - } -} diff --git a/xml/src/main/java/org/ehcache/xml/XmlModel.java b/xml/src/main/java/org/ehcache/xml/XmlModel.java deleted file mode 100644 index d6e648f252..0000000000 --- a/xml/src/main/java/org/ehcache/xml/XmlModel.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.xml; - -import java.time.temporal.ChronoUnit; -import java.time.temporal.TemporalUnit; -import java.util.concurrent.TimeUnit; - -public class XmlModel { - public static TemporalUnit convertToJavaTimeUnit(org.ehcache.xml.model.TimeUnit unit) { - switch (unit) { - case NANOS: - return ChronoUnit.NANOS; - case MICROS: - return ChronoUnit.MICROS; - case MILLIS: - return ChronoUnit.MILLIS; - case SECONDS: - return ChronoUnit.SECONDS; - case MINUTES: - return ChronoUnit.MINUTES; - case HOURS: - return ChronoUnit.HOURS; - case DAYS: - return ChronoUnit.DAYS; - default: - throw new IllegalArgumentException("Unknown time unit: " + unit); - } - } - - public static TimeUnit convertToJUCTimeUnit(org.ehcache.xml.model.TimeUnit unit) { - switch (unit) { - case NANOS: - return TimeUnit.NANOSECONDS; - case MICROS: - return TimeUnit.MICROSECONDS; - case MILLIS: - return TimeUnit.MILLISECONDS; - case SECONDS: - return TimeUnit.SECONDS; - case MINUTES: - return TimeUnit.MINUTES; - case HOURS: - return TimeUnit.HOURS; - case DAYS: - return TimeUnit.DAYS; - default: - throw new IllegalArgumentException("Unknown time unit: " + unit); - } - } -} diff --git a/xml/src/test/java/com/pany/ehcache/MyExpiry.java b/xml/src/test/java/com/pany/ehcache/MyExpiry.java deleted file mode 100644 index c01a94dc65..0000000000 --- a/xml/src/test/java/com/pany/ehcache/MyExpiry.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.pany.ehcache; - -import org.ehcache.ValueSupplier; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expiry; - -import java.util.concurrent.TimeUnit; - -/** - * @author Alex Snaps - */ -public class MyExpiry implements Expiry { - @Override - public Duration getExpiryForCreation(final Object key, final Object value) { - return new Duration(42, TimeUnit.SECONDS); - } - - @Override - public Duration getExpiryForAccess(final Object key, final ValueSupplier value) { - return new Duration(42, TimeUnit.SECONDS); - } - - @Override - public Duration getExpiryForUpdate(Object key, ValueSupplier oldValue, Object newValue) { - return new Duration(42, TimeUnit.SECONDS); - } -} diff --git a/xml/src/test/java/com/pany/ehcache/integration/TestCacheLoaderWriter.java b/xml/src/test/java/com/pany/ehcache/integration/TestCacheLoaderWriter.java deleted file mode 100644 index 060c4a8fd4..0000000000 --- a/xml/src/test/java/com/pany/ehcache/integration/TestCacheLoaderWriter.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.pany.ehcache.integration; - -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.CountDownLatch; - -import org.ehcache.spi.loaderwriter.CacheLoaderWriter; - -/** - * @author Alex Snaps - */ -public class TestCacheLoaderWriter implements CacheLoaderWriter { - - public static Number lastWrittenKey; - - public static CountDownLatch latch; - - @Override - public String load(final Number key) throws Exception { - return key.toString(); - } - - @Override - public Map loadAll(final Iterable keys) throws Exception { - final Map loaded = new HashMap<>(); - for (Number key : keys) { - loaded.put(key, load(key)); - } - return loaded; - } - - @Override - public void write(final Number key, final String value) throws Exception { - lastWrittenKey = key; - if(latch != null) { - latch.countDown(); - } - } - - @Override - public void writeAll(final Iterable> entries) throws Exception { - for (Entry entry : entries) { - lastWrittenKey = entry.getKey(); - if(latch != null) { - latch.countDown(); - } - } - } - - @Override - public void delete(final Number key) throws Exception { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public void deleteAll(final Iterable keys) throws Exception { - throw new UnsupportedOperationException("Implement me!"); - } -} diff --git a/xml/src/test/java/org/ehcache/docs/GettingStarted.java b/xml/src/test/java/org/ehcache/docs/GettingStarted.java deleted file mode 100644 index f2e11943a8..0000000000 --- a/xml/src/test/java/org/ehcache/docs/GettingStarted.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.docs; - -import org.ehcache.CacheManager; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.xml.XmlConfiguration; -import org.junit.Test; - -import java.net.URL; - -/** - * GettingStarted - */ -public class GettingStarted { - - @Test - public void xmlConfigSample() throws Exception { - // tag::xmlConfig[] - final URL myUrl = getClass().getResource("/configs/docs/getting-started.xml"); // <1> - XmlConfiguration xmlConfig = new XmlConfiguration(myUrl); // <2> - CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(xmlConfig); // <3> - myCacheManager.init(); // <4> - // end::xmlConfig[] - } - - @Test - public void xmlTemplateSample() throws Exception { - // tag::xmlTemplate[] - XmlConfiguration xmlConfiguration = new XmlConfiguration(getClass().getResource("/configs/docs/template-sample.xml")); - CacheConfigurationBuilder configurationBuilder = xmlConfiguration.newCacheConfigurationBuilderFromTemplate("example", Long.class, String.class); // <1> - configurationBuilder = configurationBuilder.withResourcePools(ResourcePoolsBuilder.heap(1000)); // <2> - // end::xmlTemplate[] - } - - @Test - public void xmlExpirySample() throws Exception { - XmlConfiguration xmlConfiguration = new XmlConfiguration(getClass().getResource("/configs/docs/expiry.xml")); - CacheManagerBuilder.newCacheManager(xmlConfiguration).init(); - } -} diff --git a/xml/src/test/java/org/ehcache/xml/BarParser.java b/xml/src/test/java/org/ehcache/xml/BarParser.java deleted file mode 100644 index 063d5eb43a..0000000000 --- a/xml/src/test/java/org/ehcache/xml/BarParser.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.xml; - -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.w3c.dom.Element; - -import java.io.IOException; -import java.net.URI; -import java.net.URL; - -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; - -/** - * BarParser - */ -public class BarParser implements CacheManagerServiceConfigurationParser { - - private static final URI NAMESPACE = URI.create("http://www.example.com/bar"); - private static final URL XML_SCHEMA = FooParser.class.getResource("/configs/bar.xsd"); - - @Override - public Source getXmlSchema() throws IOException { - return new StreamSource(XML_SCHEMA.openStream()); - } - - @Override - public URI getNamespace() { - return NAMESPACE; - } - - @Override - public ServiceCreationConfiguration parseServiceCreationConfiguration(Element fragment) { - return new BarConfiguration(); - } -} diff --git a/xml/src/test/java/org/ehcache/xml/FooParser.java b/xml/src/test/java/org/ehcache/xml/FooParser.java deleted file mode 100644 index 5593f08d8b..0000000000 --- a/xml/src/test/java/org/ehcache/xml/FooParser.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.xml; - -import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; -import org.w3c.dom.Element; - -import java.io.IOException; -import java.net.URI; -import java.net.URL; - -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; - -/** - * - * @author cdennis - */ -public class FooParser implements CacheServiceConfigurationParser { - - private static final URI NAMESPACE = URI.create("http://www.example.com/foo"); - private static final URL XML_SCHEMA = FooParser.class.getResource("/configs/foo.xsd"); - - @Override - public Source getXmlSchema() throws IOException { - return new StreamSource(XML_SCHEMA.openStream()); - } - - @Override - public ServiceConfiguration parseServiceConfiguration(Element fragment) { - return new FooConfiguration(); - } - - @Override - public URI getNamespace() { - return NAMESPACE; - } - -} diff --git a/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java b/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java deleted file mode 100644 index aa2c9adb61..0000000000 --- a/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java +++ /dev/null @@ -1,744 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.xml; - -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.Configuration; -import org.ehcache.config.ResourceType; -import org.ehcache.config.ResourceUnit; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.config.copy.DefaultCopierConfiguration; -import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; -import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; -import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration; -import org.ehcache.impl.config.executor.PooledExecutionServiceConfiguration.PoolConfiguration; -import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; -import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; -import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; -import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; -import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; -import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; -import org.ehcache.impl.copy.SerializingCopier; -import org.ehcache.spi.copy.Copier; -import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; -import org.ehcache.spi.loaderwriter.WriteBehindConfiguration.BatchingConfiguration; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.xml.exceptions.XmlConfigurationException; -import org.hamcrest.CoreMatchers; -import org.hamcrest.Matchers; -import org.hamcrest.core.IsCollectionContaining; -import org.hamcrest.core.IsNull; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.NodeList; -import org.xml.sax.SAXParseException; - -import com.pany.ehcache.copier.AnotherPersonCopier; -import com.pany.ehcache.copier.Description; -import com.pany.ehcache.copier.DescriptionCopier; -import com.pany.ehcache.copier.Person; -import com.pany.ehcache.copier.PersonCopier; -import com.pany.ehcache.serializer.TestSerializer; -import com.pany.ehcache.serializer.TestSerializer2; -import com.pany.ehcache.serializer.TestSerializer3; -import com.pany.ehcache.serializer.TestSerializer4; - -import java.io.File; -import java.net.URL; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; - -import static java.util.Collections.nCopies; -import static java.util.concurrent.TimeUnit.SECONDS; -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.isIn; -import static org.hamcrest.collection.IsCollectionWithSize.hasSize; -import static org.hamcrest.collection.IsIterableContainingInOrder.contains; -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsCollectionContaining.hasItem; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.hamcrest.core.IsSame.sameInstance; -import static org.hamcrest.core.StringContains.containsString; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; - -/** - * - * @author Chris Dennis - */ -public class XmlConfigurationTest { - - @Rule - public ExpectedException thrown = ExpectedException.none(); - - @Test - public void testDefaultTypesConfig() throws Exception { - XmlConfiguration xmlConfig = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/defaultTypes-cache.xml")); - - assertThat(xmlConfig.getCacheConfigurations().keySet(), hasItem("foo")); - assertThat(xmlConfig.getCacheConfigurations().get("foo").getKeyType(), sameInstance((Class)Object.class)); - assertThat(xmlConfig.getCacheConfigurations().get("foo").getValueType(), sameInstance((Class)Object.class)); - - assertThat(xmlConfig.getCacheConfigurations().keySet(), hasItem("bar")); - assertThat(xmlConfig.getCacheConfigurations().get("bar").getKeyType(), sameInstance((Class)Number.class)); - assertThat(xmlConfig.getCacheConfigurations().get("bar").getValueType(), sameInstance((Class)Object.class)); - - assertThat(xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Object.class, Object.class, heap(10)), notNullValue()); - - //Allow the key/value to be assignable for xml configuration in case of type definition in template class - assertThat(xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Number.class, Object.class, heap(10)), notNullValue()); - } - - @Test - public void testNonExistentAdvisorClassInCacheThrowsException() throws Exception { - try { - new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/nonExistentAdvisor-cache.xml")); - fail(); - } catch (XmlConfigurationException xce) { - assertThat(xce.getCause(), instanceOf(ClassNotFoundException.class)); - } - } - - @Test - public void testNonExistentAdvisorClassInTemplateThrowsException() throws Exception { - try { - new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/nonExistentAdvisor-template.xml")); - fail(); - } catch (XmlConfigurationException xce) { - assertThat(xce.getCause(), instanceOf(ClassNotFoundException.class)); - } - } - - @Test - public void testOneServiceConfig() throws Exception { - Configuration config = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-service.xml")); - - assertThat(config.getServiceCreationConfigurations(), IsCollectionContaining.>hasItem(instanceOf(BarConfiguration.class))); - assertThat(config.getCacheConfigurations().keySet(), hasSize(0)); - } - - @Test - public void testOneCacheConfig() throws Exception { - Configuration config = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml")); - - assertThat(config.getServiceCreationConfigurations(), hasSize(0)); - assertThat(config.getCacheConfigurations().keySet(), hasItem("bar")); - assertThat(config.getCacheConfigurations().get("bar").getServiceConfigurations(), IsCollectionContaining.>hasItem(instanceOf(FooConfiguration.class))); - } - - @Test - public void testOneCacheConfigWithTemplate() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/template-cache.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - assertThat(xmlConfig.getServiceCreationConfigurations(), hasSize(0)); - assertThat(xmlConfig.getCacheConfigurations().keySet(), hasItem("bar")); - assertThat(xmlConfig.getCacheConfigurations().get("bar").getServiceConfigurations(), IsCollectionContaining.>hasItem(instanceOf(FooConfiguration.class))); - assertThat(xmlConfig.getCacheConfigurations().get("bar").getKeyType(), sameInstance((Class) Number.class)); - assertThat(xmlConfig.getCacheConfigurations().get("bar").getValueType(), sameInstance((Class)String.class)); - - final CacheConfigurationBuilder example = xmlConfig.newCacheConfigurationBuilderFromTemplate("example", String.class, String.class, - newResourcePoolsBuilder().heap(5, EntryUnit.ENTRIES)); - assertThat(example.build().getExpiry(), - equalTo((Expiry) Expirations.timeToLiveExpiration(new Duration(30, TimeUnit.SECONDS)))); - - try { - xmlConfig.newCacheConfigurationBuilderFromTemplate("example", String.class, Number.class); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), is("CacheTemplate 'example' declares value type of java.lang.String")); - } - try { - xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Number.class, String.class); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), is("CacheTemplate 'example' declares key type of java.lang.String")); - } - - assertThat(xmlConfig.newCacheConfigurationBuilderFromTemplate("bar", Object.class, Object.class), nullValue()); - } - - @Test - public void testExpiryIsParsed() throws Exception { - final XmlConfiguration xmlConfiguration = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/expiry-caches.xml")); - - Expiry expiry = xmlConfiguration.getCacheConfigurations().get("none").getExpiry(); - Expiry value = Expirations.noExpiration(); - assertThat(expiry, is(value)); - - expiry = xmlConfiguration.getCacheConfigurations().get("notSet").getExpiry(); - value = Expirations.noExpiration(); - assertThat(expiry, is(value)); - - expiry = xmlConfiguration.getCacheConfigurations().get("class").getExpiry(); - assertThat(expiry, CoreMatchers.instanceOf(com.pany.ehcache.MyExpiry.class)); - - expiry = xmlConfiguration.getCacheConfigurations().get("tti").getExpiry(); - value = Expirations.timeToIdleExpiration(new Duration(500, TimeUnit.MILLISECONDS)); - assertThat(expiry, equalTo(value)); - - expiry = xmlConfiguration.getCacheConfigurations().get("ttl").getExpiry(); - value = Expirations.timeToLiveExpiration(new Duration(30, TimeUnit.SECONDS)); - assertThat(expiry, equalTo(value)); - } - - @Test - public void testInvalidCoreConfiguration() throws Exception { - try { - new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/invalid-core.xml")); - fail(); - } catch (XmlConfigurationException xce) { - SAXParseException e = (SAXParseException) xce.getCause(); - assertThat(e.getLineNumber(), is(5)); - assertThat(e.getColumnNumber(), is(29)); - } - } - - @Test - public void testInvalidServiceConfiguration() throws Exception { - try { - new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/invalid-service.xml")); - fail(); - } catch (XmlConfigurationException xce) { - SAXParseException e = (SAXParseException) xce.getCause(); - assertThat(e.getLineNumber(), is(6)); - assertThat(e.getColumnNumber(), is(15)); - } - } - - @Test - public void testTwoCachesWithSameAlias() { - try { - new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/invalid-two-caches.xml")); - fail("Two caches with the same alias should not be allowed"); - } catch (XmlConfigurationException e) { - assertThat(e.getMessage(), is("Two caches defined with the same alias: foo")); - } - } - - @Test - public void testExposesProperURL() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/one-cache.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - assertThat(xmlConfig.getURL(), equalTo(resource)); - } - - @Test - public void testResourcesCaches() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/resources-caches.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - CacheConfiguration tieredCacheConfig = xmlConfig.getCacheConfigurations().get("tiered"); - assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(10L)); - assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(100L)); - assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(), is(false)); - - CacheConfiguration tieredPersistentCacheConfig = xmlConfig.getCacheConfigurations().get("tieredPersistent"); - assertThat(tieredPersistentCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(10L)); - assertThat(tieredPersistentCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(100L)); - assertThat(tieredPersistentCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(), is(true)); - - CacheConfiguration tieredOffHeapCacheConfig = xmlConfig.getCacheConfigurations().get("tieredOffHeap"); - assertThat(tieredOffHeapCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(10L)); - assertThat(tieredOffHeapCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getSize(), equalTo(10L)); - assertThat(tieredOffHeapCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getUnit(), equalTo((ResourceUnit) MemoryUnit.MB)); - - CacheConfiguration explicitHeapOnlyCacheConfig = xmlConfig.getCacheConfigurations().get("explicitHeapOnly"); - assertThat(explicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(15L)); - assertThat(explicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK), is(nullValue())); - - CacheConfiguration implicitHeapOnlyCacheConfig = xmlConfig.getCacheConfigurations().get("directHeapOnly"); - assertThat(implicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(25L)); - assertThat(implicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK), is(nullValue())); - } - - @Test - public void testResourcesTemplates() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/resources-templates.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - CacheConfigurationBuilder tieredResourceTemplate = xmlConfig.newCacheConfigurationBuilderFromTemplate("tieredResourceTemplate", String.class, String.class); - assertThat(tieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(5L)); - assertThat(tieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(50L)); - assertThat(tieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(), is(false)); - - CacheConfigurationBuilder persistentTieredResourceTemplate = xmlConfig.newCacheConfigurationBuilderFromTemplate("persistentTieredResourceTemplate", String.class, String.class); - assertThat(persistentTieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(5L)); - assertThat(persistentTieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(50L)); - assertThat(persistentTieredResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(), is(true)); - - CacheConfigurationBuilder tieredOffHeapResourceTemplate = xmlConfig.newCacheConfigurationBuilderFromTemplate("tieredOffHeapResourceTemplate", String.class, String.class); - assertThat(tieredOffHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(5L)); - assertThat(tieredOffHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getSize(), equalTo(50L)); - assertThat(tieredOffHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.OFFHEAP).getUnit(), equalTo((ResourceUnit)MemoryUnit.MB)); - - CacheConfigurationBuilder explicitHeapResourceTemplate = xmlConfig.newCacheConfigurationBuilderFromTemplate("explicitHeapResourceTemplate", String.class, String.class); - assertThat(explicitHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(15L)); - assertThat(explicitHeapResourceTemplate.build().getResourcePools().getPoolForResource(ResourceType.Core.DISK), is(nullValue())); - - CacheConfiguration tieredCacheConfig = xmlConfig.getCacheConfigurations().get("templatedTieredResource"); - assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(5L)); - assertThat(tieredCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK).getSize(), equalTo(50L)); - - CacheConfiguration explicitHeapOnlyCacheConfig = xmlConfig.getCacheConfigurations().get("templatedExplicitHeapResource"); - assertThat(explicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), equalTo(15L)); - assertThat(explicitHeapOnlyCacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.DISK), is(nullValue())); - } - - @Test - public void testNoClassLoaderSpecified() throws Exception { - XmlConfiguration config = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml")); - - assertSame(config.getClassLoader(), ClassLoading.getDefaultClassLoader()); - assertNull(config.getCacheConfigurations().get("bar").getClassLoader()); - } - - @Test - public void testClassLoaderSpecified() throws Exception { - ClassLoader cl = new ClassLoader() { - // - }; - - XmlConfiguration config= new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml"), cl); - - assertSame(cl, config.getClassLoader()); - assertNull(config.getCacheConfigurations().get("bar").getClassLoader()); - } - - @Test - public void testCacheClassLoaderSpecified() throws Exception { - ClassLoader cl = new ClassLoader() { - // - }; - - ClassLoader cl2 = new ClassLoader() { - // - }; - - assertNotSame(cl, cl2); - - Map loaders = new HashMap<>(); - loaders.put("bar", cl2); - XmlConfiguration config = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml"), cl, loaders); - - assertSame(cl, config.getClassLoader()); - assertSame(cl2, config.getCacheConfigurations().get("bar").getClassLoader()); - } - - @Test - public void testDefaultSerializerConfiguration() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/default-serializer.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - assertThat(xmlConfig.getServiceCreationConfigurations().size(), is(1)); - - ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); - - assertThat(configuration, instanceOf(DefaultSerializationProviderConfiguration.class)); - - DefaultSerializationProviderConfiguration factoryConfiguration = (DefaultSerializationProviderConfiguration) configuration; - assertThat(factoryConfiguration.getDefaultSerializers().size(), is(4)); - assertThat(factoryConfiguration.getDefaultSerializers().get(CharSequence.class), Matchers.>equalTo(TestSerializer.class)); - assertThat(factoryConfiguration.getDefaultSerializers().get(Number.class), Matchers.>equalTo(TestSerializer2.class)); - assertThat(factoryConfiguration.getDefaultSerializers().get(Long.class), Matchers.>equalTo(TestSerializer3.class)); - assertThat(factoryConfiguration.getDefaultSerializers().get(Integer.class), Matchers.>equalTo(TestSerializer4.class)); - - - List> orderedServiceConfigurations = new ArrayList<>(xmlConfig.getCacheConfigurations() - .get("baz") - .getServiceConfigurations()); - // order services by class name so the test can rely on some sort of ordering - Collections.sort(orderedServiceConfigurations, (o1, o2) -> o1.getClass().getName().compareTo(o2.getClass().getName())); - Iterator> it = orderedServiceConfigurations.iterator(); - - DefaultSerializerConfiguration keySerializationProviderConfiguration = (DefaultSerializerConfiguration) it.next(); - assertThat(keySerializationProviderConfiguration.getType(), isIn(new DefaultSerializerConfiguration.Type[] { DefaultSerializerConfiguration.Type.KEY, DefaultSerializerConfiguration.Type.VALUE })); - } - - @Test - public void testThreadPoolsConfiguration() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/thread-pools.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - assertThat(xmlConfig.getServiceCreationConfigurations(), contains(instanceOf(PooledExecutionServiceConfiguration.class))); - - PooledExecutionServiceConfiguration configuration = (PooledExecutionServiceConfiguration) xmlConfig.getServiceCreationConfigurations().iterator().next(); - - assertThat(configuration.getPoolConfigurations().keySet(), containsInAnyOrder("big", "small")); - - PoolConfiguration small = configuration.getPoolConfigurations().get("small"); - assertThat(small.minSize(), is(1)); - assertThat(small.maxSize(), is(1)); - - PoolConfiguration big = configuration.getPoolConfigurations().get("big"); - assertThat(big.minSize(), is(4)); - assertThat(big.maxSize(), is(32)); - - assertThat(configuration.getDefaultPoolAlias(), is("big")); - } - - @Test - public void testCacheCopierConfiguration() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/cache-copiers.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - assertThat(xmlConfig.getServiceCreationConfigurations().size(), is(1)); - - ServiceCreationConfiguration configuration = xmlConfig.getServiceCreationConfigurations().iterator().next(); - - assertThat(configuration, instanceOf(DefaultCopyProviderConfiguration.class)); - - DefaultCopyProviderConfiguration factoryConfiguration = (DefaultCopyProviderConfiguration) configuration; - assertThat(factoryConfiguration.getDefaults().size(), is(2)); - assertThat(factoryConfiguration.getDefaults().get(Description.class).getClazz(), - Matchers.>equalTo(DescriptionCopier.class)); - assertThat(factoryConfiguration.getDefaults().get(Person.class).getClazz(), - Matchers.>equalTo(PersonCopier.class)); - - - Collection> configs = xmlConfig.getCacheConfigurations().get("baz").getServiceConfigurations(); - for(ServiceConfiguration config: configs) { - if(config instanceof DefaultCopierConfiguration) { - DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration) config; - if(copierConfig.getType() == DefaultCopierConfiguration.Type.KEY) { - assertEquals(SerializingCopier.class, copierConfig.getClazz()); - } else { - assertEquals(AnotherPersonCopier.class, copierConfig.getClazz()); - } - } else { - continue; - } - } - - configs = xmlConfig.getCacheConfigurations().get("bak").getServiceConfigurations(); - for(ServiceConfiguration config: configs) { - if(config instanceof DefaultCopierConfiguration) { - DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration) config; - if(copierConfig.getType() == DefaultCopierConfiguration.Type.KEY) { - assertEquals(SerializingCopier.class, copierConfig.getClazz()); - } else { - assertEquals(AnotherPersonCopier.class, copierConfig.getClazz()); - } - } else { - continue; - } - } - } - - @Test - public void testPersistenceConfig() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/persistence-config.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - ServiceCreationConfiguration serviceConfig = xmlConfig.getServiceCreationConfigurations().iterator().next(); - assertThat(serviceConfig, instanceOf(DefaultPersistenceConfiguration.class)); - - DefaultPersistenceConfiguration persistenceConfiguration = (DefaultPersistenceConfiguration)serviceConfig; - assertThat(persistenceConfiguration.getRootDirectory(), is(new File(" \n\t/my/caching/persistence directory\r\n "))); - } - - @Test - public void testPersistenceConfigXmlPersistencePathHasWhitespaces() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/persistence-config.xml"); - DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); - Document doc = dBuilder.parse(new File(resource.toURI())); - - Element persistence = (Element) doc.getElementsByTagName("ehcache:persistence").item(0); - String directoryValue = persistence.getAttribute("directory"); - assertThat(directoryValue, containsString(" ")); - assertThat(directoryValue, containsString("\r")); - assertThat(directoryValue, containsString("\n")); - assertThat(directoryValue, containsString("\t")); - } - - @Test - public void testWriteBehind() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/writebehind-cache.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - Collection> serviceConfiguration = xmlConfig.getCacheConfigurations().get("bar").getServiceConfigurations(); - - assertThat(serviceConfiguration, IsCollectionContaining.>hasItem(instanceOf(WriteBehindConfiguration.class))); - - serviceConfiguration = xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Number.class, String.class).build().getServiceConfigurations(); - - assertThat(serviceConfiguration, IsCollectionContaining.>hasItem(instanceOf(WriteBehindConfiguration.class))); - - for (ServiceConfiguration configuration : serviceConfiguration) { - if(configuration instanceof WriteBehindConfiguration) { - BatchingConfiguration batchingConfig = ((WriteBehindConfiguration) configuration).getBatchingConfiguration(); - assertThat(batchingConfig.getMaxDelay(), is(10L)); - assertThat(batchingConfig.getMaxDelayUnit(), is(SECONDS)); - assertThat(batchingConfig.isCoalescing(), is(false)); - assertThat(batchingConfig.getBatchSize(), is(2)); - assertThat(((WriteBehindConfiguration) configuration).getConcurrency(), is(1)); - assertThat(((WriteBehindConfiguration) configuration).getMaxQueueSize(), is(10)); - break; - } - } - } - - @Test - public void testCacheEventListener() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/ehcache-cacheEventListener.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - assertThat(xmlConfig.getCacheConfigurations().size(), is(2)); - - Collection configuration = xmlConfig.getCacheConfigurations().get("bar").getServiceConfigurations(); - checkListenerConfigurationExists(configuration); - } - - @Test - public void testCacheEventListenerThroughTemplate() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/ehcache-cacheEventListener.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - CacheConfiguration cacheConfig = xmlConfig.getCacheConfigurations().get("template1"); - checkListenerConfigurationExists(cacheConfig.getServiceConfigurations()); - - CacheConfigurationBuilder templateConfig = xmlConfig.newCacheConfigurationBuilderFromTemplate("example", Number.class, String.class); - assertThat(templateConfig.getExistingServiceConfiguration(DefaultCacheEventListenerConfiguration.class), notNullValue()); - } - - @Test - public void testDefaulSerializerXmlsSerializersValueHasWhitespaces() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/default-serializer.xml"); - DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); - Document doc = dBuilder.parse(new File(resource.toURI())); - - NodeList nList = doc.getElementsByTagName("ehcache:serializer"); - - assertThat(nList.item(2).getFirstChild().getNodeValue(), containsString(" ")); - assertThat(nList.item(2).getFirstChild().getNodeValue(), containsString("\n")); - - assertThat(nList.item(3).getFirstChild().getNodeValue(), containsString(" ")); - assertThat(nList.item(3).getFirstChild().getNodeValue(), containsString("\n")); - - - nList = doc.getElementsByTagName("ehcache:key-type"); - - assertThat(nList.item(0).getFirstChild().getNodeValue(), containsString(" ")); - assertThat(nList.item(0).getFirstChild().getNodeValue(), containsString("\n")); - - assertThat(nList.item(1).getFirstChild().getNodeValue(), containsString(" ")); - assertThat(nList.item(1).getFirstChild().getNodeValue(), containsString("\n")); - - nList = doc.getElementsByTagName("ehcache:value-type"); - assertThat(nList.item(0).getFirstChild().getNodeValue(), containsString(" ")); - assertThat(nList.item(0).getFirstChild().getNodeValue(), containsString("\n")); - - assertThat(nList.item(1).getFirstChild().getNodeValue(), containsString(" ")); - assertThat(nList.item(1).getFirstChild().getNodeValue(), containsString("\n")); - - - } - - @Test - public void testDiskStoreSettings() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/resources-caches.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - - CacheConfiguration cacheConfig = xmlConfig.getCacheConfigurations().get("tiered"); - - OffHeapDiskStoreConfiguration diskConfig = findSingletonAmongst(OffHeapDiskStoreConfiguration.class, cacheConfig.getServiceConfigurations().toArray()); - - assertThat(diskConfig.getThreadPoolAlias(), is("some-pool")); - assertThat(diskConfig.getWriterConcurrency(), is(2)); - assertThat(diskConfig.getDiskSegments(), is(4)); - } - - @Test - public void testNullUrlInConstructorThrowsNPE() throws Exception { - thrown.expect(NullPointerException.class); - thrown.expectMessage("The url can not be null"); - XmlConfiguration xmlConfig = new XmlConfiguration(null, mock(ClassLoader.class), getClassLoaderMapMock()); - } - - @Test - public void testNullClassLoaderInConstructorThrowsNPE() throws Exception { - thrown.expect(NullPointerException.class); - thrown.expectMessage("The classLoader can not be null"); - XmlConfiguration xmlConfig = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml"), null, getClassLoaderMapMock()); - } - - @Test - public void testNullCacheClassLoaderMapInConstructorThrowsNPE() throws Exception { - thrown.expect(NullPointerException.class); - thrown.expectMessage("The cacheClassLoaders map can not be null"); - XmlConfiguration xmlConfig = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml"), mock(ClassLoader.class), null); - } - - @Test - public void testSizeOfEngineLimits() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/sizeof-engine.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - DefaultSizeOfEngineProviderConfiguration sizeOfEngineProviderConfig = findSingletonAmongst(DefaultSizeOfEngineProviderConfiguration.class, xmlConfig.getServiceCreationConfigurations()); - - assertThat(sizeOfEngineProviderConfig, notNullValue()); - assertEquals(sizeOfEngineProviderConfig.getMaxObjectGraphSize(), 200); - assertEquals(sizeOfEngineProviderConfig.getMaxObjectSize(), 100000); - - CacheConfiguration cacheConfig = xmlConfig.getCacheConfigurations().get("usesDefaultSizeOfEngine"); - DefaultSizeOfEngineConfiguration sizeOfEngineConfig = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig.getServiceConfigurations()); - - assertThat(sizeOfEngineConfig, nullValue()); - - CacheConfiguration cacheConfig1 = xmlConfig.getCacheConfigurations().get("usesConfiguredInCache"); - DefaultSizeOfEngineConfiguration sizeOfEngineConfig1 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig1.getServiceConfigurations()); - - assertThat(sizeOfEngineConfig1, notNullValue()); - assertEquals(sizeOfEngineConfig1.getMaxObjectGraphSize(), 500); - assertEquals(sizeOfEngineConfig1.getMaxObjectSize(), 200000); - - CacheConfiguration cacheConfig2 = xmlConfig.getCacheConfigurations().get("usesPartialOneConfiguredInCache"); - DefaultSizeOfEngineConfiguration sizeOfEngineConfig2 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig2.getServiceConfigurations()); - - assertThat(sizeOfEngineConfig2, notNullValue()); - assertThat(sizeOfEngineConfig2.getMaxObjectGraphSize(), is(500L)); - assertThat(sizeOfEngineConfig2.getMaxObjectSize(), is(Long.MAX_VALUE)); - - CacheConfiguration cacheConfig3 = xmlConfig.getCacheConfigurations().get("usesPartialTwoConfiguredInCache"); - DefaultSizeOfEngineConfiguration sizeOfEngineConfig3 = findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, cacheConfig3.getServiceConfigurations()); - - assertThat(sizeOfEngineConfig3, notNullValue()); - assertThat(sizeOfEngineConfig3.getMaxObjectGraphSize(), is(1000L)); - assertThat(sizeOfEngineConfig3.getMaxObjectSize(), is(200000L)); - } - - @Test - public void testCacheManagerDefaultObjectGraphSize() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/sizeof-engine-cm-defaults-one.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - DefaultSizeOfEngineProviderConfiguration sizeOfEngineProviderConfig = findSingletonAmongst(DefaultSizeOfEngineProviderConfiguration.class, xmlConfig.getServiceCreationConfigurations()); - - assertThat(sizeOfEngineProviderConfig, notNullValue()); - assertThat(sizeOfEngineProviderConfig.getMaxObjectGraphSize(), is(1000L)); - assertThat(sizeOfEngineProviderConfig.getMaxObjectSize(), is(100000L)); - } - - @Test - public void testCacheManagerDefaultObjectSize() throws Exception { - final URL resource = XmlConfigurationTest.class.getResource("/configs/sizeof-engine-cm-defaults-two.xml"); - XmlConfiguration xmlConfig = new XmlConfiguration(resource); - DefaultSizeOfEngineProviderConfiguration sizeOfEngineProviderConfig = findSingletonAmongst(DefaultSizeOfEngineProviderConfiguration.class, xmlConfig.getServiceCreationConfigurations()); - - assertThat(sizeOfEngineProviderConfig, notNullValue()); - assertThat(sizeOfEngineProviderConfig.getMaxObjectGraphSize(), is(200L)); - assertThat(sizeOfEngineProviderConfig.getMaxObjectSize(), is(Long.MAX_VALUE)); - } - - @Test - public void testCustomResource() throws Exception { - try { - new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/custom-resource.xml")); - fail(); - } catch (XmlConfigurationException xce) { - assertThat(xce.getMessage(), containsString("Can't find parser for namespace: http://www.example.com/fancy")); - } - } - - @Test - public void testSysPropReplace() { - System.getProperties().setProperty("ehcache.match", Number.class.getName()); - XmlConfiguration xmlConfig = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/systemprops.xml")); - - assertThat(xmlConfig.getCacheConfigurations().get("bar").getKeyType(), sameInstance((Class)Number.class)); - - DefaultPersistenceConfiguration persistenceConfiguration = (DefaultPersistenceConfiguration)xmlConfig.getServiceCreationConfigurations().iterator().next(); - assertThat(persistenceConfiguration.getRootDirectory(), is(new File(System.getProperty("user.home") + "/ehcache"))); - } - - @Test - public void testSysPropReplaceRegExp() { - assertThat(ConfigurationParser.replaceProperties("foo${file.separator}", System.getProperties()), equalTo("foo" + File.separator)); - assertThat(ConfigurationParser.replaceProperties("${file.separator}foo${file.separator}", System.getProperties()), equalTo(File.separator + "foo" + File.separator)); - try { - ConfigurationParser.replaceProperties("${bar}foo", System.getProperties()); - fail("Should have thrown!"); - } catch (IllegalStateException e) { - assertThat(e.getMessage().contains("${bar}"), is(true)); - } - assertThat(ConfigurationParser.replaceProperties("foo", System.getProperties()), nullValue()); - } - - @Test - public void testMultithreadedXmlParsing() throws InterruptedException, ExecutionException { - Callable parserTask = () -> new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/one-cache.xml")); - - ExecutorService service = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); - try { - for (Future c : service.invokeAll(nCopies(10, parserTask))) { - assertThat(c.get(), IsNull.notNullValue()); - } - } finally { - service.shutdown(); - } - } - - private void checkListenerConfigurationExists(Collection configuration) { - int count = 0; - for (Object o : configuration) { - if(o instanceof DefaultCacheEventListenerConfiguration) { - count++; - } - } - assertThat(count, is(1)); - } - - @SuppressWarnings("unchecked") - private Map getClassLoaderMapMock() { - return (Map) mock(Map.class); - } -} diff --git a/xml/src/test/resources/configs/default-serializer.xml b/xml/src/test/resources/configs/default-serializer.xml deleted file mode 100644 index 8521b3e442..0000000000 --- a/xml/src/test/resources/configs/default-serializer.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - - - - com.pany.ehcache.serializer.TestSerializer - com.pany.ehcache.serializer.TestSerializer2 - - com.pany.ehcache.serializer.TestSerializer3 - - - com.pany.ehcache.serializer.TestSerializer4 - - - - - - - java.lang.Long - - java.lang.Double - - - 10 - 1 - - - - - - java.lang.String - - - java.lang.String - - - 10 - 1 - - - \ No newline at end of file diff --git a/xml/src/test/resources/configs/docs/thread-pools.xml b/xml/src/test/resources/configs/docs/thread-pools.xml deleted file mode 100644 index 5d0ad14009..0000000000 --- a/xml/src/test/resources/configs/docs/thread-pools.xml +++ /dev/null @@ -1,64 +0,0 @@ - - - - - - - - - - - - - - - - - java.lang.Long - java.lang.String - - - 10 - 10 - - - - - java.lang.Long - java.lang.String - - - org.ehcache.docs.plugs.ListenerObject - - - 10 - - - - - - 10 - 10 - - - - - - \ No newline at end of file diff --git a/xml/src/test/resources/configs/ehcache-complete.xml b/xml/src/test/resources/configs/ehcache-complete.xml deleted file mode 100644 index 2548bccb57..0000000000 --- a/xml/src/test/resources/configs/ehcache-complete.xml +++ /dev/null @@ -1,78 +0,0 @@ - - - - - - - - - some.class.Serializer - - - some.class.Copier - - - - - - - - - 1000 - 100 - - - - - java.lang.String - java.lang.String - - 1 - - some.class.Advisor - - some.class.LoaderWriter - - - 10 - - - - - - some.class.Listener - ASYNCHRONOUS - UNORDERED - EVICTED - EXPIRED - - - - 100 - 100 - - - 10 - 100 - - - - - diff --git a/xml/src/test/resources/configs/invalid-two-caches.xml b/xml/src/test/resources/configs/invalid-two-caches.xml deleted file mode 100644 index cbefbca7b9..0000000000 --- a/xml/src/test/resources/configs/invalid-two-caches.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - 2000 - - - - 2000 - - - diff --git a/xml/src/test/resources/configs/persistence-config.xml b/xml/src/test/resources/configs/persistence-config.xml deleted file mode 100644 index 7a4692ad43..0000000000 --- a/xml/src/test/resources/configs/persistence-config.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - \ No newline at end of file diff --git a/xml/src/test/resources/configs/systemprops.xml b/xml/src/test/resources/configs/systemprops.xml deleted file mode 100644 index bf18c771d0..0000000000 --- a/xml/src/test/resources/configs/systemprops.xml +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - - - - - 5 - - - - ${ehcache.match} - 5 - - diff --git a/xml/src/test/resources/configs/template-defaults.xml b/xml/src/test/resources/configs/template-defaults.xml deleted file mode 100644 index 6cd5e5a209..0000000000 --- a/xml/src/test/resources/configs/template-defaults.xml +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - diff --git a/xml/src/test/resources/configs/thread-pools.xml b/xml/src/test/resources/configs/thread-pools.xml deleted file mode 100644 index 60bba2ac6e..0000000000 --- a/xml/src/test/resources/configs/thread-pools.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - - - \ No newline at end of file