getValueType();
-
- /**
- * The {@link EvictionAdvisor} predicate function.
- *
- * Entries which pass this predicate may be ignored by the eviction process.
- * This is only a hint.
- *
- * @return the eviction advisor predicate
- */
- EvictionAdvisor<? super K, ? super V> getEvictionAdvisor();
-
- /**
- * The {@link ClassLoader} for the {@link Cache}.
- *
- * This {@code ClassLoader} will be used to instantiate cache level services
- * and for deserializing cache entries when required.
- *
- * The {@code ClassLoader} must not be null.
- *
- * @return the cache {@code ClassLoader}
- */
- ClassLoader getClassLoader();
-
- /**
- * The {@link Expiry} rules for the {@link Cache}.
- *
- * The {@code Expiry} cannot be null.
- *
- * @return the {@code Expiry}
- */
- Expiry<? super K, ? super V> getExpiry();
-
- /**
- * The {@link ResourcePools} for the {@link Cache}.
- *
- * The {@code ResourcePools} cannot be null nor empty.
- *
- * @return the {@link ResourcePools}
- */
- ResourcePools getResourcePools();
-
-}
diff --git a/api/src/main/java/org/ehcache/expiry/Expiry.java b/api/src/main/java/org/ehcache/expiry/Expiry.java
deleted file mode 100644
index b9a38f5847..0000000000
--- a/api/src/main/java/org/ehcache/expiry/Expiry.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.expiry;
-
-import org.ehcache.ValueSupplier;
-
-/**
- * A policy object that governs expiration for mappings in a {@link org.ehcache.Cache Cache}.
- *
- * Previous values are not accessible directly but are rather available through a {@link ValueSupplier value supplier}
- * to indicate that access can require computation (such as deserialization).
- *
- * NOTE: Some cache configurations (eg. caches with eventual consistency) may use local (ie. non-consistent) state
- * to decide whether to call {@link #getExpiryForUpdate(Object, ValueSupplier, Object)} vs.
- * {@link #getExpiryForCreation(Object, Object)}. For these cache configurations it is advised to return the same
- * value for both of these methods
- *
- * See {@link Expirations} for helper methods to create common {@code Expiry} instances.
- *
- * @param <K> the key type for the cache
- * @param <V> the value type for the cache
- *
- * @see Expirations
- */
-public interface Expiry<K, V> {
-
- /**
- * Returns the lifetime of an entry when it is initially added to a {@link org.ehcache.Cache Cache}.
- *
- * This method must not return {@code null}.
- *
- * Exceptions thrown from this method will be swallowed and result in the expiry duration being
- * {@link Duration#ZERO ZERO}.
- *
- * @param key the key of the newly added entry
- * @param value the value of the newly added entry
- * @return a non-null {@link Duration}
- */
- Duration getExpiryForCreation(K key, V value);
-
- /**
- * Returns the expiration {@link Duration} (relative to the current time) when an existing entry is accessed from a
- * {@link org.ehcache.Cache Cache}.
- *
- * Returning {@code null} indicates that the expiration time remains unchanged.
- *
- * Exceptions thrown from this method will be swallowed and result in the expiry duration being
- * {@link Duration#ZERO ZERO}.
- *
- * @param key the key of the accessed entry
- * @param value a value supplier for the accessed entry
- * @return an expiration {@code Duration}, {@code null} means unchanged
- */
- Duration getExpiryForAccess(K key, ValueSupplier<? extends V> value);
-
-
- /**
- * Returns the expiration {@link Duration} (relative to the current time) when an existing entry is updated in a
- * {@link org.ehcache.Cache Cache}.
- *
- * Returning {@code null} indicates that the expiration time remains unchanged.
- *
- * Exceptions thrown from this method will be swallowed and result in the expiry duration being
- * {@link Duration#ZERO ZERO}.
- *
- * @param key the key of the updated entry
- * @param oldValue a value supplier for the previous value of the entry
- * @param newValue the new value of the entry
- * @return an expiration {@code Duration}, {@code null} means unchanged
- */
- Duration getExpiryForUpdate(K key, ValueSupplier<? extends V> oldValue, V newValue);
-
-}
diff --git a/api/src/main/java/org/ehcache/expiry/package-info.java b/api/src/main/java/org/ehcache/expiry/package-info.java
deleted file mode 100644
index 4ea3f314dd..0000000000
--- a/api/src/main/java/org/ehcache/expiry/package-info.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * {@link org.ehcache.expiry.Expiry Expiry} API of a {@link org.ehcache.Cache Cache}.
- */
-package org.ehcache.expiry;
\ No newline at end of file
diff --git a/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java b/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java
deleted file mode 100644
index 38e07d9418..0000000000
--- a/api/src/main/java/org/ehcache/spi/loaderwriter/CacheLoaderWriterProvider.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.spi.loaderwriter;
-
-import org.ehcache.config.CacheConfiguration;
-import org.ehcache.spi.service.Service;
-
-/**
- * A {@link Service} that creates {@link CacheLoaderWriter} instances.
- *
- * A {@code CacheManager} will use the {@link #createCacheLoaderWriter(java.lang.String, org.ehcache.config.CacheConfiguration)}
- * method to create {@code CacheLoaderWriter} instances for each {@code Cache} it
- * manages.
- *
- * For any non {@code null} value returned, the {@code Cache} will be configured to use the
- * {@code CacheLoaderWriter} instance returned.
- */
-public interface CacheLoaderWriterProvider extends Service {
-
- /**
- * Creates a {@code CacheLoaderWriter} for use with the {@link org.ehcache.Cache Cache}
- * of the given alias and configuration.
- *
- * @param alias the {@code Cache} alias in the {@code CacheManager}
- * @param cacheConfiguration the configuration for the associated cache
- * @param <K> the loader-writer key type
- * @param <V> the loader-writer value type
- *
- * @return the {@code CacheLoaderWriter} to be used by the {@code Cache} or {@code null} if none
- */
- <K, V> CacheLoaderWriter<? super K, V> createCacheLoaderWriter(String alias, CacheConfiguration<K, V> cacheConfiguration);
-
- /**
- * Releases a {@code CacheLoaderWriter} when the associated {@link org.ehcache.Cache Cache}
- * is finished with it.
- *
- * If the {@code CacheLoaderWriter} instance was user provided {@link java.io.Closeable#close() close}
- * will not be invoked.
- *
- * @param cacheLoaderWriter the {@code CacheLoaderWriter} being released
- * @throws Exception when the release fails
- */
- void releaseCacheLoaderWriter(CacheLoaderWriter<?, ?> cacheLoaderWriter) throws Exception;
-
-}
diff --git a/api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java b/api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java
deleted file mode 100644
index b7766dc7cf..0000000000
--- a/api/src/main/java/org/ehcache/spi/service/ServiceConfiguration.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.spi.service;
-
-/**
- * A configuration type to be used when interacting with a {@link Service}.
- *
- * @param <T> the service type this configuration works with
- *
- */
-public interface ServiceConfiguration<T extends Service> {
-
- /**
- * Indicates which service this configuration works with.
- *
- * @return the service type
- */
- Class<T> getServiceType();
-}
diff --git a/api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java b/api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java
deleted file mode 100644
index 4db06f3209..0000000000
--- a/api/src/main/java/org/ehcache/spi/service/ServiceCreationConfiguration.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.spi.service;
-
-/**
- * A configuration type used when creating a {@link Service}.
- *
- * @param <T> the service type this configuration works with
- *
- */
-public interface ServiceCreationConfiguration<T extends Service> {
-
- /**
- * Indicates which service consumes this configuration at creation.
- *
- * @return the service type
- */
- Class<T> getServiceType();
-}
diff --git a/api/src/test/java/org/ehcache/expiry/ExpirationsTest.java b/api/src/test/java/org/ehcache/expiry/ExpirationsTest.java
deleted file mode 100644
index 05d8d1988c..0000000000
--- a/api/src/test/java/org/ehcache/expiry/ExpirationsTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.ehcache.expiry;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.nullValue;
-
-import java.util.concurrent.TimeUnit;
-
-import org.ehcache.ValueSupplier;
-import org.junit.Test;
-
-public class ExpirationsTest {
-
- @Test
- public void testNoExpiration() {
- Expiry<Object, Object> expiry = Expirations.noExpiration();
- assertThat(expiry.getExpiryForCreation(this, this), equalTo(Duration.INFINITE));
- assertThat(expiry.getExpiryForAccess(this, holderOf(this)), nullValue());
- assertThat(expiry.getExpiryForUpdate(this, holderOf(this), this), nullValue());
- }
-
- @Test
- public void testTTIExpiration() {
- Duration duration = new Duration(1L, TimeUnit.SECONDS);
- Expiry<Object, Object> expiry = Expirations.timeToIdleExpiration(duration);
- assertThat(expiry.getExpiryForCreation(this, this), equalTo(duration));
- assertThat(expiry.getExpiryForAccess(this, holderOf(this)), equalTo(duration));
- assertThat(expiry.getExpiryForUpdate(this, holderOf(this), this), equalTo(duration));
- }
-
- @Test
- public void testTTLExpiration() {
- Duration duration = new Duration(1L, TimeUnit.SECONDS);
- Expiry<Object, Object> expiry = Expirations.timeToLiveExpiration(duration);
- assertThat(expiry.getExpiryForCreation(this, holderOf(this)), equalTo(duration));
- assertThat(expiry.getExpiryForAccess(this, holderOf(this)), nullValue());
- assertThat(expiry.getExpiryForUpdate(this, holderOf(this), this), equalTo(duration));
- }
-
- @Test
- public void testExpiration() {
- Duration creation = new Duration(1L, TimeUnit.SECONDS);
- Duration access = new Duration(2L, TimeUnit.SECONDS);
- Duration update = new Duration(3L, TimeUnit.SECONDS);
- Expiry<Object, Object> expiry = Expirations.builder().setCreate(creation).setAccess(access).setUpdate(update).build();
- assertThat(expiry.getExpiryForCreation(this, this), equalTo(creation));
- assertThat(expiry.getExpiryForAccess(this, holderOf(this)), equalTo(access));
- assertThat(expiry.getExpiryForUpdate(this, holderOf(this),this), equalTo(update));
- }
-
- private ValueSupplier<Object> holderOf(final Object obj) {
- return () -> obj;
- }
-}
diff --git a/azure-pipelines-static-analysis.yml b/azure-pipelines-static-analysis.yml
new file mode 100644
index 0000000000..4c3f99b9a8
--- /dev/null
+++ b/azure-pipelines-static-analysis.yml
@@ -0,0 +1,29 @@
+#
+# Copyright Terracotta, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# See shared code location for steps and parameters:
+# https://dev.azure.com/TerracottaCI/_git/terracotta
+
+resources:
+ repositories:
+ - repository: templates
+ type: git
+ name: terracotta/terracotta
+
+jobs:
+- template: build-templates/gradle-common.yml@templates
+ parameters:
+ gradleTasks: 'check'
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
new file mode 100644
index 0000000000..590980b08b
--- /dev/null
+++ b/azure-pipelines.yml
@@ -0,0 +1,49 @@
+#
+# Copyright Terracotta, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# See shared code location for steps and parameters:
+# https://dev.azure.com/TerracottaCI/_git/terracotta
+
+resources:
+ repositories:
+ - repository: templates
+ type: git
+ name: terracotta/terracotta
+
+jobs:
+- template: build-templates/gradle-common.yml@templates
+ parameters:
+ jdkVersion: '1.8'
+ jobName: 'LinuxJava8'
+
+- template: build-templates/gradle-common.yml@templates
+ parameters:
+ jdkVersion: '1.8'
+ options: '-PtestVM=java11Home'
+ jobName: 'LinuxJava11'
+
+- template: build-templates/gradle-common.yml@templates
+ parameters:
+ vmImage: 'windows-latest'
+ jdkVersion: '1.8'
+ jobName: 'WindowsJava8'
+
+- template: build-templates/gradle-common.yml@templates
+ parameters:
+ vmImage: 'windows-latest'
+ jdkVersion: '1.8'
+ options: '-PtestVM=java11Home'
+ jobName: 'WindowsJava11'
diff --git a/build-logic/build.gradle b/build-logic/build.gradle
new file mode 100644
index 0000000000..78eed08c1a
--- /dev/null
+++ b/build-logic/build.gradle
@@ -0,0 +1,67 @@
+plugins {
+ id 'java-gradle-plugin'
+}
+
+repositories {
+ gradlePluginPortal()
+ mavenCentral()
+}
+
+gradlePlugin {
+ plugins {
+ internalModule {
+ id = 'org.ehcache.build.internal-module'
+ implementationClass = 'org.ehcache.build.InternalEhcacheModule'
+ }
+ publicModule {
+ id = 'org.ehcache.build.public-module'
+ implementationClass = 'org.ehcache.build.PublicEhcacheModule'
+ }
+ clusteredModule {
+ id = 'org.ehcache.build.clustered-module'
+ implementationClass = 'org.ehcache.build.ClusteredEhcacheModule'
+ }
+ serverModule {
+ id = 'org.ehcache.build.clustered-server-module'
+ implementationClass = 'org.ehcache.build.ClusteredServerModule'
+ }
+ distribution {
+ id = 'org.ehcache.build.package'
+ implementationClass = 'org.ehcache.build.EhcachePackage'
+ }
+
+ variant {
+ id = 'org.ehcache.build.plugins.variant'
+ implementationClass = 'org.ehcache.build.plugins.VariantPlugin'
+ }
+
+ base {
+ id = 'org.ehcache.build.conventions.base'
+ implementationClass = 'org.ehcache.build.conventions.BaseConvention'
+ }
+ java {
+ id = 'org.ehcache.build.conventions.java'
+ implementationClass = 'org.ehcache.build.conventions.JavaConvention'
+ }
+ javaLibrary {
+ id = 'org.ehcache.build.conventions.java-library'
+ implementationClass = 'org.ehcache.build.conventions.JavaLibraryConvention'
+ }
+ war {
+ id = 'org.ehcache.build.conventions.war'
+ implementationClass = 'org.ehcache.build.conventions.WarConvention'
+ }
+ }
+}
+
+dependencies {
+ api gradleApi()
+ api 'biz.aQute.bnd:biz.aQute.bnd.gradle:6.0.0'
+ api 'gradle.plugin.com.github.jengelman.gradle.plugins:shadow:7.0.0'
+ api 'org.unbroken-dome.gradle-plugins:gradle-xjc-plugin:2.0.0'
+ api 'com.github.spotbugs.snom:spotbugs-gradle-plugin:4.7.9'
+ implementation 'biz.aQute.bnd:biz.aQute.bndlib:6.0.0'
+ implementation 'org.osgi:org.osgi.service.component.annotations:1.5.0'
+ implementation 'org.apache.felix:org.apache.felix.scr.generator:1.18.4'
+ implementation 'org.apache.felix:org.apache.felix.scr.ds-annotations:1.2.10'
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/ClusteredEhcacheModule.java b/build-logic/src/main/java/org/ehcache/build/ClusteredEhcacheModule.java
new file mode 100644
index 0000000000..5ca1afbab9
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/ClusteredEhcacheModule.java
@@ -0,0 +1,12 @@
+package org.ehcache.build;
+
+import org.gradle.api.Project;
+
+public class ClusteredEhcacheModule extends EhcacheModule {
+
+ @Override
+ public void apply(Project project) {
+ project.setGroup("org.ehcache.modules.clustered");
+ super.apply(project);
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/ClusteredServerModule.java b/build-logic/src/main/java/org/ehcache/build/ClusteredServerModule.java
new file mode 100644
index 0000000000..64be60977d
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/ClusteredServerModule.java
@@ -0,0 +1,17 @@
+package org.ehcache.build;
+
+import org.ehcache.build.conventions.DeployConvention;
+import org.ehcache.build.plugins.VoltronPlugin;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+
+public class ClusteredServerModule implements Plugin<Project> {
+
+ @Override
+ public void apply(Project project) {
+ project.setGroup("org.ehcache.modules.clustered");
+
+ project.getPlugins().apply(DeployConvention.class);
+ project.getPlugins().apply(VoltronPlugin.class);
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/EhcacheModule.java b/build-logic/src/main/java/org/ehcache/build/EhcacheModule.java
new file mode 100644
index 0000000000..dd3aa59140
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/EhcacheModule.java
@@ -0,0 +1,17 @@
+package org.ehcache.build;
+
+import org.ehcache.build.conventions.BndConvention;
+import org.ehcache.build.conventions.JavaLibraryConvention;
+import org.ehcache.build.conventions.DeployConvention;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+
+public abstract class EhcacheModule implements Plugin<Project> {
+
+ @Override
+ public void apply(Project project) {
+ project.getPlugins().apply(JavaLibraryConvention.class);
+ project.getPlugins().apply(DeployConvention.class);
+ project.getPlugins().apply(BndConvention.class);
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/EhcachePackage.java b/build-logic/src/main/java/org/ehcache/build/EhcachePackage.java
new file mode 100644
index 0000000000..112fd3d0d2
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/EhcachePackage.java
@@ -0,0 +1,16 @@
+package org.ehcache.build;
+
+import org.ehcache.build.conventions.DeployConvention;
+import org.ehcache.build.plugins.PackagePlugin;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+
+public class EhcachePackage implements Plugin<Project> {
+
+ @Override
+ public void apply(Project project) {
+ project.setGroup("org.ehcache");
+ project.getPlugins().apply(PackagePlugin.class);
+ project.getPlugins().apply(DeployConvention.class);
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/InternalEhcacheModule.java b/build-logic/src/main/java/org/ehcache/build/InternalEhcacheModule.java
new file mode 100644
index 0000000000..1ba6b69b8a
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/InternalEhcacheModule.java
@@ -0,0 +1,13 @@
+package org.ehcache.build;
+
+import org.gradle.api.Project;
+
+public class InternalEhcacheModule extends EhcacheModule {
+
+ @Override
+ public void apply(Project project) {
+ project.setGroup("org.ehcache.modules");
+ super.apply(project);
+ }
+}
+
diff --git a/build-logic/src/main/java/org/ehcache/build/PublicEhcacheModule.java b/build-logic/src/main/java/org/ehcache/build/PublicEhcacheModule.java
new file mode 100644
index 0000000000..477c54b3e7
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/PublicEhcacheModule.java
@@ -0,0 +1,11 @@
+package org.ehcache.build;
+
+import org.gradle.api.Project;
+
+public class PublicEhcacheModule extends EhcacheModule {
+ @Override
+ public void apply(Project project) {
+ project.setGroup("org.ehcache");
+ super.apply(project);
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/BaseConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/BaseConvention.java
new file mode 100644
index 0000000000..edf95548a1
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/BaseConvention.java
@@ -0,0 +1,23 @@
+package org.ehcache.build.conventions;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.ResolutionStrategy;
+import org.gradle.api.plugins.BasePlugin;
+
+import java.net.URI;
+
+public class BaseConvention implements Plugin<Project> {
+
+ @Override
+ public void apply(Project project) {
+ project.getPlugins().apply(BasePlugin.class);
+
+ project.getRepositories().mavenCentral();
+ project.getRepositories().maven(repo -> repo.setUrl(URI.create("https://repo.terracotta.org/maven2")));
+
+ project.getConfigurations().configureEach(
+ config -> config.resolutionStrategy(ResolutionStrategy::failOnVersionConflict)
+ );
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/BndConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/BndConvention.java
new file mode 100644
index 0000000000..bbb915168e
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/BndConvention.java
@@ -0,0 +1,89 @@
+package org.ehcache.build.conventions;
+
+import aQute.bnd.gradle.BndBuilderPlugin;
+import aQute.bnd.gradle.BundleTaskExtension;
+import aQute.bnd.osgi.Constants;
+import org.ehcache.build.plugins.osgids.OsgiDsPlugin;
+import org.gradle.api.Action;
+import org.gradle.api.GradleException;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.Dependency;
+import org.gradle.api.artifacts.ExternalDependency;
+import org.gradle.api.artifacts.ProjectDependency;
+import org.gradle.api.plugins.JavaPlugin;
+import org.gradle.api.provider.MapProperty;
+import org.gradle.api.publish.PublishingExtension;
+import org.gradle.api.publish.maven.MavenPublication;
+import org.gradle.api.publish.plugins.PublishingPlugin;
+import org.gradle.api.tasks.bundling.Jar;
+
+import static java.lang.System.lineSeparator;
+import static java.util.stream.Collectors.joining;
+
+public class BndConvention implements Plugin<Project> {
+
+ @Override
+ public void apply(Project project) {
+ project.getPlugins().apply(BndBuilderPlugin.class);
+ project.getPlugins().apply(OsgiDsPlugin.class);
+ project.getPlugins().apply(DeployConvention.class);
+
+ project.getTasks().named(JavaPlugin.JAR_TASK_NAME, Jar.class, jar -> {
+ jar.getExtensions().configure(BundleTaskExtension.class, bundle -> configureBundleDefaults(project, bundle));
+ });
+
+ project.getConfigurations().named("baseline", config -> {
+ config.getResolutionStrategy().getComponentSelection().all(selection -> {
+ if (!selection.getCandidate().getVersion().matches("^\\d+(?:\\.\\d+)*$")) {
+ selection.reject("Only full releases can be used as OSGi baselines");
+ }
+ });
+ });
+
+ String dependencyNotation = project.getGroup() + ":" + project.getName() + ":(," + project.getVersion() + "[";
+ Dependency baseline = project.getDependencies().add("baseline", dependencyNotation);
+ if (baseline instanceof ProjectDependency) {
+ throw new GradleException("Baseline should not be a project dependency");
+ } else if (baseline instanceof ExternalDependency) {
+ ((ExternalDependency) baseline).setForce(true);
+ ((ExternalDependency) baseline).setTransitive(false);
+ } else {
+ throw new IllegalArgumentException("Unexpected dependency type: " + baseline);
+ }
+ }
+
+ public static void configureBundleDefaults(Project project, BundleTaskExtension bundle) {
+ MapProperty<String, String> defaultInstructions = project.getObjects().mapProperty(String.class, String.class);
+ bundleDefaults(project).execute(defaultInstructions);
+ bundle.bnd(defaultInstructions.map(kv -> kv.entrySet().stream().map(e -> e.getKey() + "=" + e.getValue()).collect(joining(lineSeparator()))));
+ }
+
+ public static Action<MapProperty<String, String>> bundleDefaults(Project project) {
+ return properties -> {
+ project.getPlugins().withType(PublishingPlugin.class).configureEach(publishingPlugin -> {
+ project.getExtensions().getByType(PublishingExtension.class).getPublications().withType(MavenPublication.class).stream().findAny().ifPresent(publication -> {
+ properties.put(Constants.BUNDLE_NAME, publication.getPom().getName());
+ properties.put(Constants.BUNDLE_DESCRIPTION, publication.getPom().getDescription());
+ });
+ });
+ properties.put(Constants.BUNDLE_SYMBOLICNAME, project.getGroup() + "." + project.getName());
+ properties.put(Constants.BUNDLE_DOCURL, "http://ehcache.org");
+ properties.put(Constants.BUNDLE_LICENSE, "LICENSE");
+ properties.put(Constants.BUNDLE_VENDOR, "Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.");
+ properties.put(Constants.BUNDLE_VERSION, osgiFixedVersion(project.getVersion().toString()));
+ properties.put(Constants.SERVICE_COMPONENT, "OSGI-INF/*.xml");
+ };
+ }
+
+ public static String osgiFixedVersion(String version) {
+ /*
+ * The bnd gradle plugin does not handle our 2-digit snapshot versioning scheme very well. It maps `x.y-SNAPSHOT`
+ * to `x.y.0.SNAPSHOT`. This is bad since `x.y.0.SNAPSHOT` is considered to be less than *all* `x.y.z`. This means
+ * the baseline version range expression `(,x.y.0.SNAPSHOT[` will always pick the last release from the previous
+ * minor line. To fix this we manually map to a 3-digit snapshot version where the 3rd digit is a number chosen
+ * to be higher than we would ever release ('All the worlds a VAX').
+ */
+ return version.replaceAll("^(\\d+.\\d+)-SNAPSHOT$", "$1.999-SNAPSHOT");
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/CheckstyleConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/CheckstyleConvention.java
new file mode 100644
index 0000000000..0973fc1237
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/CheckstyleConvention.java
@@ -0,0 +1,22 @@
+package org.ehcache.build.conventions;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.plugins.quality.CheckstyleExtension;
+import org.gradle.api.plugins.quality.CheckstylePlugin;
+
+import java.util.Map;
+
+public class CheckstyleConvention implements Plugin<Project> {
+ @Override
+ public void apply(Project project) {
+ project.getPlugins().apply(CheckstylePlugin.class);
+
+ project.getExtensions().configure(CheckstyleExtension.class, checkstyle -> {
+ checkstyle.setConfigFile(project.getRootProject().file("config/checkstyle.xml"));
+ Map<String, Object> properties = checkstyle.getConfigProperties();
+ properties.put("projectDir", project.getProjectDir());
+ properties.put("rootDir", project.getRootDir());
+ });
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/DeployConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/DeployConvention.java
new file mode 100644
index 0000000000..bd7376ef8b
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/DeployConvention.java
@@ -0,0 +1,167 @@
+package org.ehcache.build.conventions;
+
+import org.gradle.api.Action;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.Task;
+import org.gradle.api.artifacts.ProjectDependency;
+import org.gradle.api.internal.artifacts.ivyservice.projectmodule.ProjectComponentPublication;
+import org.gradle.api.internal.component.SoftwareComponentInternal;
+import org.gradle.api.plugins.JavaPlugin;
+import org.gradle.api.plugins.JavaPluginExtension;
+import org.gradle.api.publish.PublishingExtension;
+import org.gradle.api.publish.maven.MavenPublication;
+import org.gradle.api.publish.maven.internal.publication.MavenPomInternal;
+import org.gradle.api.publish.maven.internal.publisher.MavenProjectIdentity;
+import org.gradle.api.publish.maven.plugins.MavenPublishPlugin;
+import org.gradle.api.publish.maven.tasks.AbstractPublishToMaven;
+import org.gradle.api.publish.maven.tasks.GenerateMavenPom;
+import org.gradle.api.publish.maven.tasks.PublishToMavenRepository;
+import org.gradle.api.tasks.TaskProvider;
+import org.gradle.api.tasks.WriteProperties;
+import org.gradle.jvm.tasks.Jar;
+import org.gradle.plugins.signing.SigningExtension;
+import org.gradle.plugins.signing.SigningPlugin;
+
+import java.io.File;
+import java.util.Collection;
+import java.util.concurrent.Callable;
+import java.util.function.Predicate;
+
+import static java.util.stream.Collectors.toList;
+import static org.gradle.api.publish.plugins.PublishingPlugin.PUBLISH_TASK_GROUP;
+
+/**
+ * Deploy plugin for published artifacts. This is an abstraction over the {@code maven-publish} plugin.
+ *
+ * Defaults:
+ *
+ * POM: population of general content: organization, issue-management, scm, etc.
+ * POM copied to {@code META-INF/maven/groupId/artifactId/pom.xml}
+ * POM properties file copied to {@code META-INF/maven/groupId/artifactId/pom.properties}
+ * Javadoc and Source JAR Publishing
+ * {@code install} as alias of {@code publishToMavenLocal}
+ *
+ */
+public class DeployConvention implements Plugin<Project> {
+
+  /** A project is a release when its version does not carry the {@code -SNAPSHOT} suffix. */
+  private static final Predicate<Project> IS_RELEASE = p -> !p.getVersion().toString().endsWith("-SNAPSHOT");
+
+  @Override
+  public void apply(Project project) {
+    project.getPlugins().apply(SigningPlugin.class);
+    project.getPlugins().apply(MavenPublishPlugin.class);
+
+    // Populate the common POM content on every Maven publication, and select the
+    // deployment repository based on whether this build is a release or a snapshot.
+    project.getExtensions().configure(PublishingExtension.class, publishing -> {
+      publishing.getPublications().withType(MavenPublication.class).configureEach(mavenPublication -> mavenPublication.pom(pom -> {
+        pom.getUrl().set("http://ehcache.org");
+        pom.organization(org -> {
+          org.getName().set("Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.");
+          org.getUrl().set("http://terracotta.org");
+        });
+        pom.issueManagement(issue -> {
+          issue.getSystem().set("Github");
+          issue.getUrl().set("https://github.com/ehcache/ehcache3/issues");
+        });
+        pom.scm(scm -> {
+          scm.getUrl().set("https://github.com/ehcache/ehcache3");
+          scm.getConnection().set("scm:git:https://github.com/ehcache/ehcache3.git");
+          scm.getDeveloperConnection().set("scm:git:git@github.com:ehcache/ehcache3.git");
+        });
+        pom.licenses(licenses -> licenses.license(license -> {
+          license.getName().set("The Apache Software License, Version 2.0");
+          license.getUrl().set("http://www.apache.org/licenses/LICENSE-2.0.txt");
+          license.getDistribution().set("repo");
+        }));
+        pom.developers(devs -> devs.developer(dev -> {
+          dev.getName().set("Terracotta Engineers");
+          dev.getEmail().set("tc-oss@softwareag.com");
+          dev.getOrganization().set("Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.");
+          dev.getOrganizationUrl().set("http://ehcache.org");
+        }));
+      }));
+      publishing.repositories(repositories -> repositories.maven(maven -> {
+        if (IS_RELEASE.test(project)) {
+          // Releases go to the explicitly configured deployment repository.
+          maven.setUrl(project.property("deployUrl"));
+          maven.credentials(creds -> {
+            creds.setUsername(project.property("deployUser").toString());
+            creds.setPassword(project.property("deployPwd").toString());
+          });
+        } else {
+          // Snapshots always go to the Sonatype OSS snapshot repository.
+          maven.setName("sonatype-nexus-snapshot");
+          maven.setUrl("https://oss.sonatype.org/content/repositories/snapshots");
+          maven.credentials(creds -> {
+            creds.setUsername(project.property("sonatypeUser").toString());
+            creds.setPassword(project.property("sonatypePwd").toString());
+          });
+        }
+      }));
+    });
+
+    // Signing is only required when actually publishing a release to a remote repository;
+    // the task-graph check keeps local/snapshot builds unsigned.
+    project.getExtensions().configure(SigningExtension.class, signing -> {
+      signing.setRequired((Callable<Boolean>) () -> IS_RELEASE.test(project) && project.getGradle().getTaskGraph().getAllTasks().stream().anyMatch(t -> t instanceof PublishToMavenRepository));
+      signing.sign(project.getExtensions().getByType(PublishingExtension.class).getPublications());
+    });
+
+    /*
+     * Do **not** convert the anonymous Action here to a lambda expression - it will break Gradle's up-to-date tracking
+     * and cause tasks to be needlessly rerun.
+     */
+    //noinspection Convert2Lambda
+    project.getTasks().withType(AbstractPublishToMaven.class).configureEach(publishTask -> publishTask.doFirst(new Action<Task>() {
+      @Override
+      public void execute(Task task) {
+        MavenPublication publication = publishTask.getPublication();
+        if (publication instanceof ProjectComponentPublication) {
+          SoftwareComponentInternal component = ((ProjectComponentPublication) publication).getComponent();
+          if (component != null) { //The shadow plugin doesn't associate a component with the publication
+            // Warn when a published module depends on a sibling project that is itself unpublished.
+            Collection<ProjectDependency> unpublishedDeps = component.getUsages().stream().flatMap(usage ->
+              usage.getDependencies().stream().filter(ProjectDependency.class::isInstance).map(ProjectDependency.class::cast).filter(moduleDependency ->
+                !moduleDependency.getDependencyProject().getPlugins().hasPlugin(DeployConvention.class))).collect(toList());
+            if (!unpublishedDeps.isEmpty()) {
+              project.getLogger().warn("{} has applied the deploy plugin but has unpublished project dependencies: {}", project, unpublishedDeps);
+            }
+          }
+        }
+      }
+    }));
+
+    // Maven-style alias: `gradle install` == `gradle publishToMavenLocal`.
+    project.getTasks().register("install", task ->
+      task.dependsOn(project.getTasks().named(MavenPublishPlugin.PUBLISH_LOCAL_LIFECYCLE_TASK_NAME))
+    );
+
+    project.getPlugins().withType(JavaPlugin.class).configureEach(plugin -> {
+      project.getExtensions().configure(JavaPluginExtension.class, java -> {
+        java.withJavadocJar();
+        java.withSourcesJar();
+      });
+
+      project.afterEvaluate(p -> {
+        // Provide a default publication of the `java` component if the project declared none.
+        p.getExtensions().configure(PublishingExtension.class, publishing -> {
+          if (publishing.getPublications().isEmpty()) {
+            publishing.publications(publications -> publications.register("mavenJava", MavenPublication.class, mavenJava -> mavenJava.from(p.getComponents().getByName("java"))));
+          }
+        });
+
+        // For every generated POM, also generate a pom.properties and embed both under
+        // META-INF/maven/<groupId>/<artifactId>/ in each produced jar.
+        p.getTasks().withType(GenerateMavenPom.class).all(pomTask -> {
+          MavenProjectIdentity identity = ((MavenPomInternal) pomTask.getPom()).getProjectIdentity();
+          TaskProvider<WriteProperties> pomPropertiesTask = project.getTasks().register(pomTask.getName().replace("PomFile", "PomProperties"), WriteProperties.class, task -> {
+            task.dependsOn(pomTask);
+            task.setGroup(PUBLISH_TASK_GROUP);
+            task.setOutputFile(new File(pomTask.getDestination().getParentFile(), "pom.properties"));
+            task.property("groupId", identity.getGroupId());
+            task.property("artifactId", identity.getArtifactId());
+            task.property("version", identity.getVersion());
+          });
+
+          project.getTasks().withType(Jar.class).configureEach(jar -> {
+            jar.into("META-INF/maven/" + identity.getGroupId().get() + "/" + identity.getArtifactId().get(), spec -> {
+              spec.from(pomTask, pom -> pom.rename(".*", "pom.xml"));
+              spec.from(pomPropertiesTask);
+            });
+          });
+        });
+      });
+    });
+  }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/JacocoConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/JacocoConvention.java
new file mode 100644
index 0000000000..66f96ef814
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/JacocoConvention.java
@@ -0,0 +1,28 @@
+package org.ehcache.build.conventions;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.tasks.testing.Test;
+import org.gradle.testing.jacoco.plugins.JacocoPlugin;
+import org.gradle.testing.jacoco.plugins.JacocoTaskExtension;
+import org.gradle.testing.jacoco.tasks.JacocoReport;
+
+/**
+ * Convention plugin for JaCoCo coverage: report generation is disabled by default
+ * (coverage data is still collected), and tripwire classes are excluded from
+ * instrumentation during test runs.
+ */
+public class JacocoConvention implements Plugin<Project> {
+
+  @Override
+  public void apply(Project project) {
+    project.getPlugins().apply(JacocoPlugin.class);
+
+    // Collect execution data but do not produce any reports unless explicitly re-enabled.
+    project.getTasks().withType(JacocoReport.class).configureEach(jacocoReport -> {
+      jacocoReport.getReports().configureEach(report -> {
+        report.getRequired().set(false);
+      });
+    });
+
+    // Exclude the tripwire monitoring classes from coverage instrumentation.
+    project.getTasks().withType(Test.class).configureEach(test -> {
+      test.getExtensions().configure(JacocoTaskExtension.class, jacoco -> {
+        jacoco.getExcludes().add("org.terracotta.tripwire.*");
+      });
+    });
+  }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/JavaBaseConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/JavaBaseConvention.java
new file mode 100644
index 0000000000..83ce040a2c
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/JavaBaseConvention.java
@@ -0,0 +1,131 @@
+package org.ehcache.build.conventions;
+
+import org.gradle.api.JavaVersion;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.java.archives.Attributes;
+import org.gradle.api.plugins.JavaBasePlugin;
+import org.gradle.api.plugins.JavaPluginExtension;
+import org.gradle.api.tasks.bundling.Jar;
+import org.gradle.api.tasks.compile.JavaCompile;
+import org.gradle.api.tasks.javadoc.Javadoc;
+import org.gradle.api.tasks.testing.Test;
+import org.gradle.external.javadoc.CoreJavadocOptions;
+import org.gradle.internal.jvm.JavaInfo;
+import org.gradle.internal.jvm.Jvm;
+import org.gradle.process.internal.ExecException;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.OutputStream;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static java.util.Arrays.asList;
+
+/**
+ * Base Java convention: Java 8 source/target compatibility, standard jar manifest
+ * attributes (including the git revision), strict {@code -Werror -Xlint:all}
+ * compilation, lenient Javadoc, and support for running tests on an alternative
+ * JVM selected via the {@code testVM} project property.
+ */
+public class JavaBaseConvention implements Plugin<Project> {
+  @Override
+  public void apply(Project project) {
+    project.getPlugins().apply(JavaBasePlugin.class);
+    project.getPlugins().apply(BaseConvention.class);
+
+    // Resolve the JVM used to execute tests (defaults to the build JVM) and expose
+    // it to build scripts as the `testJava` extra property.
+    JavaInfo testJava = fetchTestJava(project);
+    project.getExtensions().getExtraProperties().set("testJava", testJava);
+
+    project.getExtensions().configure(JavaPluginExtension.class, java -> {
+      java.setSourceCompatibility(JavaVersion.VERSION_1_8);
+      java.setTargetCompatibility(JavaVersion.VERSION_1_8);
+    });
+
+    project.getTasks().withType(Jar.class).configureEach(jar -> {
+      jar.manifest(manifest -> {
+        Attributes attributes = manifest.getAttributes();
+        attributes.put("Implementation-Title", project.getName());
+        attributes.put("Implementation-Vendor-Id", project.getGroup());
+        attributes.put("Implementation-Version", project.getVersion());
+        attributes.put("Implementation-Revision", getRevision(project));
+        attributes.put("Built-By", System.getProperty("user.name"));
+        attributes.put("Built-JDK", System.getProperty("java.version"));
+      });
+      jar.from(project.getRootProject().file("LICENSE"));
+    });
+
+    project.getTasks().withType(Test.class).configureEach(test -> {
+      test.setExecutable(testJava.getJavaExecutable());
+      test.setMaxHeapSize("256m");
+      test.setMaxParallelForks(16);
+      test.systemProperty("java.awt.headless", "true");
+    });
+
+    project.getTasks().withType(JavaCompile.class).configureEach(compile -> {
+      compile.getOptions().setEncoding("UTF-8");
+      compile.getOptions().setCompilerArgs(asList("-Werror", "-Xlint:all"));
+    });
+
+    project.getTasks().withType(Javadoc.class).configureEach(javadoc -> {
+      javadoc.setTitle(project.getName() + " " + project.getVersion() + " API");
+      javadoc.exclude("**/internal/**");
+      javadoc.getOptions().setEncoding("UTF-8");
+      ((CoreJavadocOptions) javadoc.getOptions()).addStringOption("Xdoclint:none", "-quiet");
+    });
+  }
+
+  /**
+   * Determines the JVM to run tests on. The {@code testVM} property may be either a
+   * path to a JVM home, or the name of another project property holding such a path.
+   */
+  private static JavaInfo fetchTestJava(Project project) {
+    Object testVM = project.findProperty("testVM");
+    if (testVM == null) {
+      return Jvm.current();
+    } else {
+      File jvmHome = project.file(testVM);
+      if (!jvmHome.exists() && project.hasProperty(testVM.toString())) {
+        // Indirection: `testVM` names another property that holds the actual path.
+        testVM = project.property(testVM.toString());
+        jvmHome = project.file(testVM);
+      }
+
+      return jvmForHome(project, jvmHome);
+    }
+  }
+
+  // Matches e.g. `openjdk version "11.0.2"`; the named group carries the version string.
+  // Note: the named group `(?<version>...)` is required - `matcher.group("version")` below relies on it.
+  private static final Pattern VERSION_OUTPUT = Pattern.compile("\\w+ version \"(?<version>.+)\"");
+
+  /** Probes {@code home}'s java executable with {@code -version} to discover its Java version. */
+  private static Jvm jvmForHome(Project project, File home) {
+    File java = Jvm.forHome(home).getJavaExecutable();
+
+    OutputStream stdout = new ByteArrayOutputStream();
+    OutputStream stderr = new ByteArrayOutputStream();
+    project.exec(spec -> {
+      spec.executable(java);
+      spec.args("-version");
+      spec.setStandardOutput(stdout);
+      spec.setErrorOutput(stderr);
+    });
+    // `java -version` writes to stderr, not stdout.
+    String versionOutput = stderr.toString();
+    Matcher matcher = VERSION_OUTPUT.matcher(versionOutput);
+    if (matcher.find()) {
+      return Jvm.discovered(home, null, JavaVersion.toVersion(matcher.group("version")));
+    } else {
+      throw new IllegalArgumentException("Could not parse version of " + java + " from output:\n" + versionOutput);
+    }
+  }
+
+
+  /** Returns the git revision being built: from {@code GIT_COMMIT} (CI), else {@code git rev-parse HEAD}, else "Unknown". */
+  private static Object getRevision(Project project) {
+    String envCommit = System.getenv("GIT_COMMIT");
+    if(envCommit != null) {
+      return envCommit;
+    } else {
+      try {
+        ByteArrayOutputStream stdout = new ByteArrayOutputStream();
+        ByteArrayOutputStream stderr = new ByteArrayOutputStream();
+        project.exec(spec -> {
+          spec.executable("git");
+          spec.args("rev-parse", "HEAD");
+          spec.setStandardOutput(stdout);
+          spec.setErrorOutput(stderr);
+        }).assertNormalExitValue();
+
+        return stdout.toString().trim();
+      } catch (ExecException e) {
+        // Not a git checkout (e.g. source tarball) - fall back to a placeholder.
+        return "Unknown";
+      }
+    }
+  }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/JavaConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/JavaConvention.java
new file mode 100644
index 0000000000..5e50b677ae
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/JavaConvention.java
@@ -0,0 +1,27 @@
+package org.ehcache.build.conventions;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.dsl.DependencyHandler;
+import org.gradle.api.plugins.JavaPlugin;
+
+/**
+ * Standard Java project convention: layers the base Java, Checkstyle, JaCoCo and
+ * SpotBugs conventions, and wires in the common logging and test dependencies
+ * (versions come from project properties, typically gradle.properties).
+ */
+public class JavaConvention implements Plugin<Project> {
+  @Override
+  public void apply(Project project) {
+    project.getPlugins().apply(JavaBaseConvention.class);
+    project.getPlugins().apply(JavaPlugin.class);
+    project.getPlugins().apply(CheckstyleConvention.class);
+    project.getPlugins().apply(JacocoConvention.class);
+    project.getPlugins().apply(SpotbugsConvention.class);
+
+    DependencyHandler dependencies = project.getDependencies();
+    // Logging: the API is part of every module; the simple binding only at test runtime.
+    dependencies.add(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME, "org.slf4j:slf4j-api:" + project.property("slf4jVersion"));
+    dependencies.add(JavaPlugin.TEST_RUNTIME_ONLY_CONFIGURATION_NAME, "org.slf4j:slf4j-simple:" + project.property("slf4jVersion"));
+
+    // Common test stack shared by every Java module.
+    dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "junit:junit:" + project.property("junitVersion"));
+    dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "org.assertj:assertj-core:" + project.property("assertjVersion"));
+    dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "org.hamcrest:hamcrest-library:" + project.property("hamcrestVersion"));
+    dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "org.mockito:mockito-core:" + project.property("mockitoVersion"));
+    dependencies.add(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME, "org.terracotta:terracotta-utilities-test-tools:" + project.property("terracottaUtilitiesVersion"));
+  }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/JavaLibraryConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/JavaLibraryConvention.java
new file mode 100644
index 0000000000..77b15e4b37
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/JavaLibraryConvention.java
@@ -0,0 +1,14 @@
+package org.ehcache.build.conventions;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.plugins.JavaLibraryPlugin;
+
+/**
+ * Convention for Java library modules: the standard {@link JavaConvention} plus
+ * Gradle's {@code java-library} plugin (adds the api/apiElements configurations).
+ */
+public class JavaLibraryConvention implements Plugin<Project> {
+
+  @Override
+  public void apply(Project project) {
+    project.getPlugins().apply(JavaConvention.class);
+    project.getPlugins().apply(JavaLibraryPlugin.class);
+  }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/SpotbugsConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/SpotbugsConvention.java
new file mode 100644
index 0000000000..9815385453
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/SpotbugsConvention.java
@@ -0,0 +1,53 @@
+package org.ehcache.build.conventions;
+
+import com.github.spotbugs.snom.SpotBugsExtension;
+import com.github.spotbugs.snom.SpotBugsPlugin;
+import com.github.spotbugs.snom.SpotBugsTask;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.plugins.JavaBasePlugin;
+import org.gradle.api.plugins.JavaPluginExtension;
+
+/**
+ * Convention plugin for SpotBugs static analysis: pins the tool version, fails the
+ * build on findings, adds the annotations artifact to every source set, disables
+ * analysis of test sources, and emits XML (not HTML) reports.
+ */
+public class SpotbugsConvention implements Plugin<Project> {
+
+  @Override
+  public void apply(Project project) {
+    project.getPlugins().apply(SpotBugsPlugin.class);
+
+    SpotBugsExtension spotbugs = project.getExtensions().getByType(SpotBugsExtension.class);
+
+    spotbugs.getIgnoreFailures().set(false);
+    // Later versions of Spotbugs have stupid heuristics for EI_EXPOSE_REP*
+    spotbugs.getToolVersion().set("4.2.3");
+
+    project.getPlugins().withType(JavaBasePlugin.class).configureEach(plugin -> {
+
+      project.getExtensions().configure(JavaPluginExtension.class, java -> {
+        // Make @SuppressFBWarnings et al. available at compile time in every source set.
+        java.getSourceSets().configureEach(sourceSet -> {
+          project.getDependencies().add(sourceSet.getCompileOnlyConfigurationName(),
+            "com.github.spotbugs:spotbugs-annotations:" + spotbugs.getToolVersion().get());
+        });
+
+        project.getTasks().withType(SpotBugsTask.class).configureEach(task -> {
+          if (task.getName().contains("Test")) {
+            // Only analyse production code - skip the test source sets.
+            task.setEnabled(false);
+          } else {
+            task.getReports().register("xml", report -> report.setEnabled(true));
+            task.getReports().register("html", report -> report.setEnabled(false));
+          }
+        });
+      });
+
+    });
+
+
+    project.getConfigurations().named("spotbugs", config -> {
+      config.getResolutionStrategy().dependencySubstitution(subs -> {
+        subs.substitute(subs.module("org.apache.commons:commons-lang3:3.11"))
+          .using(subs.module("org.apache.commons:commons-lang3:3.12.0"))
+          .because("Spotbugs has dependency divergences");
+      });
+    });
+
+  }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/conventions/WarConvention.java b/build-logic/src/main/java/org/ehcache/build/conventions/WarConvention.java
new file mode 100644
index 0000000000..ec469eda6a
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/conventions/WarConvention.java
@@ -0,0 +1,13 @@
+package org.ehcache.build.conventions;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.plugins.WarPlugin;
+
+/**
+ * Convention for WAR-producing modules: Gradle's {@code war} plugin combined with
+ * the standard {@link JavaConvention}.
+ */
+public class WarConvention implements Plugin<Project> {
+  @Override
+  public void apply(Project project) {
+    project.getPlugins().apply(WarPlugin.class);
+    project.getPlugins().apply(JavaConvention.class);
+  }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/PackagePlugin.java b/build-logic/src/main/java/org/ehcache/build/plugins/PackagePlugin.java
new file mode 100644
index 0000000000..40070ea348
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/plugins/PackagePlugin.java
@@ -0,0 +1,285 @@
+/*
+ * Copyright Terracotta, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.ehcache.build.plugins;
+
+import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar;
+import org.ehcache.build.conventions.BaseConvention;
+import org.ehcache.build.conventions.BndConvention;
+import org.ehcache.build.conventions.JavaBaseConvention;
+import org.ehcache.build.util.OsgiManifestJarExtension;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.capabilities.Capability;
+import org.gradle.api.component.AdhocComponentWithVariants;
+import org.gradle.api.component.SoftwareComponentFactory;
+import org.gradle.api.file.DuplicatesStrategy;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.file.FileTree;
+import org.gradle.api.internal.project.ProjectInternal;
+import org.gradle.api.plugins.BasePlugin;
+import org.gradle.api.plugins.JavaBasePlugin;
+import org.gradle.api.plugins.jvm.internal.JvmPluginServices;
+import org.gradle.api.provider.Provider;
+import org.gradle.api.publish.PublishingExtension;
+import org.gradle.api.publish.maven.MavenPublication;
+import org.gradle.api.publish.maven.plugins.MavenPublishPlugin;
+import org.gradle.api.tasks.Sync;
+import org.gradle.api.tasks.TaskProvider;
+import org.gradle.api.tasks.bundling.Jar;
+import org.gradle.api.tasks.bundling.Zip;
+import org.gradle.api.tasks.javadoc.Javadoc;
+import org.gradle.internal.jvm.Jvm;
+import org.gradle.internal.resolve.ArtifactResolveException;
+import org.gradle.internal.service.ServiceRegistry;
+import org.gradle.language.base.plugins.LifecycleBasePlugin;
+
+import java.io.File;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+
+import static java.lang.Integer.parseInt;
+import static java.util.Collections.emptyList;
+import static org.ehcache.build.plugins.VariantPlugin.COMMON_SOURCE_SET_NAME;
+import static org.ehcache.build.util.PluginUtils.bucket;
+import static org.ehcache.build.util.PluginUtils.createBucket;
+import static org.ehcache.build.util.PluginUtils.capitalize;
+import static org.gradle.api.attributes.DocsType.JAVADOC;
+import static org.gradle.api.attributes.DocsType.SOURCES;
+import static org.gradle.api.attributes.DocsType.USER_MANUAL;
+import static org.gradle.api.internal.artifacts.JavaEcosystemSupport.configureDefaultTargetPlatform;
+import static org.gradle.api.plugins.JavaPlugin.API_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.API_ELEMENTS_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.COMPILE_ONLY_API_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.JAVADOC_ELEMENTS_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.RUNTIME_ELEMENTS_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.RUNTIME_ONLY_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.SOURCES_ELEMENTS_CONFIGURATION_NAME;
+
+/**
+ * EhDistribute: assembles "package" modules - uber-jars built by shadowing the
+ * contents of other modules, published with OSGi manifests, matching sources and
+ * javadoc jars, and full Gradle module metadata (including optional variants).
+ */
+public class PackagePlugin implements Plugin<Project> {
+
+  private static final String CONTENTS_CONFIGURATION_NAME = "contents";
+
+  @Override
+  public void apply(Project project) {
+    project.getPlugins().apply(BaseConvention.class);
+    project.getPlugins().apply(JavaBaseConvention.class);
+
+    ServiceRegistry projectServices = ((ProjectInternal) project).getServices();
+    JvmPluginServices jvmPluginServices = projectServices.get(JvmPluginServices.class);
+    SoftwareComponentFactory softwareComponentFactory = projectServices.get(SoftwareComponentFactory.class);
+    AdhocComponentWithVariants javaComponent = softwareComponentFactory.adhoc("java");
+    project.getComponents().add(javaComponent);
+
+    // Publish the asciidoc user documentation as a `docs` classified zip variant.
+    TaskProvider<Zip> asciidocZip = project.getTasks().register("asciidocZip", Zip.class, zip -> {
+      zip.getArchiveClassifier().set("docs");
+      zip.from(project.getTasks().getByPath(":docs:userDoc"));
+    });
+    Configuration userdocElements = jvmPluginServices.createOutgoingElements("userdocElements", builder ->
+      builder.published().artifact(asciidocZip).providesAttributes(attributes -> attributes.documentation(USER_MANUAL)));
+    javaComponent.addVariantsFromConfiguration(userdocElements, variantDetails -> {});
+
+    createDefaultPackage(project);
+
+    // When the variant plugin is also applied, mirror the default package for each
+    // declared variant, sharing `common` buckets across all of them.
+    project.getPlugins().withType(VariantPlugin.class).configureEach(plugin -> {
+      Configuration commonContents = createBucket(project, CONTENTS_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME);
+      Configuration commonApi = createBucket(project, API_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME);
+      Configuration commonImplementation = createBucket(project, IMPLEMENTATION_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME).extendsFrom(commonApi);
+      Configuration commonCompileOnlyApi = createBucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME);
+      Configuration commonRuntimeOnly = createBucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME);
+
+      project.getConfigurations().named(CONTENTS_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonContents));
+      project.getConfigurations().named(API_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonApi));
+      project.getConfigurations().named(IMPLEMENTATION_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonImplementation));
+      project.getConfigurations().named(COMPILE_ONLY_API_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonCompileOnlyApi));
+      project.getConfigurations().named(RUNTIME_ONLY_CONFIGURATION_NAME).configure(conf -> conf.extendsFrom(commonRuntimeOnly));
+
+      project.getExtensions().configure(VariantPlugin.VariantExtension.class, variants -> {
+        variants.getVariants().configureEach(variant -> {
+          createPackage(project, variant.getName(), variant.getCapabilities().get());
+
+          bucket(project, CONTENTS_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonContents);
+          bucket(project, API_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonApi);
+          bucket(project, IMPLEMENTATION_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonImplementation);
+          bucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonCompileOnlyApi);
+          bucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonRuntimeOnly);
+        });
+      });
+    });
+
+    project.getPlugins().withType(MavenPublishPlugin.class).configureEach(plugin -> {
+      project.getExtensions().configure(PublishingExtension.class, publishing -> {
+        publishing.getPublications().register("mavenJava", MavenPublication.class, mavenPublication -> {
+          mavenPublication.from(javaComponent);
+        });
+      });
+    });
+  }
+
+  /** Creates the unclassified (default) package: no variant name, no extra capabilities. */
+  private void createDefaultPackage(Project project) {
+    createPackage(project, null, emptyList());
+  }
+
+  /**
+   * Creates the full chain of tasks and configurations for one package:
+   * shadowed uber-jar, aggregated sources and javadoc jars, and the published
+   * api/runtime/sources/javadoc variants of the `java` component.
+   *
+   * @param project the target project
+   * @param variant variant name used to prefix tasks/configurations, or {@code null} for the default package
+   * @param capabilities capabilities attached to the published variants
+   */
+  private void createPackage(Project project, String variant, List<Capability> capabilities) {
+    ServiceRegistry projectServices = ((ProjectInternal) project).getServices();
+    JvmPluginServices jvmPluginServices = projectServices.get(JvmPluginServices.class);
+
+    Configuration contents = createBucket(project, CONTENTS_CONFIGURATION_NAME, variant);
+
+    // Resolvable views over `contents`: one for runtime jars, one for sources artifacts.
+    Configuration contentsRuntimeElements = jvmPluginServices.createResolvableConfiguration(camelPrefix(variant, "contentsRuntimeElements"), builder ->
+      builder.extendsFrom(contents).requiresJavaLibrariesRuntime());
+
+    Configuration contentSourcesElements = jvmPluginServices.createResolvableConfiguration(camelPrefix(variant, "contentsSourcesElements"), builder ->
+      builder.extendsFrom(contents).requiresAttributes(refiner -> refiner.documentation(SOURCES)));
+
+    TaskProvider<ShadowJar> shadowJar = project.getTasks().register(camelPrefix(variant, "jar"), ShadowJar.class, shadow -> {
+      shadow.setGroup(BasePlugin.BUILD_GROUP);
+      shadow.getArchiveClassifier().set(variant);
+
+      shadow.setConfigurations(Collections.singletonList(contentsRuntimeElements));
+      // Shade the Terracotta internals so the uber-jar cannot clash with standalone copies.
+      shadow.relocate("org.terracotta.statistics.", "org.ehcache.shadow.org.terracotta.statistics.");
+      shadow.relocate("org.terracotta.offheapstore.", "org.ehcache.shadow.org.terracotta.offheapstore.");
+      shadow.relocate("org.terracotta.context.", "org.ehcache.shadow.org.terracotta.context.");
+      shadow.relocate("org.terracotta.utilities.", "org.ehcache.shadow.org.terracotta.utilities.");
+
+      shadow.mergeServiceFiles();
+
+      shadow.exclude("META-INF/MANIFEST.MF", "LICENSE", "NOTICE");
+
+      // LICENSE is included in root gradle build
+      shadow.from(new File(project.getRootDir(), "NOTICE"));
+      shadow.setDuplicatesStrategy(DuplicatesStrategy.EXCLUDE);
+    });
+
+    // Lazily aggregate every resolvable source artifact of the contents into one tree,
+    // tolerating artifacts that fail to resolve (lenient configuration).
+    Provider<FileTree> sourcesTree = project.provider(() -> contentSourcesElements.getResolvedConfiguration().getLenientConfiguration().getAllModuleDependencies().stream().flatMap(d -> d.getModuleArtifacts().stream())
+      .map(artifact -> {
+        try {
+          return Optional.of(artifact.getFile());
+        } catch (ArtifactResolveException e) {
+          return Optional.empty();
+        }
+      }).filter(Optional::isPresent).map(Optional::get).distinct().map(file -> {
+        if (file.isFile()) {
+          return project.zipTree(file);
+        } else {
+          return project.fileTree(file);
+        }
+      }).reduce(FileTree::plus).orElse(project.files().getAsFileTree()));
+
+    TaskProvider<Sync> sources = project.getTasks().register(camelPrefix(variant, "sources"), Sync.class, sync -> {
+      sync.dependsOn(contentSourcesElements);
+      sync.from(sourcesTree, spec -> spec.exclude("META-INF/**", "LICENSE", "NOTICE"));
+      sync.into(project.getLayout().getBuildDirectory().dir(camelPrefix(variant,"sources")));
+    });
+
+    TaskProvider<Jar> sourcesJar = project.getTasks().register(camelPrefix(variant, "sourcesJar"), Jar.class, jar -> {
+      jar.setGroup(BasePlugin.BUILD_GROUP);
+      jar.from(sources);
+      // Reuse the shadow jar's META-INF so the sources jar carries the same manifest/notices.
+      jar.from(shadowJar, spec -> spec.include("META-INF/**", "LICENSE", "NOTICE"));
+      jar.getArchiveClassifier().set(kebabPrefix(variant, "sources"));
+    });
+
+    TaskProvider<Javadoc> javadoc = project.getTasks().register(camelPrefix(variant, "javadoc"), Javadoc.class, task -> {
+      task.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP);
+      task.setTitle(project.getName() + " " + project.getVersion() + " API");
+      task.source(sources);
+      task.include("*.java");
+      task.setClasspath(contentsRuntimeElements);
+      task.setDestinationDir(new File(project.getBuildDir(), "docs/" + camelPrefix(variant, "javadoc")));
+    });
+    TaskProvider<Jar> javadocJar = project.getTasks().register(camelPrefix(variant, "javadocJar"), Jar.class, jar -> {
+      jar.setGroup(BasePlugin.BUILD_GROUP);
+      jar.from(javadoc);
+      jar.getArchiveClassifier().set(kebabPrefix(variant, "javadoc"));
+    });
+
+    Configuration api = createBucket(project, API_CONFIGURATION_NAME, variant);
+    Configuration implementation = createBucket(project, IMPLEMENTATION_CONFIGURATION_NAME, variant).extendsFrom(api);
+    Configuration compileOnlyApi = createBucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, variant);
+    Configuration runtimeOnly = createBucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, variant);
+
+    Configuration apiElements = jvmPluginServices.createOutgoingElements(camelPrefix(variant, API_ELEMENTS_CONFIGURATION_NAME), builder ->
+      builder.extendsFrom(api, compileOnlyApi).published().providesApi().withCapabilities(capabilities).artifact(shadowJar));
+    configureDefaultTargetPlatform(apiElements, parseInt(Jvm.current().getJavaVersion().getMajorVersion()));
+    Configuration compileClasspath = jvmPluginServices.createResolvableConfiguration(camelPrefix(variant, COMPILE_CLASSPATH_CONFIGURATION_NAME), builder ->
+      builder.extendsFrom(apiElements).requiresJavaLibrariesRuntime());
+    Configuration runtimeElements = jvmPluginServices.createOutgoingElements(camelPrefix(variant, RUNTIME_ELEMENTS_CONFIGURATION_NAME), builder ->
+      builder.extendsFrom(implementation, runtimeOnly).published().providesRuntime().withCapabilities(capabilities).artifact(shadowJar));
+    configureDefaultTargetPlatform(runtimeElements, parseInt(Jvm.current().getJavaVersion().getMajorVersion()));
+    Configuration runtimeClasspath = jvmPluginServices.createResolvableConfiguration(camelPrefix(variant, RUNTIME_CLASSPATH_CONFIGURATION_NAME), builder ->
+      builder.extendsFrom(runtimeElements).requiresJavaLibrariesRuntime());
+
+    Configuration sourcesElements = jvmPluginServices.createOutgoingElements(camelPrefix(variant, SOURCES_ELEMENTS_CONFIGURATION_NAME), builder ->
+      builder.published().artifact(sourcesJar).withCapabilities(capabilities).providesAttributes(attributes -> attributes.documentation(SOURCES).asJar()));
+    Configuration javadocElements = jvmPluginServices.createOutgoingElements(camelPrefix(variant, JAVADOC_ELEMENTS_CONFIGURATION_NAME), builder ->
+      builder.published().artifact(javadocJar).withCapabilities(capabilities).providesAttributes(attributes -> attributes.documentation(JAVADOC).asJar()));
+
+    // Attach the OSGi manifest generation to the shadow jar.
+    shadowJar.configure(shadow -> {
+      OsgiManifestJarExtension osgiExtension = new OsgiManifestJarExtension(shadow);
+      osgiExtension.getClasspath().from(runtimeClasspath);
+      osgiExtension.getSources().from(sources);
+      BndConvention.bundleDefaults(project).execute(osgiExtension.getInstructions());
+    });
+
+    // Register the published variants; non-default variants are marked <optional> in the POM.
+    project.getComponents().named("java", AdhocComponentWithVariants.class, java -> {
+      java.addVariantsFromConfiguration(apiElements, variantDetails -> {
+        variantDetails.mapToMavenScope("compile");
+        if (variant != null) {
+          variantDetails.mapToOptional();
+        }
+      });
+      java.addVariantsFromConfiguration(runtimeElements, variantDetails -> {
+        variantDetails.mapToMavenScope("runtime");
+        if (variant != null) {
+          variantDetails.mapToOptional();
+        }
+      });
+      java.addVariantsFromConfiguration(sourcesElements, variantDetails -> {});
+      java.addVariantsFromConfiguration(javadocElements, variantDetails -> {});
+    });
+
+
+    project.getTasks().named(LifecycleBasePlugin.ASSEMBLE_TASK_NAME).configure(task -> {
+      task.dependsOn(shadowJar);
+      task.dependsOn(javadocJar);
+      task.dependsOn(sourcesJar);
+    });
+  }
+
+  /** Prefixes {@code thing} with the variant name in camelCase, or returns it unchanged for the default package. */
+  private static String camelPrefix(String variant, String thing) {
+    if (variant == null) {
+      return thing;
+    } else {
+      return variant + capitalize(thing);
+    }
+  }
+
+  /** Prefixes {@code thing} with the variant name in kebab-case, or returns it unchanged for the default package. */
+  private static String kebabPrefix(String variant, String thing) {
+    if (variant == null) {
+      return thing;
+    } else {
+      return variant + "-" + thing;
+    }
+  }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/VariantPlugin.java b/build-logic/src/main/java/org/ehcache/build/plugins/VariantPlugin.java
new file mode 100644
index 0000000000..8cc14e802b
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/plugins/VariantPlugin.java
@@ -0,0 +1,228 @@
+package org.ehcache.build.plugins;
+
+import aQute.bnd.gradle.BndBuilderPlugin;
+import aQute.bnd.gradle.BundleTaskExtension;
+import org.ehcache.build.conventions.BndConvention;
+import org.ehcache.build.util.PluginUtils;
+import org.gradle.api.Action;
+import org.gradle.api.NamedDomainObjectContainer;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.capabilities.Capability;
+import org.gradle.api.file.Directory;
+import org.gradle.api.file.SourceDirectorySet;
+import org.gradle.api.internal.HasConvention;
+import org.gradle.api.internal.artifacts.dsl.CapabilityNotationParserFactory;
+import org.gradle.api.internal.project.ProjectInternal;
+import org.gradle.api.model.ObjectFactory;
+import org.gradle.api.plugins.JavaPlugin;
+import org.gradle.api.plugins.JavaPluginExtension;
+import org.gradle.api.plugins.jvm.internal.JvmPluginServices;
+import org.gradle.api.provider.ListProperty;
+import org.gradle.api.provider.Property;
+import org.gradle.api.provider.Provider;
+import org.gradle.api.tasks.SourceSet;
+import org.gradle.api.tasks.Sync;
+import org.gradle.api.tasks.TaskProvider;
+import org.gradle.api.tasks.bundling.Jar;
+import org.gradle.internal.typeconversion.NotationParser;
+import org.gradle.language.base.plugins.LifecycleBasePlugin;
+import org.unbrokendome.gradle.plugins.xjc.XjcPlugin;
+import org.unbrokendome.gradle.plugins.xjc.XjcSourceSetConvention;
+
+import java.util.function.Function;
+
+import static java.util.Objects.requireNonNull;
+import static org.ehcache.build.util.PluginUtils.capitalize;
+import static org.gradle.api.attributes.DocsType.SOURCES;
+import static org.gradle.api.plugins.JavaPlugin.API_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.API_ELEMENTS_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.COMPILE_ONLY_API_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.RUNTIME_ELEMENTS_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.RUNTIME_ONLY_CONFIGURATION_NAME;
+import static org.gradle.api.plugins.JavaPlugin.SOURCES_ELEMENTS_CONFIGURATION_NAME;
+import static org.gradle.api.tasks.SourceSet.MAIN_SOURCE_SET_NAME;
+
+public class VariantPlugin implements Plugin {
+
+ protected static final String COMMON_SOURCE_SET_NAME = "common";
+
+ @Override
+ public void apply(Project project) {
+ VariantExtension variants = project.getExtensions().create("variants", VariantExtension.class, project);
+ configureJavaPluginBehavior(project, variants);
+ }
+
+ private void configureJavaPluginBehavior(Project project, VariantExtension variants) {
+ project.getPlugins().withType(JavaPlugin.class).configureEach(javaPlugin -> {
+ JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class);
+
+ variants.getVariants().configureEach(variant -> {
+ if (variant.hasSources().get()) {
+ SourceSet commonSources = java.getSourceSets().findByName(COMMON_SOURCE_SET_NAME);
+ if (commonSources == null) {
+ commonSources = java.getSourceSets().create(COMMON_SOURCE_SET_NAME, common -> {
+ project.getTasks().named(common.getCompileJavaTaskName(), task -> task.setEnabled(false));
+ project.getTasks().named(common.getClassesTaskName(), task -> task.setEnabled(false));
+ linkToCommonSource(project, common, java.getSourceSets().getByName(MAIN_SOURCE_SET_NAME));
+ });
+ }
+ SourceSet variantSources = java.getSourceSets().create(variant.getName());
+
+ linkToCommonSource(project, commonSources, variantSources);
+
+ java.registerFeature(variant.getName(), feature -> {
+ feature.usingSourceSet(variantSources);
+ feature.withSourcesJar();
+ variant.getCapabilities().get().forEach(capability -> {
+ feature.capability(capability.getGroup(), capability.getName(), requireNonNull(capability.getVersion()));
+ });
+ });
+
+ project.getPlugins().withType(BndBuilderPlugin.class).configureEach(bnd -> {
+ project.getTasks().named(variantSources.getJarTaskName(), Jar.class, jar -> {
+          jar.setDescription("Assembles a bundle containing the " + variant.getName() + " variant classes.");
+ BundleTaskExtension extension = jar.getExtensions().create(BundleTaskExtension.NAME, BundleTaskExtension.class, jar);
+ BndConvention.configureBundleDefaults(project, extension);
+ jar.doLast("buildBundle", extension.buildAction());
+ });
+ });
+
+ project.getTasks().named(LifecycleBasePlugin.ASSEMBLE_TASK_NAME).configure(task -> {
+ task.dependsOn(variantSources.getJarTaskName());
+ task.dependsOn(variantSources.getSourcesJarTaskName());
+ });
+ } else {
+ SourceSet mainSource = java.getSourceSets().getByName(MAIN_SOURCE_SET_NAME);
+
+ JvmPluginServices jvmPluginServices = ((ProjectInternal) project).getServices().get(JvmPluginServices.class);
+
+ Configuration commonApi = PluginUtils.createBucket(project, API_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME);
+ project.getConfigurations().named(mainSource.getApiConfigurationName()).configure(config -> config.extendsFrom(commonApi));
+ Configuration commonCompileOnlyApi = PluginUtils.createBucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME);
+ project.getConfigurations().named(mainSource.getCompileOnlyApiConfigurationName()).configure(config -> config.extendsFrom(commonCompileOnlyApi));
+ Configuration commonImplementation = PluginUtils.createBucket(project, IMPLEMENTATION_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME);
+ project.getConfigurations().named(mainSource.getImplementationConfigurationName()).configure(config -> config.extendsFrom(commonImplementation));
+ Configuration commonRuntimeOnly = PluginUtils.createBucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, COMMON_SOURCE_SET_NAME);
+ project.getConfigurations().named(mainSource.getRuntimeOnlyConfigurationName()).configure(config -> config.extendsFrom(commonRuntimeOnly));
+
+ Configuration api = PluginUtils.createBucket(project, API_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonApi);
+ Configuration implementation = PluginUtils.createBucket(project, IMPLEMENTATION_CONFIGURATION_NAME, variant.getName()).extendsFrom(api, commonImplementation);
+ Configuration compileOnlyApi = PluginUtils.createBucket(project, COMPILE_ONLY_API_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonCompileOnlyApi);
+ Configuration runtimeOnly = PluginUtils.createBucket(project, RUNTIME_ONLY_CONFIGURATION_NAME, variant.getName()).extendsFrom(commonRuntimeOnly);
+
+ Configuration apiElements = jvmPluginServices.createOutgoingElements(variant.getName() + capitalize(API_ELEMENTS_CONFIGURATION_NAME), builder ->
+ builder.fromSourceSet(mainSource).withCapabilities(variant.getCapabilities().get())
+ .extendsFrom(api, compileOnlyApi).withClassDirectoryVariant().providesApi());
+ project.getConfigurations().named(mainSource.getApiElementsConfigurationName(),
+ config -> config.getOutgoing().getArtifacts().configureEach(artifact -> apiElements.getOutgoing().getArtifacts().add(artifact)));
+
+ Configuration runtimeElements = jvmPluginServices.createOutgoingElements(variant.getName() + capitalize(RUNTIME_ELEMENTS_CONFIGURATION_NAME), builder ->
+ builder.fromSourceSet(mainSource).withCapabilities(variant.getCapabilities().get()).published()
+ .extendsFrom(implementation, runtimeOnly).providesRuntime());
+ project.getConfigurations().named(mainSource.getRuntimeElementsConfigurationName(),
+ config -> config.getOutgoing().getArtifacts().configureEach(artifact -> runtimeElements.getOutgoing().getArtifacts().add(artifact)));
+
+ Configuration sourcesElements = jvmPluginServices.createOutgoingElements(variant.getName() + capitalize(SOURCES_ELEMENTS_CONFIGURATION_NAME), builder ->
+ builder.fromSourceSet(mainSource).withCapabilities(variant.getCapabilities().get()).published()
+ .providesAttributes(attributes -> attributes.documentation(SOURCES).asJar()));
+ project.getConfigurations().named(mainSource.getSourcesElementsConfigurationName(),
+ config -> config.getOutgoing().getArtifacts().configureEach(artifact -> sourcesElements.getOutgoing().getArtifacts().add(artifact)));
+ }
+ });
+ });
+ }
+
+ private static void linkToCommonSource(Project project, SourceSet commonSources, SourceSet derivedSources) {
+ registerCommonCopyTask(project, commonSources, derivedSources, SourceSet::getJava);
+ registerCommonCopyTask(project, commonSources, derivedSources, SourceSet::getResources);
+
+ Configuration commonApi = project.getConfigurations().maybeCreate(commonSources.getApiConfigurationName());
+ project.getConfigurations().maybeCreate(derivedSources.getApiConfigurationName()).extendsFrom(commonApi);
+ Configuration commonImplementation = project.getConfigurations().maybeCreate(commonSources.getImplementationConfigurationName());
+ project.getConfigurations().maybeCreate(derivedSources.getImplementationConfigurationName()).extendsFrom(commonImplementation);
+
+ project.getPlugins().withType(XjcPlugin.class).configureEach(plugin -> {
+ Function xjc = sourceSet -> ((HasConvention) sourceSet).getConvention().getPlugin(XjcSourceSetConvention.class);
+
+ XjcSourceSetConvention commonXjc = xjc.apply(commonSources);
+ project.getTasks().named(commonXjc.getXjcGenerateTaskName(), task -> task.setEnabled(false));
+
+ registerCommonCopyTask(project, commonSources, derivedSources, xjc.andThen(XjcSourceSetConvention::getXjcSchema));
+ registerCommonCopyTask(project, commonSources, derivedSources, xjc.andThen(XjcSourceSetConvention::getXjcCatalog));
+ registerCommonCopyTask(project, commonSources, derivedSources, xjc.andThen(XjcSourceSetConvention::getXjcBinding));
+ registerCommonCopyTask(project, commonSources, derivedSources, xjc.andThen(XjcSourceSetConvention::getXjcUrl));
+ });
+ }
+
+ private static void registerCommonCopyTask(Project project, SourceSet common, SourceSet variant, Function type) {
+ SourceDirectorySet commonSource = type.apply(common);
+ Provider variantLocation = project.getLayout().getBuildDirectory().dir("generated/sources/common/" + variant.getName() + "/" + commonSource.getName());
+ TaskProvider variantTask = project.getTasks().register(variant.getTaskName("copyCommon", commonSource.getName()), Sync.class, sync -> {
+ sync.from(commonSource);
+ sync.into(variantLocation);
+ });
+ type.apply(variant).srcDir(variantTask);
+ }
+
+ public static class Variant {
+
+ private static final NotationParser CAPABILITY_NOTATION_PARSER = new CapabilityNotationParserFactory(true).create();
+
+ private final String name;
+ private final Property hasSources;
+ private final ListProperty capabilities;
+
+ public Variant(String name, ObjectFactory objectFactory) {
+ this.name = name;
+ this.hasSources = objectFactory.property(Boolean.class).convention(false);
+ this.capabilities = objectFactory.listProperty(Capability.class);
+
+ this.hasSources.finalizeValueOnRead();
+ this.capabilities.finalizeValueOnRead();
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public Property hasSources() {
+ return hasSources;
+ }
+
+ public ListProperty getCapabilities() {
+ return capabilities;
+ }
+
+ public void withSeparateSource() {
+ this.hasSources.set(true);
+ }
+
+ public void capability(Object notation) {
+ this.capabilities.add(CAPABILITY_NOTATION_PARSER.parseNotation(notation));
+ }
+ }
+
+ public static class VariantExtension {
+
+ private final ObjectFactory objectFactory;
+ private final NamedDomainObjectContainer variants;
+
+ public VariantExtension(Project project) {
+ this.objectFactory = project.getObjects();
+ this.variants = project.container(Variant.class);
+ }
+
+ public void variant(String variant, Action action) {
+ Variant v = new Variant(variant, objectFactory);
+ action.execute(v);
+ variants.add(v);
+ }
+
+ public NamedDomainObjectContainer getVariants() {
+ return variants;
+ }
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/VoltronPlugin.java b/build-logic/src/main/java/org/ehcache/build/plugins/VoltronPlugin.java
new file mode 100644
index 0000000000..db5299198f
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/plugins/VoltronPlugin.java
@@ -0,0 +1,66 @@
+package org.ehcache.build.plugins;
+
+import org.ehcache.build.conventions.JavaLibraryConvention;
+import org.gradle.api.Action;
+import org.gradle.api.NamedDomainObjectProvider;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.Task;
+import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.artifacts.dsl.DependencyHandler;
+import org.gradle.api.plugins.JavaPlugin;
+import org.gradle.api.tasks.bundling.Jar;
+
+import java.io.File;
+import java.util.jar.Attributes;
+import java.util.stream.Collectors;
+
+import static java.util.Collections.singletonMap;
+
+public class VoltronPlugin implements Plugin {
+
+ private static final String VOLTRON_CONFIGURATION_NAME = "voltron";
+ private static final String SERVICE_CONFIGURATION_NAME = "service";
+
+ @Override
+ public void apply(Project project) {
+ project.getPlugins().apply(JavaLibraryConvention.class);
+
+ NamedDomainObjectProvider voltron = project.getConfigurations().register(VOLTRON_CONFIGURATION_NAME, config -> {
+ config.setDescription("Dependencies provided by Voltron from server/lib");
+ config.setCanBeConsumed(true);
+ config.setCanBeResolved(true);
+
+ DependencyHandler dependencyHandler = project.getDependencies();
+ String terracottaApisVersion = project.property("terracottaApisVersion").toString();
+ String slf4jVersion = project.property("slf4jVersion").toString();
+ config.getDependencies().add(dependencyHandler.create("org.terracotta:entity-server-api:" + terracottaApisVersion));
+ config.getDependencies().add(dependencyHandler.create("org.terracotta:standard-cluster-services:" + terracottaApisVersion));
+ config.getDependencies().add(dependencyHandler.create("org.terracotta:packaging-support:" + terracottaApisVersion));
+ config.getDependencies().add(dependencyHandler.create("org.slf4j:slf4j-api:" + slf4jVersion));
+ });
+
+ NamedDomainObjectProvider service = project.getConfigurations().register(SERVICE_CONFIGURATION_NAME, config -> {
+ config.setDescription("Services consumed by this plugin");
+ config.setCanBeResolved(true);
+ config.setCanBeConsumed(true);
+ });
+
+ project.getConfigurations().named(JavaPlugin.API_CONFIGURATION_NAME, config -> {
+ config.extendsFrom(voltron.get());
+ config.extendsFrom(service.get());
+ });
+
+ project.getTasks().named(JavaPlugin.JAR_TASK_NAME, Jar.class, jar -> {
+ //noinspection Convert2Lambda
+ jar.doFirst(new Action() {
+ @Override
+ public void execute(Task task) {
+ jar.manifest(manifest -> manifest.attributes(singletonMap(Attributes.Name.CLASS_PATH.toString(),
+ (project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME).minus(voltron.get()).minus(service.get()))
+ .getFiles().stream().map(File::getName).collect(Collectors.joining(" ")))));
+ }
+ });
+ });
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/osgids/GenerateDeclarativeServicesDescriptors.java b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/GenerateDeclarativeServicesDescriptors.java
new file mode 100644
index 0000000000..c285f97550
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/GenerateDeclarativeServicesDescriptors.java
@@ -0,0 +1,113 @@
+package org.ehcache.build.plugins.osgids;
+
+import org.apache.felix.scrplugin.Options;
+import org.apache.felix.scrplugin.Project;
+import org.apache.felix.scrplugin.SCRDescriptorException;
+import org.apache.felix.scrplugin.SCRDescriptorFailureException;
+import org.apache.felix.scrplugin.SCRDescriptorGenerator;
+import org.apache.felix.scrplugin.Source;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.GradleException;
+import org.gradle.api.file.ConfigurableFileCollection;
+import org.gradle.api.file.DirectoryProperty;
+import org.gradle.api.file.EmptyFileVisitor;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.file.FileVisitDetails;
+import org.gradle.api.tasks.Classpath;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.OutputDirectory;
+import org.gradle.api.tasks.TaskAction;
+
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Set;
+
+public abstract class GenerateDeclarativeServicesDescriptors extends DefaultTask {
+
+ @InputFiles
+ public abstract ConfigurableFileCollection getInputFiles();
+
+ @Classpath
+ public abstract ConfigurableFileCollection getClasspath();
+
+ @OutputDirectory
+ public abstract DirectoryProperty getOutputDirectory();
+
+ @TaskAction
+ public void generateDeclarativeServicesDescriptors() throws SCRDescriptorException, SCRDescriptorFailureException, IOException {
+ final Options scrOptions = createOptions();
+
+ try (GradleScrProject scrProject = new GradleScrProject(getInputFiles(), getClasspath())) {
+ final SCRDescriptorGenerator scrGenerator = new SCRDescriptorGenerator(new ScrLoggerAdapter(getLogger()));
+ scrGenerator.setOptions(scrOptions);
+ scrGenerator.setProject(scrProject);
+
+ scrGenerator.execute();
+ }
+ }
+
+ private Options createOptions() {
+ final Options scrOptions = new Options();
+ scrOptions.setOutputDirectory(getOutputDirectory().get().getAsFile());
+ scrOptions.setStrictMode(false);
+ scrOptions.setSpecVersion(null);
+
+ return scrOptions;
+ }
+
+ static class GradleScrProject extends Project implements Closeable {
+
+ private final URLClassLoader urlClassLoader;
+
+ GradleScrProject(FileCollection input, FileCollection classpath) {
+ Set classpathFiles = classpath.getFiles();
+ URL[] classpathUrls = classpathFiles.stream().map(f -> {
+ try {
+ return f.toURI().toURL();
+ } catch (MalformedURLException e) {
+ throw new GradleException("Malformed URL in classpath", e);
+ }
+ }).toArray(URL[]::new);
+ this.urlClassLoader = URLClassLoader.newInstance(classpathUrls, getClass().getClassLoader());
+ setClassLoader(urlClassLoader);
+ setDependencies(classpathFiles);
+ setSources(createScrSources(input));
+ }
+
+ @Override
+ public void close() throws IOException {
+ urlClassLoader.close();
+ }
+
+ private static Collection createScrSources(FileCollection input) {
+ Collection sources = new ArrayList<>();
+
+ input.getAsFileTree().matching(f -> f.include("**/*.class")).visit(new EmptyFileVisitor() {
+ @Override
+ public void visitFile(FileVisitDetails fileVisitDetails) {
+ String dotSeparated = String.join(".", fileVisitDetails.getRelativePath().getSegments());
+ String className = dotSeparated.substring(0, dotSeparated.length() - ".class".length());
+ File file = fileVisitDetails.getFile();
+ sources.add(new Source() {
+ @Override
+ public String getClassName() {
+ return className;
+ }
+
+ @Override
+ public File getFile() {
+ return file;
+ }
+ });
+ }
+ });
+ return sources;
+ }
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/osgids/OsgiDsPlugin.java b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/OsgiDsPlugin.java
new file mode 100644
index 0000000000..a844ab0af9
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/OsgiDsPlugin.java
@@ -0,0 +1,22 @@
+package org.ehcache.build.plugins.osgids;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.tasks.SourceSetContainer;
+import org.gradle.api.tasks.TaskProvider;
+
+public class OsgiDsPlugin implements Plugin {
+ @Override
+ public void apply(Project project) {
+ project.getExtensions().configure(SourceSetContainer.class, sourceSets -> sourceSets.configureEach(sourceSet -> {
+ String taskName = sourceSet.getTaskName("generate", "DeclarativeServicesDescriptors");
+ TaskProvider generateTask = project.getTasks().register(taskName, GenerateDeclarativeServicesDescriptors.class, task -> {
+ task.setDescription("Generate OSGi Declarative Services XML descriptors for " + sourceSet.getName() + " classes");
+ task.getInputFiles().from(sourceSet.getOutput().getClassesDirs());
+ task.getClasspath().from(sourceSet.getRuntimeClasspath());
+ task.getOutputDirectory().set(project.getLayout().getBuildDirectory().dir("generated/resources/osgi-ds/" + sourceSet.getName()));
+ });
+      sourceSet.getResources().srcDir(generateTask);
+ }));
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/plugins/osgids/ScrLoggerAdapter.java b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/ScrLoggerAdapter.java
new file mode 100644
index 0000000000..279b26714f
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/plugins/osgids/ScrLoggerAdapter.java
@@ -0,0 +1,149 @@
+/**
+ * Copyright (C) 2016 Elmar Schug
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.ehcache.build.plugins.osgids;
+
+import org.apache.felix.scrplugin.Log;
+import org.gradle.api.logging.Logger;
+
+
+final class ScrLoggerAdapter implements Log
+{
+ private final Logger logger;
+
+ ScrLoggerAdapter(Logger logger) {
+ this.logger = logger;
+ }
+
+ @Override
+ public boolean isDebugEnabled()
+ {
+ return logger.isDebugEnabled();
+ }
+
+ @Override
+ public void debug(String content)
+ {
+ logger.debug(content);
+ }
+
+ @Override
+ public void debug(String content, Throwable error)
+ {
+ logger.debug(content, error);
+ }
+
+ @Override
+ public void debug(Throwable error)
+ {
+ logger.debug(error.toString());
+ }
+
+ @Override
+ public boolean isInfoEnabled()
+ {
+ return logger.isInfoEnabled();
+ }
+
+ @Override
+ public void info(String content)
+ {
+ logger.info(content);
+ }
+
+ @Override
+ public void info(String content, Throwable error)
+ {
+ logger.info(content, error);
+ }
+
+ @Override
+ public void info(Throwable error)
+ {
+ logger.info(error.toString());
+ }
+
+ @Override
+ public boolean isWarnEnabled()
+ {
+ return logger.isWarnEnabled();
+ }
+
+ @Override
+ public void warn(String content)
+ {
+ logger.warn(content);
+ }
+
+ @Override
+ public void warn(String content, String location, int lineNumber)
+ {
+ logger.warn("{} [{},{}]", content, location, lineNumber);
+ }
+
+ @Override
+ public void warn(String content, String location, int lineNumber, int columNumber)
+ {
+ logger.warn("{} [{},{}:{}]", content, location, lineNumber, columNumber);
+ }
+
+ @Override
+ public void warn(String content, Throwable error)
+ {
+ logger.warn(content, error);
+ }
+
+ @Override
+ public void warn(Throwable error)
+ {
+ logger.warn(error.toString());
+ }
+
+ @Override
+ public boolean isErrorEnabled()
+ {
+ return logger.isErrorEnabled();
+ }
+
+ @Override
+ public void error(String content)
+ {
+ logger.error(content);
+ }
+
+ @Override
+ public void error(String content, String location, int lineNumber)
+ {
+    logger.error("{} [{},{}]", content, location, lineNumber);
+ }
+
+ @Override
+ public void error(String content, String location, int lineNumber, int columnNumber)
+ {
+ logger.error("{} [{},{}:{}]", content, location, lineNumber, columnNumber);
+ }
+
+ @Override
+ public void error(String content, Throwable error)
+ {
+ logger.error(content, error);
+ }
+
+ @Override
+ public void error(Throwable error)
+ {
+ logger.error(error.toString());
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/util/OsgiManifestJarExtension.java b/build-logic/src/main/java/org/ehcache/build/util/OsgiManifestJarExtension.java
new file mode 100644
index 0000000000..d2e8556173
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/util/OsgiManifestJarExtension.java
@@ -0,0 +1,103 @@
+package org.ehcache.build.util;
+
+import aQute.bnd.osgi.Builder;
+import aQute.bnd.osgi.Jar;
+import aQute.service.reporter.Report;
+import org.gradle.api.Action;
+import org.gradle.api.GradleException;
+import org.gradle.api.Task;
+import org.gradle.api.file.ConfigurableFileCollection;
+import org.gradle.api.provider.MapProperty;
+import org.gradle.api.provider.Provider;
+import org.gradle.api.tasks.Classpath;
+import org.gradle.api.tasks.ClasspathNormalizer;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.InputFiles;
+
+import java.io.File;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
+public class OsgiManifestJarExtension {
+
+ private final org.gradle.api.tasks.bundling.Jar jarTask;
+ private final MapProperty instructions;
+ private final ConfigurableFileCollection classpath;
+ private final ConfigurableFileCollection sources;
+
+ public OsgiManifestJarExtension(org.gradle.api.tasks.bundling.Jar jarTask) {
+ this.jarTask = jarTask;
+ this.instructions = jarTask.getProject().getObjects().mapProperty(String.class, String.class);
+ this.classpath = jarTask.getProject().getObjects().fileCollection();
+ this.sources = jarTask.getProject().getObjects().fileCollection();
+
+ jarTask.getInputs().files(classpath).withNormalizer(ClasspathNormalizer.class).withPropertyName("osgi.classpath");
+ jarTask.getInputs().files(sources).withPropertyName("osgi.sources");
+ jarTask.getInputs().property("osgi.instructions", (Callable>) instructions::get);
+
+ jarTask.getExtensions().add("osgi", this);
+ jarTask.doLast("buildManifest", new BuildAction());
+ }
+
+ public void instruction(String key, String value) {
+ instructions.put(key, value);
+ }
+
+ public void instruction(String key, Provider value) {
+ instructions.put(key, value);
+ }
+
+  @Classpath
+ public ConfigurableFileCollection getClasspath() {
+ return classpath;
+ }
+
+ @InputFiles
+ public ConfigurableFileCollection getSources() {
+ return sources;
+ }
+
+ @Input
+ public MapProperty getInstructions() {
+ return instructions;
+ }
+
+
+ private class BuildAction implements Action {
+ @Override
+ public void execute(Task t) {
+ try (Builder builder = new Builder()) {
+ File archiveFile = jarTask.getArchiveFile().get().getAsFile();
+
+ jarTask.getProject().sync(sync -> sync.from(archiveFile).into(jarTask.getTemporaryDir()));
+ File archiveCopyFile = new File(jarTask.getTemporaryDir(), archiveFile.getName());
+
+ Jar bundleJar = new Jar(archiveCopyFile);
+
+ builder.setJar(bundleJar);
+ builder.setClasspath(getClasspath().getFiles());
+ builder.setSourcepath(getSources().getFiles().toArray(new File[0]));
+ builder.addProperties(getInstructions().get());
+
+ try (Jar builtJar = builder.build()) {
+ builtJar.write(archiveFile);
+ }
+
+ if (!builder.isOk()) {
+ jarTask.getProject().delete(archiveFile);
+ builder.getErrors().forEach((String msg) -> {
+ Report.Location location = builder.getLocation(msg);
+ if ((location != null) && (location.file != null)) {
+ jarTask.getLogger().error("{}:{}: error: {}", location.file, location.line, msg);
+ } else {
+ jarTask.getLogger().error("error : {}", msg);
+ }
+ });
+ throw new GradleException("Bundle " + archiveFile.getName() + " has errors");
+ }
+ } catch (Exception e) {
+ throw new GradleException("Error building bundle", e);
+ }
+ }
+ }
+}
diff --git a/build-logic/src/main/java/org/ehcache/build/util/PluginUtils.java b/build-logic/src/main/java/org/ehcache/build/util/PluginUtils.java
new file mode 100644
index 0000000000..619b6413f3
--- /dev/null
+++ b/build-logic/src/main/java/org/ehcache/build/util/PluginUtils.java
@@ -0,0 +1,48 @@
+package org.ehcache.build.util;
+
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.Configuration;
+
+import java.util.Locale;
+
+public class PluginUtils {
+
+ public static Configuration createBucket(Project project, String kind, String variant) {
+ if (variant == null) {
+ return createBucket(project, kind);
+ } else {
+ Configuration configuration = project.getConfigurations().maybeCreate(variant + capitalize(kind));
+ configuration.setDescription(capitalize(kind) + " dependencies for " + variant);
+ configuration.setVisible(false);
+ configuration.setCanBeResolved(false);
+ configuration.setCanBeConsumed(false);
+ return configuration;
+ }
+ }
+
+ public static Configuration createBucket(Project project, String kind) {
+ Configuration configuration = project.getConfigurations().maybeCreate(kind);
+ configuration.setDescription(capitalize(kind) + " dependencies");
+ configuration.setVisible(false);
+ configuration.setCanBeResolved(false);
+ configuration.setCanBeConsumed(false);
+ return configuration;
+ }
+
+ public static Configuration bucket(Project project, String kind, String variant) {
+ if (variant == null) {
+ return bucket(project, kind);
+ } else {
+ return project.getConfigurations().getByName(variant + capitalize(kind));
+ }
+ }
+
+ public static Configuration bucket(Project project, String kind) {
+ return project.getConfigurations().getByName(kind);
+ }
+
+ public static String capitalize(String word) {
+ return word.substring(0, 1).toUpperCase(Locale.ROOT) + word.substring(1);
+ }
+
+}
diff --git a/build.gradle b/build.gradle
index b094bd8526..b107ffb59d 100644
--- a/build.gradle
+++ b/build.gradle
@@ -13,16 +13,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import scripts.*
-import org.gradle.internal.jvm.Jvm
plugins {
// This adds tasks to auto close or release nexus staging repos
// see https://github.com/Codearte/gradle-nexus-staging-plugin/
- id 'io.codearte.nexus-staging' version '0.9.0'
- // This adds the ability to print a taskTree
- // ./gradlew ... taskTree
- id "com.dorongold.task-tree" version "1.3"
+ id 'io.codearte.nexus-staging'
+ //OWASP Security Vulnerability Detection
+ id 'org.owasp.dependencycheck'
+}
+
+wrapper {
+ distributionType = Wrapper.DistributionType.ALL
+}
+
+allprojects {
+ version = findProperty('overrideVersion') ?: ehcacheVersion
}
if (deployUrl.contains('nexus')) {
@@ -45,178 +50,25 @@ if (deployUrl.contains('nexus')) {
}
}
-project.nexusStaging {
+nexusStaging {
username = project.ext.deployUser
password = project.ext.deployPwd
logger.debug("Nexus Staging: Using login ${username} and url ${serverUrl}")
}
-// Disable automatic promotion for added safety
-closeAndReleaseRepository.enabled = false
-
-
-ext {
-
- baseVersion = findProperty('overrideVersion') ?: '3.5.0-SNAPSHOT'
-
- utils = new Utils(baseVersion, logger)
- isReleaseVersion = !baseVersion.endsWith('SNAPSHOT')
- isCloudbees = System.getenv('JENKINS_URL')?.contains('cloudbees')
+tasks.named('closeAndReleaseRepository') {
+ // Disable automatic promotion for added safety
+ enabled = false;
}
-
assert (JavaVersion.current().isJava8Compatible()) : 'The Ehcache 3 build requires Java 8+ to run'
-ext {
- testJava = Jvm.current()
-}
-
-if (hasProperty('testVM')) {
- testJava = Utils.jvmForHome(new File(testVM))
- println "Using Test JVM $testJava [Version: $testJava.javaVersion.majorVersion]"
-}
-
-subprojects {
- apply plugin: 'java'
- apply plugin: 'eclipse'
- apply plugin: 'checkstyle'
- apply plugin: 'findbugs'
- apply plugin: 'jacoco'
-
- group = 'org.ehcache.modules'
- version = baseVersion
-
- archivesBaseName = "ehcache-${project.name}"
-
- sourceCompatibility = 1.8
- targetCompatibility = 1.8
-
- repositories {
- if (project.hasProperty('mvnlocal')) {
- mavenLocal()
- }
- mavenCentral()
- maven { url "http://repo.terracotta.org/maven2" }
- }
-
- sourceSets {
- slowTest {
- java.srcDir 'src/slow-test/java'
- resources.srcDir 'src/slow-test/resources'
- compileClasspath += sourceSets.test.compileClasspath
- runtimeClasspath += sourceSets.test.runtimeClasspath
- }
- }
-
- dependencies {
- compileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion"
- testCompileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion"
- testCompile "junit:junit:$junitVersion", "org.assertj:assertj-core:$assertjVersion", "org.hamcrest:hamcrest-library:$hamcrestVersion"
- testCompile("org.mockito:mockito-core:$mockitoVersion") {
- exclude group:'org.hamcrest', module:'hamcrest-core'
- }
- testRuntime "org.slf4j:slf4j-simple:$parent.slf4jVersion"
- }
-
- jar {
- utils.fillManifest(manifest,"ehcache-${project.name}")
- from "$rootDir/LICENSE"
- }
-
- test {
- maxHeapSize = "1408m"
- systemProperty 'java.awt.headless', 'true'
- if (parent.isCloudbees) {
- systemProperty 'disable.concurrent.tests', 'true'
- }
- }
-
- task slowTest(type: Test) {
- testClassesDirs = sourceSets.slowTest.output.classesDirs
- classpath += sourceSets.slowTest.runtimeClasspath
-
- binResultsDir file("$buildDir/slow-tests-results/binary/$name")
- reports.junitXml.destination = file("$buildDir/slow-tests-results")
- reports.html.destination = file("$buildDir/reports/slow-tests")
- }
-
- task sourceJar(type: Jar, dependsOn: classes) {
- from sourceSets.main.allJava
- classifier = 'sources'
- }
-
- javadoc {
- title "$project.archivesBaseName $project.version API"
- exclude '**/internal/**'
- }
-
- task javadocJar(type: Jar, dependsOn: javadoc) {
- from javadoc.destinationDir
- classifier = 'javadoc'
- }
-
- artifacts {
- archives jar
- archives javadocJar
- archives sourceJar
- }
-
- checkstyle {
- configFile = file("$rootDir/config/checkstyle.xml")
- configProperties = ['projectDir':projectDir, 'rootDir':rootDir]
- toolVersion = checkstyleVersion
- }
-
- findbugs {
- ignoreFailures = false
- sourceSets = [sourceSets.main]
- toolVersion = findbugsVersion
- }
-
- findbugsMain {
- reports {
- // Switch from xml to html by changing these flags
- xml.enabled = true
- html.enabled = false
- }
- }
-
- jacoco {
- toolVersion = jacocoVersion
- }
-
- jacocoTestReport {
- reports {
- xml.enabled false
- csv.enabled false
- }
- }
-
- tasks.withType(AbstractCompile) {
- options.with {
- fork = true
- }
- }
- tasks.withType(Test) {
- executable = testJava.javaExecutable
- }
- tasks.withType(Javadoc) {
- options.addStringOption('Xdoclint:none', '-quiet')
- }
-
- configurations.all {
- resolutionStrategy {
- failOnVersionConflict()
- }
- }
+dependencyCheck {
+ failBuildOnCVSS = 0
+ suppressionFile = 'config/owasp-supressions.xml'
+ skipConfigurations += ['checkstyle', 'spotbugs', 'xjcClasspath']
+ skipProjects += [':docs', ':demos:00-NoCache', ':demos:01-CacheAside']
}
-
-allprojects {
- tasks.withType(JavaCompile) {
- options.encoding = 'UTF-8'
- options.compilerArgs += ['-Xlint:unchecked']
- }
- tasks.withType(Javadoc) {
- options.encoding = 'UTF-8'
- }
+tasks.register('check') {
+ dependsOn dependencyCheckAggregate
}
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
deleted file mode 100644
index 5e7622186e..0000000000
--- a/buildSrc/build.gradle
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-apply plugin: 'groovy'
-
-repositories { jcenter() }
-dependencies {
- compile gradleApi()
- compile localGroovy()
- compile 'com.github.jengelman.gradle.plugins:shadow:2.0.1'
-}
diff --git a/buildSrc/src/main/groovy/EhDeploy.groovy b/buildSrc/src/main/groovy/EhDeploy.groovy
deleted file mode 100644
index 417716d255..0000000000
--- a/buildSrc/src/main/groovy/EhDeploy.groovy
+++ /dev/null
@@ -1,92 +0,0 @@
-import org.gradle.api.Plugin
-import org.gradle.api.Project
-import org.gradle.api.artifacts.maven.Conf2ScopeMappingContainer
-import org.gradle.api.artifacts.maven.MavenDeployment
-import org.gradle.api.plugins.MavenPlugin
-import org.gradle.plugins.signing.Sign
-import scripts.Utils
-
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * EhDeploy
- */
-class EhDeploy implements Plugin {
- @Override
- void apply(Project project) {
-
- def utils = new Utils(project.baseVersion, project.logger)
-
- project.plugins.apply 'signing'
- project.plugins.apply 'maven'
- project.plugins.apply EhPomGenerate // for generating pom.*
-
- project.configurations {
- provided
- }
-
- project.sourceSets {
- main {
- compileClasspath += project.configurations.provided
- }
- test {
- compileClasspath += project.configurations.provided
- runtimeClasspath += project.configurations.provided
- }
- }
-
- project.signing {
- required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") }
- sign project.configurations.getByName('archives')
- }
-
- def artifactFiltering = {
- pom.scopeMappings.mappings.remove(project.configurations.testCompile)
- pom.scopeMappings.mappings.remove(project.configurations.testRuntime)
- pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.provided, Conf2ScopeMappingContainer.PROVIDED)
-
- utils.pomFiller(pom, project.subPomName, project.subPomDesc)
-
- }
-
- project.install {
- repositories.mavenInstaller artifactFiltering
- }
-
- project.uploadArchives {
- repositories {
- mavenDeployer ({
- beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)}
-
- if (project.isReleaseVersion) {
- repository(url: project.deployUrl) {
- authentication(userName: project.deployUser, password: project.deployPwd)
- }
- } else {
- repository(id: 'sonatype-nexus-snapshot', url: 'https://oss.sonatype.org/content/repositories/snapshots') {
- authentication(userName: project.sonatypeUser, password: project.sonatypePwd)
- }
- }
- } << artifactFiltering)
- }
- }
-
- def installer = project.install.repositories.mavenInstaller
- def deployer = project.uploadArchives.repositories.mavenDeployer
-
- }
-}
diff --git a/buildSrc/src/main/groovy/EhDistribute.groovy b/buildSrc/src/main/groovy/EhDistribute.groovy
deleted file mode 100644
index e1d1ba8f5d..0000000000
--- a/buildSrc/src/main/groovy/EhDistribute.groovy
+++ /dev/null
@@ -1,82 +0,0 @@
-import org.gradle.api.Plugin
-import org.gradle.api.Project
-import org.gradle.api.artifacts.ProjectDependency
-import scripts.Utils
-
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * EhDistribute
- */
-class EhDistribute implements Plugin {
-
- @Override
- void apply(Project project) {
- def utils = new Utils(project.baseVersion, project.logger)
- def hashsetOfProjects = project.configurations.compileOnly.dependencies.withType(ProjectDependency).dependencyProject
-
- project.plugins.apply 'java'
- project.plugins.apply 'maven'
- project.plugins.apply 'signing'
- project.plugins.apply 'com.github.johnrengelman.shadow'
- project.plugins.apply EhOsgi
- project.plugins.apply EhPomMangle
- project.plugins.apply EhDocs
- project.plugins.apply EhPomGenerate
-
- def OSGI_OVERRIDE_KEYS = ['Import-Package', 'Export-Package', 'Private-Package', 'Tool', 'Bnd-LastModified', 'Created-By', 'Require-Capability']
-
- project.configurations {
- shadowCompile
- shadowProvided
- }
-
- project.shadowJar {
- configurations = [[project.configurations.compileOnly]]
- baseName = "$project.archivesBaseName-shadow"
- classifier = ''
- dependencies {
- exclude({ rdep -> !['org.ehcache', 'org.terracotta'].any({ prefix -> rdep.moduleGroup.startsWith(prefix) })})
- }
- mergeServiceFiles()
- }
-
- project.jar {
- dependsOn project.shadowJar
- from(project.zipTree(project.shadowJar.archivePath.getPath())) {
- exclude 'META-INF/MANIFEST.MF', 'LICENSE', 'NOTICE'
- }
- // LICENSE is included in root gradle build
- from "$project.rootDir/NOTICE"
- duplicatesStrategy = 'exclude'
- }
-
-
- project.sourceJar {
- from hashsetOfProjects.flatten {
- it.sourceSets.main.allSource
- }
- }
-
-
- project.signing {
- required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") }
- sign project.configurations.getByName('archives')
- }
-
- }
-}
diff --git a/buildSrc/src/main/groovy/EhDocs.groovy b/buildSrc/src/main/groovy/EhDocs.groovy
deleted file mode 100644
index 0a900fb480..0000000000
--- a/buildSrc/src/main/groovy/EhDocs.groovy
+++ /dev/null
@@ -1,76 +0,0 @@
-import org.gradle.api.Plugin
-import org.gradle.api.Project
-import org.gradle.api.artifacts.ProjectDependency
-import org.gradle.api.tasks.bundling.Jar
-import org.gradle.api.tasks.bundling.Zip
-import org.gradle.api.tasks.javadoc.Javadoc
-import scripts.Utils
-
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * EhDocs
- * Handle javadocs and API/SPI/asciidoc
- */
-class EhDocs implements Plugin {
-
- @Override
- void apply(Project project) {
- def utils = new Utils(project.baseVersion, project.logger)
- def hashsetOfProjects = project.configurations.compile.dependencies.withType(ProjectDependency).dependencyProject +
- project.configurations.compileOnly.dependencies.withType(ProjectDependency).dependencyProject
-
- project.javadoc {
- title "$project.archivesBaseName $project.version API"
- source hashsetOfProjects.javadoc.source
- classpath = project.files(hashsetOfProjects.javadoc.classpath)
- project.ext.properties.javadocExclude?.tokenize(',').each {
- exclude it.trim()
- }
- }
-
- if (!project.hasProperty('spiJavadocDisable')) {
-
- project.task('spiJavadoc', type: Javadoc) {
- title "$project.archivesBaseName $project.version API & SPI"
- source hashsetOfProjects.javadoc.source
- classpath = project.files(hashsetOfProjects.javadoc.classpath)
- exclude '**/internal/**'
- destinationDir = project.file("$project.docsDir/spi-javadoc")
- }
-
- project.task('spiJavadocJar', type: Jar, dependsOn: 'spiJavadoc') {
- classifier = 'spi-javadoc'
- from project.tasks.getByPath('spiJavadoc').destinationDir
- }
-
- }
-
- project.task('asciidocZip', type: Zip, dependsOn: ':docs:asciidoctor') {
- classifier = 'docs'
- from project.tasks.getByPath(':docs:asciidoctor').outputDir
- }
-
- project.artifacts {
- archives project.asciidocZip
- if (!project.hasProperty('spiJavadocDisable')) {
- archives project.spiJavadocJar
- }
- }
-
- }
-}
diff --git a/buildSrc/src/main/groovy/EhOsgi.groovy b/buildSrc/src/main/groovy/EhOsgi.groovy
deleted file mode 100644
index 6b16d01d1c..0000000000
--- a/buildSrc/src/main/groovy/EhOsgi.groovy
+++ /dev/null
@@ -1,95 +0,0 @@
-import com.github.jengelman.gradle.plugins.shadow.tasks.DefaultInheritManifest
-import groovy.json.JsonSlurper
-import org.gradle.api.Plugin
-import org.gradle.api.Project
-import org.gradle.api.artifacts.ProjectDependency
-import org.gradle.api.internal.file.FileResolver
-import org.gradle.api.plugins.osgi.OsgiPluginConvention
-import scripts.Utils
-
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * EhOsgi
- * OSGI additions to the manifest controlled by osgi key in gradle.properties
- * This plugin supports shadowJar if available
- */
-class EhOsgi implements Plugin {
-
- @Override
- void apply(Project project) {
- def utils = new Utils(project.baseVersion, project.logger)
- def hashsetOfProjects = project.configurations.compile.dependencies.withType(ProjectDependency).dependencyProject +
- project.configurations.compileOnly.dependencies.withType(ProjectDependency).dependencyProject
- hashsetOfProjects += project //self also, in case the invoking project defines osgi properties
-
- project.plugins.apply 'java'
- project.plugins.apply 'maven'
- project.plugins.apply 'signing'
-
- def OSGI_OVERRIDE_KEYS = ['Import-Package', 'Export-Package', 'Private-Package', 'Tool', 'Bnd-LastModified', 'Created-By', 'Require-Capability']
-
- project.jar.doFirst {
- manifest = new DefaultInheritManifest(getServices().get(FileResolver.class))
- if (project.hasProperty('shadowJar')) {
- manifest.inheritFrom "$project.buildDir/tmp/shadowJar/MANIFEST.MF"
- }
- utils.fillManifest(manifest, project.archivesBaseName)
-
- def osgiConvention = new OsgiPluginConvention(project)
- def osgiManifest = osgiConvention.osgiManifest {
-
- if (project.hasProperty('shadowJar')) {
- classesDir = project.shadowJar.archivePath
- classpath = project.files(project.configurations.shadowCompile, project.configurations.shadowProvided)
- } else {
- classesDir = project.sourceSets.main.java.outputDir
- classpath = project.sourceSets.main.compileClasspath
- }
-
- // Metadata
- instructionReplace 'Bundle-Name', "$project.archivesBaseName 3"
- instructionReplace 'Bundle-SymbolicName', "org.ehcache.$project.archivesBaseName"
- instruction 'Bundle-Description', 'Ehcache is an open-source caching library, compliant with the JSR-107 standard.'
- instruction 'Bundle-DocURL', 'http://ehcache.org'
- instruction 'Bundle-License', 'LICENSE'
- instruction 'Bundle-Vendor', 'Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.'
- instruction 'Bundle-RequiredExecutionEnvironment', 'JavaSE-1.8'
-
- hashsetOfProjects.findAll({ p -> p.ext.properties.osgi}).each{ prop ->
- new JsonSlurper().parseText(prop.ext.properties.osgi).each {
- project.logger.info "OSGI: ${it.key}: ${it.value}"
- instruction(it.key, *it.value)
- }
- }
-
- instruction 'Export-Package', '*'
- instruction 'Import-Package', '*'
- }
- manifest.inheritFrom(osgiManifest) {
- eachEntry {
- if (it.getKey().startsWith('Bundle') || OSGI_OVERRIDE_KEYS.contains(it.getKey())) {
- it.setValue(it.getMergeValue())
- } else {
- it.setValue(it.getBaseValue())
- }
- }
- }
- }
-
- }
-}
diff --git a/buildSrc/src/main/groovy/EhPomGenerate.groovy b/buildSrc/src/main/groovy/EhPomGenerate.groovy
deleted file mode 100644
index 81761ffe63..0000000000
--- a/buildSrc/src/main/groovy/EhPomGenerate.groovy
+++ /dev/null
@@ -1,125 +0,0 @@
-
-
-import org.gradle.api.Plugin
-import org.gradle.api.Project
-import org.gradle.api.publish.maven.MavenPublication
-import scripts.Utils
-
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * EhPomGenerate:
- * Creates pom.xml and pom.properties to be included in produced jars
- * Mimics standard maven jar layout.
- */
-class EhPomGenerate implements Plugin {
-
- @Override
- void apply(Project project) {
-
- def utils = new Utils(project.baseVersion, project.logger)
-
- project.plugins.apply 'maven-publish' // for generating pom.*
-
- def mavenTempResourcePath = "${project.buildDir}/mvn/META-INF/maven/${project.group}/${project.archivesBaseName}"
-
- project.model {
- // Write pom to temp location to be picked up later,
- // generatePomFileForMavenJavaPublication task comes from maven-publish.
- tasks.generatePomFileForMavenJavaPublication {
- destination = project.file("$mavenTempResourcePath/pom.xml")
- }
- }
-
- // Configure pom generation
- project.publishing {
- publications {
- mavenJava(MavenPublication) {
- artifactId project.archivesBaseName
- from project.components.java
- utils.pomFiller(pom, project.subPomName, project.subPomDesc)
- if (project.hasProperty('shadowJar')) {
- pom.withXml {
- if (asNode().dependencies.isEmpty()) {
- asNode().appendNode('dependencies')
- }
- project.configurations.shadowCompile.dependencies.each {
- def dep = asNode().dependencies[0].appendNode('dependency')
- dep.appendNode('groupId', it.group)
- dep.appendNode('artifactId', it.name)
- dep.appendNode('version', it.version)
- dep.appendNode('scope', 'compile')
- }
- project.configurations.pomOnlyCompile.dependencies.each {
- def dep = asNode().dependencies[0].appendNode('dependency')
- dep.appendNode('groupId', it.group)
- dep.appendNode('artifactId', it.name)
- dep.appendNode('version', it.version)
- dep.appendNode('scope', 'compile')
- }
- project.configurations.shadowProvided.dependencies.each {
- def dep = asNode().dependencies[0].appendNode('dependency')
- dep.appendNode('groupId', it.group)
- dep.appendNode('artifactId', it.name)
- dep.appendNode('version', it.version)
- dep.appendNode('scope', 'provided')
- }
- project.configurations.pomOnlyProvided.dependencies.each {
- def dep = asNode().dependencies[0].appendNode('dependency')
- dep.appendNode('groupId', it.group)
- dep.appendNode('artifactId', it.name)
- dep.appendNode('version', it.version)
- dep.appendNode('scope', 'provided')
- }
- }
- }
- }
- }
- }
-
- // Write pom.properties to temp location
- project.task('writeMavenProperties') {
- doLast {
- project.file(mavenTempResourcePath).mkdirs()
- def propertyFile = project.file "$mavenTempResourcePath/pom.properties"
- def props = new Properties()
- props.setProperty('version', project.version)
- props.setProperty('groupId', project.group)
- props.setProperty('artifactId', project.archivesBaseName)
- props.store propertyFile.newWriter(), null
- }
- }
-
- if (utils.isReleaseVersion) {
- //ensure that we generate maven stuff and delay resolution as the first task is created dynamically
- project.processResources.dependsOn {
- project.tasks.findAll { task ->
- task.name == 'generatePomFileForMavenJavaPublication' || task.name == 'writeMavenProperties'
- }
- }
-
- // Pick up pom.xml and pom.properties from temp location
- project.sourceSets {
- main {
- resources {
- srcDir "${project.buildDir}/mvn"
- }
- }
- }
- }
- }
-}
diff --git a/buildSrc/src/main/groovy/EhPomMangle.groovy b/buildSrc/src/main/groovy/EhPomMangle.groovy
deleted file mode 100644
index 271271ab3d..0000000000
--- a/buildSrc/src/main/groovy/EhPomMangle.groovy
+++ /dev/null
@@ -1,97 +0,0 @@
-import org.gradle.api.Plugin
-import org.gradle.api.Project
-import org.gradle.api.artifacts.ProjectDependency
-import org.gradle.api.artifacts.maven.Conf2ScopeMappingContainer
-import org.gradle.api.artifacts.maven.MavenDeployment
-import org.gradle.api.plugins.MavenPlugin
-import scripts.Utils
-
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * EhPomMangle
- * Removes all implicit dependencies from the pom
- * and adds only what is specified in (from shadowJar)
- *
- * project.configurations.shadowCompile (as compile)
- * project.configurations.shadowProvided (as provided)
- *
- * as well as (these do not affect shadow)
- *
- * project.configurations.pomOnlyCompile
- * project.configurations.pomOnlyProvided
- *
- * Also defines the pom defaults (name, desc, etc) unless overridden in gradle.properties
- * Also sets up upload repositories
- */
-class EhPomMangle implements Plugin {
-
- @Override
- void apply(Project project) {
- def utils = new Utils(project.baseVersion, project.logger)
-
- project.plugins.apply 'java'
- project.plugins.apply 'maven'
- project.plugins.apply 'signing'
-
- project.configurations {
- shadowCompile
- shadowProvided
- pomOnlyCompile
- pomOnlyProvided
- }
-
- def artifactFiltering = {
- pom.scopeMappings.mappings.remove(project.configurations.compile)
- pom.scopeMappings.mappings.remove(project.configurations.runtime)
- pom.scopeMappings.mappings.remove(project.configurations.testCompile)
- pom.scopeMappings.mappings.remove(project.configurations.testRuntime)
- pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadowCompile, Conf2ScopeMappingContainer.COMPILE)
- pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadowProvided, Conf2ScopeMappingContainer.PROVIDED)
-
- //Anything extra to add to pom that isn't in the shadowed jar or compilation
- pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.pomOnlyCompile, Conf2ScopeMappingContainer.COMPILE)
- pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.pomOnlyProvided, Conf2ScopeMappingContainer.PROVIDED)
-
- utils.pomFiller(pom, project.subPomName, project.subPomDesc)
-
- }
-
- project.install {
- repositories.mavenInstaller artifactFiltering
- }
-
- project.uploadArchives {
- repositories {
- mavenDeployer ({
- beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)}
-
- if (project.isReleaseVersion) {
- repository(url: project.deployUrl) {
- authentication(userName: project.deployUser, password: project.deployPwd)
- }
- } else {
- repository(id: 'sonatype-nexus-snapshot', url: 'https://oss.sonatype.org/content/repositories/snapshots') {
- authentication(userName: project.sonatypeUser, password: project.sonatypePwd)
- }
- }
- } << artifactFiltering)
- }
- }
-
- }
-}
diff --git a/buildSrc/src/main/groovy/scripts/Utils.groovy b/buildSrc/src/main/groovy/scripts/Utils.groovy
deleted file mode 100644
index b674c95236..0000000000
--- a/buildSrc/src/main/groovy/scripts/Utils.groovy
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package scripts
-
-import org.gradle.api.JavaVersion
-import org.gradle.internal.jvm.Jvm
-
-class Utils {
-
- String version
- String revision
- boolean isReleaseVersion
-
- Utils(version, logger) {
- this.version = version
- this.isReleaseVersion = !version.endsWith('SNAPSHOT')
- def tmp = System.getenv("GIT_COMMIT")
- if(tmp != null) {
- revision = tmp
- } else {
- logger.debug('Revision not found in system properties, trying command line')
- def cmd = 'git rev-parse HEAD'
- try {
- def proc = cmd.execute()
- revision = proc.text.trim()
- } catch (IOException ioex) {
- revision = 'Unknown'
- }
- }
- logger.debug(revision)
- }
-
- def fillManifest(manifest, title) {
- manifest.attributes(
- 'provider': 'gradle',
- 'Implementation-Title': title,
- 'Implementation-Version': "$version $revision",
- 'Built-By': System.getProperty('user.name'),
- 'Built-JDK': System.getProperty('java.version'))
- if (isReleaseVersion) {
- manifest.attributes('Build-Time': new Date().format("yyyy-MM-dd'T'HH:mm:ssZ"))
- }
- }
-
- def pomFiller(pom, nameVar, descriptionVar) {
- pom.withXml {
- asNode().version[0] + {
- name nameVar
- description descriptionVar
- url 'http://ehcache.org'
- organization {
- name 'Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.'
- url 'http://terracotta.org'
- }
- issueManagement {
- system 'Github'
- url 'https://github.com/ehcache/ehcache3/issues'
- }
- scm {
- url 'https://github.com/ehcache/ehcache3'
- connection 'scm:git:https://github.com/ehcache/ehcache3.git'
- developerConnection 'scm:git:git@github.com:ehcache/ehcache3.git'
- }
- licenses {
- license {
- name 'The Apache Software License, Version 2.0'
- url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
- distribution 'repo'
- }
- }
- developers {
- developer {
- name 'Terracotta Engineers'
- email 'tc-oss@softwareag.com'
- organization 'Terracotta Inc., a wholly-owned subsidiary of Software AG USA, Inc.'
- organizationUrl 'http://ehcache.org'
- }
- }
- }
- }
- }
-
- static def jvmForHome(File home) {
- def java = Jvm.forHome(home).javaExecutable
- def versionCommand = "$java -version".execute();
- def version = JavaVersion.toVersion((versionCommand.err.text =~ /\w+ version "(.+)"/)[0][1])
- return Jvm.discovered(home, version)
- }
-}
diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle
deleted file mode 100644
index dd5e7e5e94..0000000000
--- a/clustered/client/build.gradle
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-apply plugin: EhDeploy
-
-dependencies {
- compileOnly project(':api')
- compileOnly project(':xml')
- compile project(':clustered:common'), "org.slf4j:slf4j-api:$slf4jVersion"
- provided "org.terracotta:entity-client-api:$terracottaApisVersion"
- provided "org.terracotta:runnel:$terracottaPlatformVersion"
-
- testCompile project(':api')
- testCompile project(':xml')
- testCompile project(':transactions')
- testCompile(project(':clustered:server')) {
- exclude group: 'org.terracotta.internal', module: 'tc-config-parser'
- }
- testCompile "org.terracotta:entity-test-lib:$terracottaPassthroughTestingVersion"
- testCompile "org.terracotta:passthrough-server:$terracottaPassthroughTestingVersion"
-}
-
-test {
- if (testJava.javaVersion.isJava9Compatible()) {
- jvmArgs += ['--add-modules', 'java.xml.bind']
- }
-}
-
-tasks.withType(JavaCompile) {
- options.compilerArgs += ['-Werror']
-}
diff --git a/clustered/client/gradle.properties b/clustered/client/gradle.properties
deleted file mode 100644
index 56c6dfbf5d..0000000000
--- a/clustered/client/gradle.properties
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# Copyright Terracotta, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-subPomName = Ehcache 3 Client Side Clustering module
-subPomDesc = The Client Side Clustering module of Ehcache 3
-osgi = {"Export-Package" : ["!org.ehcache.clustered.client.internal.*", "!sun.misc"],\
- "Import-Package" : ["!org.ehcache.clustered.client*", "!sun.misc*"]}
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java b/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java
deleted file mode 100644
index 0499bba666..0000000000
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteringServiceConfiguration.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.clustered.client.config;
-
-import org.ehcache.CacheManager;
-import org.ehcache.PersistentCacheManager;
-import org.ehcache.clustered.client.service.ClusteringService;
-import org.ehcache.config.builders.CacheManagerBuilder;
-import org.ehcache.config.builders.CacheManagerConfiguration;
-import org.ehcache.core.HumanReadable;
-import org.ehcache.spi.service.ServiceCreationConfiguration;
-
-import java.net.URI;
-import java.time.Duration;
-import java.util.Map;
-import java.util.Objects;
-
-import org.ehcache.clustered.common.ServerSideConfiguration;
-
-import static org.ehcache.clustered.client.config.Timeouts.DEFAULT_OPERATION_TIMEOUT;
-
-/**
- * Specifies the configuration for a {@link ClusteringService}.
- */
-// TODO: Should this accept/hold a *list* of URIs?
-public class ClusteringServiceConfiguration
- implements ServiceCreationConfiguration,
- CacheManagerConfiguration,
- HumanReadable {
-
- private final URI clusterUri;
- private final boolean autoCreate;
- private final ServerSideConfiguration serverConfiguration;
- private final Timeouts timeouts;
-
- /**
- * Creates a {@code ClusteringServiceConfiguration} from the properties provided.
- *
- * @param clusterUri the non-{@code null} URI identifying the cluster server
- *
- * @throws NullPointerException if {@code clusterUri} is {@code null}
- * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations
- */
- public ClusteringServiceConfiguration(URI clusterUri) {
- this(clusterUri, Timeouts.DEFAULT);
- }
-
- /**
- * Creates a {@code ClusteringServiceConfiguration} from the properties provided.
- *
- * @param clusterUri the non-{@code null} URI identifying the cluster server
- * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations
- *
- * @throws NullPointerException if {@code clusterUri} is {@code null}
- * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations
- */
- public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts) {
- this(clusterUri, timeouts, null);
- }
-
- /**
- * Creates a {@code ClusteringServiceConfiguration} from the properties provided.
- *
- * @param clusterUri the non-{@code null} URI identifying the cluster server
- * @param serverConfig the server side entity configuration required
- *
- * @throws NullPointerException if {@code clusterUri} is {@code null}
- * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations
- */
- public ClusteringServiceConfiguration(URI clusterUri, ServerSideConfiguration serverConfig) {
- this(clusterUri, Timeouts.DEFAULT, serverConfig);
- }
-
- /**
- * Creates a {@code ClusteringServiceConfiguration} from the properties provided.
- *
- * @param clusterUri the non-{@code null} URI identifying the cluster server
- * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations
- * @param serverConfig the server side entity configuration required
- *
- * @throws NullPointerException if {@code clusterUri} is {@code null}
- * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations
- */
- public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts, ServerSideConfiguration serverConfig) {
- this(clusterUri, timeouts, false, serverConfig);
- }
-
- /**
- * Creates a {@code ClusteringServiceConfiguration} from the properties provided.
- *
- * @param clusterUri the non-{@code null} URI identifying the cluster server
- * @param autoCreate {@code true} if server components should be auto created
- * @param serverConfig the server side entity configuration required
- *
- * @throws NullPointerException if {@code clusterUri} is {@code null}
- * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations
- */
- public ClusteringServiceConfiguration(URI clusterUri, boolean autoCreate, ServerSideConfiguration serverConfig) {
- this(clusterUri, Timeouts.DEFAULT, autoCreate, serverConfig);
- }
-
- /**
- * Creates a {@code ClusteringServiceConfiguration} from the properties provided.
- *
- * @param clusterUri the non-{@code null} URI identifying the cluster server
- * @param timeouts the {@link Timeouts} specifying the time limit for clustered cache operations
- * @param autoCreate {@code true} if server components should be auto created
- * @param serverConfig the server side entity configuration required
- *
- * @throws NullPointerException if {@code clusterUri} is {@code null}
- * @throws IllegalArgumentException if {@code clusterUri} is not URI valid for cluster operations
- */
- public ClusteringServiceConfiguration(URI clusterUri, Timeouts timeouts, boolean autoCreate, ServerSideConfiguration serverConfig) {
- this.clusterUri = Objects.requireNonNull(clusterUri, "Cluster URI cannot be null");
- this.autoCreate = autoCreate;
- this.serverConfiguration = serverConfig;
- this.timeouts = Objects.requireNonNull(timeouts, "Operation timeouts cannot be null");
- }
-
- protected ClusteringServiceConfiguration(ClusteringServiceConfiguration baseConfig) {
- Objects.requireNonNull(baseConfig, "Base configuration cannot be null");
- this.clusterUri = baseConfig.getClusterUri();
- this.timeouts = baseConfig.getTimeouts();
- this.autoCreate = baseConfig.isAutoCreate();
- this.serverConfiguration = baseConfig.getServerConfiguration();
- }
-
- /**
- * The {@code URI} of the cluster that will be connected to.
- *
- * @return the cluster {@code URI}
- */
- public URI getClusterUri() {
- return clusterUri;
- }
-
- /**
- * Returns {@code true} is server side components should be automatically created.
- *
- * @return {@code true} is auto-create is enabled
- */
- public boolean isAutoCreate() {
- return autoCreate;
- }
-
- /**
- * The default server resource to use for caches and pools, or {@code null} if one is not defined.
- *
- * @return the default server resource
- */
- public ServerSideConfiguration getServerConfiguration() {
- return serverConfiguration;
- }
-
- /**
- * The timeouts for all cache operations
- *
- * @return the cache timeouts
- */
- public Timeouts getTimeouts() {
- return timeouts;
- }
-
- /**
- * The timeout for cache read operations.
- *
- * @return the cache read operation timeout
- *
- * @deprecated Use {@link #getTimeouts()}
- */
- @Deprecated
- public Duration getReadOperationTimeout() {
- return timeouts.getReadOperationTimeout();
- }
-
- @Override
- public Class getServiceType() {
- return ClusteringService.class;
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public CacheManagerBuilder builder(CacheManagerBuilder extends CacheManager> other) {
- return (CacheManagerBuilder) other.using(this); // unchecked
- }
-
- @Override
- public String readableString() {
- return this.getClass().getName() + ":\n " +
- "clusterUri: " + getClusterUri()+ "\n " +
- "timeouts: " + getTimeouts()+ "\n " +
- "autoCreate: " + isAutoCreate() + "\n " +
- "defaultServerResource: " + serverConfiguration.getDefaultServerResource() + "\n " +
- readablePoolsString();
- }
-
- private String readablePoolsString() {
- StringBuilder pools = new StringBuilder("resourcePools:\n");
- for(Map.Entry entry : serverConfiguration.getResourcePools().entrySet()) {
- pools.append(" " + entry.getKey() + ": " + entry.getValue() + "\n");
- }
- return pools.toString();
- }
-}
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java b/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java
deleted file mode 100644
index 0374f5943b..0000000000
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteringServiceConfigurationBuilder.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.ehcache.clustered.client.config.builders;
-
-import java.net.URI;
-
-import org.ehcache.clustered.client.config.ClusteringServiceConfiguration;
-
-import java.time.Duration;
-import java.time.temporal.ChronoUnit;
-import java.util.Objects;
-import java.util.concurrent.TimeUnit;
-import org.ehcache.clustered.client.config.Timeouts;
-import org.ehcache.clustered.common.ServerSideConfiguration;
-import org.ehcache.config.Builder;
-
-/**
- * A builder of ClusteringService configurations.
- */
-public final class ClusteringServiceConfigurationBuilder implements Builder {
-
- private final URI clusterUri;
- private final Timeouts timeouts;
- private final boolean autoCreate;
-
- /**
- * Creates a new builder connecting to the given cluster.
- *
- * @param clusterUri cluster URI
- *
- * @return a clustering service configuration builder
- */
- public static ClusteringServiceConfigurationBuilder cluster(URI clusterUri) {
- return new ClusteringServiceConfigurationBuilder(clusterUri, TimeoutsBuilder.timeouts().build(), false);
- }
-
- private ClusteringServiceConfigurationBuilder(URI clusterUri, Timeouts timeouts, boolean autoCreate) {
- this.clusterUri = Objects.requireNonNull(clusterUri, "Cluster URI can't be null");
- this.timeouts = Objects.requireNonNull(timeouts, "Timeouts can't be null");
- this.autoCreate = autoCreate;
- }
-
- /**
- * Support connection to an existing entity or create if the entity if absent.
- *
- * @return a clustering service configuration builder
- */
- public ServerSideConfigurationBuilder autoCreate() {
- return new ServerSideConfigurationBuilder(new ClusteringServiceConfigurationBuilder(this.clusterUri, this.timeouts, true));
- }
-
- /**
- * Only support connection to an existing entity.
- *
- * @return a clustering service configuration builder
- */
- public ServerSideConfigurationBuilder expecting() {
- return new ServerSideConfigurationBuilder(new ClusteringServiceConfigurationBuilder(this.clusterUri, this.timeouts, false));
- }
-
- /**
- * Adds timeouts.
- * Read operations which time out return a result comparable to a cache miss.
- * Write operations which time out won't do anything.
- * Lifecycle operations which time out will fail with exception
- *
- * @param timeouts the amount of time permitted for all operations
- *
- * @return a clustering service configuration builder
- *
- * @throws NullPointerException if {@code timeouts} is {@code null}
- */
- public ClusteringServiceConfigurationBuilder timeouts(Timeouts timeouts) {
- return new ClusteringServiceConfigurationBuilder(this.clusterUri, timeouts, this.autoCreate);
- }
-
- /**
- * Adds timeouts.
- * Read operations which time out return a result comparable to a cache miss.
- * Write operations which time out won't do anything.
- * Lifecycle operations which time out will fail with exception
- *
- * @param timeoutsBuilder the builder for amount of time permitted for all operations
- *
- * @return a clustering service configuration builder
- *
- * @throws NullPointerException if {@code timeouts} is {@code null}
- */
- public ClusteringServiceConfigurationBuilder timeouts(Builder extends Timeouts> timeoutsBuilder) {
- return new ClusteringServiceConfigurationBuilder(this.clusterUri, timeoutsBuilder.build(), this.autoCreate);
- }
-
- /**
- * Adds a read operation timeout. Read operations which time out return a result comparable to
- * a cache miss.
- *
- * @param duration the amount of time permitted for read operations
- * @param unit the time units for {@code duration}
- *
- * @return a clustering service configuration builder
- *
- * @throws NullPointerException if {@code unit} is {@code null}
- * @throws IllegalArgumentException if {@code amount} is negative
- *
- * @deprecated Use {@link #timeouts(Timeouts)}. Note that calling this method will override any timeouts previously set
- * by setting the read operation timeout to the specified value and everything else to its default.
- */
- @Deprecated
- public ClusteringServiceConfigurationBuilder readOperationTimeout(long duration, TimeUnit unit) {
- Duration readTimeout = Duration.of(duration, toChronoUnit(unit));
- return timeouts(TimeoutsBuilder.timeouts().read(readTimeout).build());
- }
-
- @Override
- public ClusteringServiceConfiguration build() {
- return new ClusteringServiceConfiguration(clusterUri, timeouts, autoCreate, null);
- }
-
- /**
- * Internal method to build a new {@link ClusteringServiceConfiguration} from the {@link ServerSideConfigurationBuilder}.
- *
- * @param serverSideConfiguration the {@code ServerSideConfiguration} to use
- *
- * @return a new {@code ClusteringServiceConfiguration} instance built from {@code this}
- * {@code ClusteringServiceConfigurationBuilder} and the {@code serverSideConfiguration} provided
- */
- ClusteringServiceConfiguration build(ServerSideConfiguration serverSideConfiguration) {
- return new ClusteringServiceConfiguration(clusterUri, timeouts, autoCreate, serverSideConfiguration);
- }
-
- private static ChronoUnit toChronoUnit(TimeUnit unit) {
- if(unit == null) {
- return null;
- }
- switch (unit) {
- case NANOSECONDS: return ChronoUnit.NANOS;
- case MICROSECONDS: return ChronoUnit.MICROS;
- case MILLISECONDS: return ChronoUnit.MILLIS;
- case SECONDS: return ChronoUnit.SECONDS;
- case MINUTES: return ChronoUnit.MINUTES;
- case HOURS: return ChronoUnit.HOURS;
- case DAYS: return ChronoUnit.DAYS;
- default: throw new AssertionError("Unknown unit: " + unit);
- }
- }
-
-}
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java
deleted file mode 100644
index 11903da55c..0000000000
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteredResourceConfigurationParser.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.clustered.client.internal.config.xml;
-
-import org.ehcache.clustered.client.internal.config.DedicatedClusteredResourcePoolImpl;
-import org.ehcache.clustered.client.internal.config.SharedClusteredResourcePoolImpl;
-import org.ehcache.clustered.client.internal.config.ClusteredResourcePoolImpl;
-import org.ehcache.config.ResourcePool;
-import org.ehcache.config.units.MemoryUnit;
-import org.ehcache.xml.CacheResourceConfigurationParser;
-import org.ehcache.xml.exceptions.XmlConfigurationException;
-import org.w3c.dom.Attr;
-import org.w3c.dom.DOMException;
-import org.w3c.dom.Element;
-
-import java.io.IOException;
-import java.net.URI;
-
-import javax.xml.transform.Source;
-import javax.xml.transform.stream.StreamSource;
-
-import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.NAMESPACE;
-import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.XML_SCHEMA;
-
-/**
- * Provides a parser for the {@code /config/cache/resources} extension elements.
- */
-public class ClusteredResourceConfigurationParser implements CacheResourceConfigurationParser {
- @Override
- public Source getXmlSchema() throws IOException {
- return new StreamSource(XML_SCHEMA.openStream());
- }
-
- @Override
- public URI getNamespace() {
- return NAMESPACE;
- }
-
- protected ResourcePool parseResourceConfig(final Element fragment) {
- final String elementName = fragment.getLocalName();
- if ("clustered-shared".equals(elementName)) {
- final String sharing = fragment.getAttribute("sharing");
- return new SharedClusteredResourcePoolImpl(sharing);
-
- } else if ("clustered-dedicated".equals(elementName)) {
- // 'from' attribute is optional on 'clustered-dedicated' element
- final Attr fromAttr = fragment.getAttributeNode("from");
- final String from = (fromAttr == null ? null : fromAttr.getValue());
-
- final String unitValue = fragment.getAttribute("unit").toUpperCase();
- final MemoryUnit sizeUnits;
- try {
- sizeUnits = MemoryUnit.valueOf(unitValue);
- } catch (IllegalArgumentException e) {
- throw new XmlConfigurationException(String.format("XML configuration element <%s> 'unit' attribute '%s' is not valid", elementName, unitValue), e);
- }
-
- final String sizeValue;
- try {
- sizeValue = fragment.getFirstChild().getNodeValue();
- } catch (DOMException e) {
- throw new XmlConfigurationException(String.format("XML configuration element <%s> value is not valid", elementName), e);
- }
- final long size;
- try {
- size = Long.parseLong(sizeValue);
- } catch (NumberFormatException e) {
- throw new XmlConfigurationException(String.format("XML configuration element <%s> value '%s' is not valid", elementName, sizeValue), e);
- }
-
- return new DedicatedClusteredResourcePoolImpl(from, size, sizeUnits);
- } else if("clustered".equals(elementName)) {
- return new ClusteredResourcePoolImpl();
- }
- return null;
- }
-
- @Override
- public ResourcePool parseResourceConfiguration(final Element fragment) {
- ResourcePool resourcePool = parseResourceConfig(fragment);
- if (resourcePool != null) {
- return resourcePool;
- }
- throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported",
- fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName())));
- }
-}
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParser.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParser.java
deleted file mode 100644
index ddc8fc1f62..0000000000
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParser.java
+++ /dev/null
@@ -1,263 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.clustered.client.internal.config.xml;
-
-import org.ehcache.clustered.client.config.ClusteredStoreConfiguration;
-import org.ehcache.clustered.client.config.ClusteringServiceConfiguration;
-import org.ehcache.clustered.client.config.Timeouts;
-import org.ehcache.clustered.client.config.builders.TimeoutsBuilder;
-import org.ehcache.clustered.client.internal.store.ClusteredStore;
-import org.ehcache.clustered.client.service.ClusteringService;
-import org.ehcache.clustered.common.Consistency;
-import org.ehcache.clustered.common.ServerSideConfiguration;
-import org.ehcache.clustered.common.ServerSideConfiguration.Pool;
-import org.ehcache.config.units.MemoryUnit;
-import org.ehcache.spi.service.ServiceConfiguration;
-import org.ehcache.spi.service.ServiceCreationConfiguration;
-import org.ehcache.xml.CacheManagerServiceConfigurationParser;
-import org.ehcache.xml.CacheServiceConfigurationParser;
-import org.ehcache.xml.exceptions.XmlConfigurationException;
-import org.ehcache.xml.model.TimeType;
-import org.w3c.dom.Attr;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-import java.io.IOException;
-import java.math.BigInteger;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.time.Duration;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map;
-
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.JAXBElement;
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Unmarshaller;
-import javax.xml.transform.Source;
-import javax.xml.transform.stream.StreamSource;
-
-import static org.ehcache.clustered.client.internal.config.xml.ClusteredCacheConstants.*;
-import static org.ehcache.xml.XmlModel.convertToJavaTimeUnit;
-
-/**
- * Provides parsing support for the {@code } elements representing a {@link ClusteringService ClusteringService}.
- *
- * @see ClusteredCacheConstants#XSD
- */
-public class ClusteringServiceConfigurationParser implements CacheManagerServiceConfigurationParser,
- CacheServiceConfigurationParser {
-
- public static final String CLUSTERED_STORE_ELEMENT_NAME = "clustered-store";
- public static final String CONSISTENCY_ATTRIBUTE_NAME = "consistency";
-
- @Override
- public Source getXmlSchema() throws IOException {
- return new StreamSource(XML_SCHEMA.openStream());
- }
-
- @Override
- public URI getNamespace() {
- return NAMESPACE;
- }
-
- @Override
- public ServiceConfiguration parseServiceConfiguration(Element fragment) {
- if (CLUSTERED_STORE_ELEMENT_NAME.equals(fragment.getLocalName())) {
- if (fragment.hasAttribute(CONSISTENCY_ATTRIBUTE_NAME)) {
- return new ClusteredStoreConfiguration(Consistency.valueOf(fragment.getAttribute("consistency").toUpperCase()));
- } else {
- return new ClusteredStoreConfiguration();
- }
- }
- throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported",
- fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName())));
- }
-
- /**
- * Complete interpretation of the top-level elements defined in {@value ClusteredCacheConstants#XSD}
.
- * This method is called only for those elements from the namespace set by {@link ClusteredCacheConstants#NAMESPACE}.
- *
- * This method presumes the element presented is valid according to the XSD.
- *
- * @param fragment the XML fragment to process
- *
- * @return a {@link org.ehcache.clustered.client.config.ClusteringServiceConfiguration ClusteringServiceConfiguration}
- */
- @Override
- public ServiceCreationConfiguration parseServiceCreationConfiguration(final Element fragment) {
-
- if ("cluster".equals(fragment.getLocalName())) {
-
- ServerSideConfig serverConfig = null;
- URI connectionUri = null;
- Duration getTimeout = null, putTimeout = null, connectionTimeout = null;
- final NodeList childNodes = fragment.getChildNodes();
- for (int i = 0; i < childNodes.getLength(); i++) {
- final Node item = childNodes.item(i);
- if (Node.ELEMENT_NODE == item.getNodeType()) {
- if ("connection".equals(item.getLocalName())) {
- /*
- * is a required element in the XSD
- */
- final Attr urlAttribute = ((Element)item).getAttributeNode("url");
- final String urlValue = urlAttribute.getValue();
- try {
- connectionUri = new URI(urlValue);
- } catch (URISyntaxException e) {
- throw new XmlConfigurationException(
- String.format("Value of %s attribute on XML configuration element <%s> in <%s> is not a valid URI - '%s'",
- urlAttribute.getName(), item.getNodeName(), fragment.getTagName(), connectionUri), e);
- }
-
- } else if ("read-timeout".equals(item.getLocalName())) {
- /*
- * is an optional element
- */
- getTimeout = processTimeout(fragment, item);
-
- } else if ("write-timeout".equals(item.getLocalName())) {
- /*
- * is an optional element
- */
- putTimeout = processTimeout(fragment, item);
-
- } else if ("connection-timeout".equals(item.getLocalName())) {
- /*
- * is an optional element
- */
- connectionTimeout = processTimeout(fragment, item);
-
- } else if ("server-side-config".equals(item.getLocalName())) {
- /*
- * is an optional element
- */
- serverConfig = processServerSideConfig(item);
- }
- }
- }
-
- try {
- Timeouts timeouts = getTimeouts(getTimeout, putTimeout, connectionTimeout);
- if (serverConfig == null) {
- return new ClusteringServiceConfiguration(connectionUri, timeouts);
- }
-
- ServerSideConfiguration serverSideConfiguration;
- if (serverConfig.defaultServerResource == null) {
- serverSideConfiguration = new ServerSideConfiguration(serverConfig.pools);
- } else {
- serverSideConfiguration = new ServerSideConfiguration(serverConfig.defaultServerResource, serverConfig.pools);
- }
-
- return new ClusteringServiceConfiguration(connectionUri, timeouts, serverConfig.autoCreate, serverSideConfiguration);
- } catch (IllegalArgumentException e) {
- throw new XmlConfigurationException(e);
- }
- }
- throw new XmlConfigurationException(String.format("XML configuration element <%s> in <%s> is not supported",
- fragment.getTagName(), (fragment.getParentNode() == null ? "null" : fragment.getParentNode().getLocalName())));
- }
-
- private Timeouts getTimeouts(Duration getTimeout, Duration putTimeout, Duration connectionTimeout) {
- TimeoutsBuilder builder = TimeoutsBuilder.timeouts();
- if (getTimeout != null) {
- builder.read(getTimeout);
- }
- if(putTimeout != null) {
- builder.write(putTimeout);
- }
- if(connectionTimeout != null) {
- builder.connection(connectionTimeout);
- }
- return builder.build();
- }
-
- private Duration processTimeout(Element parentElement, Node timeoutNode) {
- try {
- // are direct subtype of ehcache:time-type; use JAXB to interpret it
- JAXBContext context = JAXBContext.newInstance(TimeType.class.getPackage().getName());
- Unmarshaller unmarshaller = context.createUnmarshaller();
- JAXBElement jaxbElement = unmarshaller.unmarshal(timeoutNode, TimeType.class);
-
- TimeType timeType = jaxbElement.getValue();
- BigInteger amount = timeType.getValue();
- if (amount.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) {
- throw new XmlConfigurationException(
- String.format("Value of XML configuration element <%s> in <%s> exceeds allowed value - %s",
- timeoutNode.getNodeName(), parentElement.getTagName(), amount));
- }
- return Duration.of(amount.longValue(), convertToJavaTimeUnit(timeType.getUnit()));
-
- } catch (JAXBException e) {
- throw new XmlConfigurationException(e);
- }
- }
-
- private ServerSideConfig processServerSideConfig(Node serverSideConfigElement) {
- ServerSideConfig serverSideConfig = new ServerSideConfig();
- serverSideConfig.autoCreate = Boolean.parseBoolean(((Element) serverSideConfigElement).getAttribute("auto-create"));
- final NodeList serverSideNodes = serverSideConfigElement.getChildNodes();
- for (int i = 0; i < serverSideNodes.getLength(); i++) {
- final Node item = serverSideNodes.item(i);
- if (Node.ELEMENT_NODE == item.getNodeType()) {
- String nodeLocalName = item.getLocalName();
- if ("default-resource".equals(nodeLocalName)) {
- serverSideConfig.defaultServerResource = ((Element)item).getAttribute("from");
-
- } else if ("shared-pool".equals(nodeLocalName)) {
- Element sharedPoolElement = (Element)item;
- String poolName = sharedPoolElement.getAttribute("name"); // required
- Attr fromAttr = sharedPoolElement.getAttributeNode("from"); // optional
- String fromResource = (fromAttr == null ? null : fromAttr.getValue());
- Attr unitAttr = sharedPoolElement.getAttributeNode("unit"); // optional - default 'B'
- String unit = (unitAttr == null ? "B" : unitAttr.getValue());
- MemoryUnit memoryUnit = MemoryUnit.valueOf(unit.toUpperCase(Locale.ENGLISH));
-
- String quantityValue = sharedPoolElement.getFirstChild().getNodeValue();
- long quantity;
- try {
- quantity = Long.parseLong(quantityValue);
- } catch (NumberFormatException e) {
- throw new XmlConfigurationException("Magnitude of value specified for is too large");
- }
-
- Pool poolDefinition;
- if (fromResource == null) {
- poolDefinition = new Pool(memoryUnit.toBytes(quantity));
- } else {
- poolDefinition = new Pool(memoryUnit.toBytes(quantity), fromResource);
- }
-
- if (serverSideConfig.pools.put(poolName, poolDefinition) != null) {
- throw new XmlConfigurationException("Duplicate definition for ");
- }
- }
- }
- }
- return serverSideConfig;
- }
-
- private static final class ServerSideConfig {
- private boolean autoCreate = false;
- private String defaultServerResource = null;
- private final Map pools = new HashMap<>();
- }
-}
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java
deleted file mode 100644
index 51b5a90c62..0000000000
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java
+++ /dev/null
@@ -1,476 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.clustered.client.internal.service;
-
-import org.ehcache.CachePersistenceException;
-import org.ehcache.clustered.client.config.ClusteredResourcePool;
-import org.ehcache.clustered.client.config.ClusteredResourceType;
-import org.ehcache.clustered.client.config.ClusteringServiceConfiguration;
-import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntity;
-import org.ehcache.clustered.client.internal.ClusterTierManagerClientEntityFactory;
-import org.ehcache.clustered.client.internal.ClusterTierManagerCreationException;
-import org.ehcache.clustered.client.internal.ClusterTierManagerNotFoundException;
-import org.ehcache.clustered.client.internal.ClusterTierManagerValidationException;
-import org.ehcache.clustered.client.config.Timeouts;
-import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity;
-import org.ehcache.clustered.client.internal.store.EventualServerStoreProxy;
-import org.ehcache.clustered.client.internal.store.ServerStoreProxy;
-import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback;
-import org.ehcache.clustered.client.internal.store.StrongServerStoreProxy;
-import org.ehcache.clustered.client.service.ClientEntityFactory;
-import org.ehcache.clustered.client.service.ClusteringService;
-import org.ehcache.clustered.client.service.EntityBusyException;
-import org.ehcache.clustered.client.service.EntityService;
-import org.ehcache.clustered.common.Consistency;
-import org.ehcache.clustered.common.internal.ServerStoreConfiguration;
-import org.ehcache.clustered.common.internal.exceptions.DestroyInProgressException;
-import org.ehcache.config.CacheConfiguration;
-import org.ehcache.config.ResourceType;
-import org.ehcache.core.spi.store.Store;
-import org.ehcache.spi.persistence.StateRepository;
-import org.ehcache.spi.service.MaintainableService;
-import org.ehcache.spi.service.Service;
-import org.ehcache.spi.service.ServiceProvider;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.terracotta.connection.Connection;
-import org.terracotta.connection.ConnectionException;
-import org.terracotta.connection.ConnectionFactory;
-import org.terracotta.connection.ConnectionPropertyNames;
-import org.terracotta.connection.entity.Entity;
-import org.terracotta.exception.EntityAlreadyExistsException;
-import org.terracotta.exception.EntityNotFoundException;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Arrays;
-import java.util.Properties;
-import java.util.Random;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.TimeoutException;
-
-/**
- * Provides support for accessing server-based cluster services.
- */
-class DefaultClusteringService implements ClusteringService, EntityService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(DefaultClusteringService.class);
-
- static final String CONNECTION_PREFIX = "Ehcache:";
-
- private final ClusteringServiceConfiguration configuration;
- private final URI clusterUri;
- private final String entityIdentifier;
- private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap<>();
- private final Timeouts timeouts;
-
- private volatile Connection clusterConnection;
- private ClusterTierManagerClientEntityFactory entityFactory;
- private ClusterTierManagerClientEntity entity;
- private final ConcurrentMap clusterTierEntities = new ConcurrentHashMap<>();
-
- private volatile boolean inMaintenance = false;
-
- DefaultClusteringService(final ClusteringServiceConfiguration configuration) {
- this.configuration = configuration;
- URI ehcacheUri = configuration.getClusterUri();
- this.clusterUri = extractClusterUri(ehcacheUri);
- this.entityIdentifier = clusterUri.relativize(ehcacheUri).getPath();
- this.timeouts = configuration.getTimeouts();
- }
-
- private static URI extractClusterUri(URI uri) {
- try {
- return new URI(uri.getScheme(), uri.getAuthority(), null, null, null);
- } catch (URISyntaxException e) {
- throw new AssertionError(e);
- }
- }
-
- @Override
- public ClusteringServiceConfiguration getConfiguration() {
- return this.configuration;
- }
-
- @Override
- public ClientEntityFactory newClientEntityFactory(String entityIdentifier, Class entityType, long entityVersion, C configuration) {
- return new AbstractClientEntityFactory(entityIdentifier, entityType, entityVersion, configuration) {
- @Override
- protected Connection getConnection() {
- if (!isConnected()) {
- throw new IllegalStateException(getClass().getSimpleName() + " not started.");
- }
- return clusterConnection;
- }
- };
- }
-
- @Override
- public boolean isConnected() {
- return clusterConnection != null;
- }
-
- @Override
- public void start(final ServiceProvider serviceProvider) {
- initClusterConnection();
- createEntityFactory();
- try {
- if (configuration.isAutoCreate()) {
- entity = autoCreateEntity();
- } else {
- try {
- entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration());
- } catch (DestroyInProgressException | EntityNotFoundException e) {
- throw new IllegalStateException("The cluster tier manager '" + entityIdentifier + "' does not exist."
- + " Please review your configuration.", e);
- } catch (TimeoutException e) {
- throw new RuntimeException("Could not connect to the cluster tier manager '" + entityIdentifier
- + "'; retrieve operation timed out", e);
- }
- }
- } catch (RuntimeException e) {
- entityFactory = null;
- closeConnection();
- throw e;
- }
- }
-
- @Override
- public void startForMaintenance(ServiceProvider super MaintainableService> serviceProvider, MaintenanceScope maintenanceScope) {
- initClusterConnection();
- createEntityFactory();
- if(maintenanceScope == MaintenanceScope.CACHE_MANAGER) {
- if (!entityFactory.acquireLeadership(entityIdentifier)) {
- entityFactory = null;
- closeConnection();
- throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease");
- }
- }
- inMaintenance = true;
- }
-
- private void createEntityFactory() {
- entityFactory = new ClusterTierManagerClientEntityFactory(clusterConnection, timeouts);
- }
-
- private void initClusterConnection() {
- try {
- Properties properties = new Properties();
- properties.put(ConnectionPropertyNames.CONNECTION_NAME, CONNECTION_PREFIX + entityIdentifier);
- properties.put(ConnectionPropertyNames.CONNECTION_TIMEOUT, Long.toString(timeouts.getConnectionTimeout().toMillis()));
- clusterConnection = ConnectionFactory.connect(clusterUri, properties);
- } catch (ConnectionException ex) {
- throw new RuntimeException(ex);
- }
- }
-
- private ClusterTierManagerClientEntity autoCreateEntity() throws ClusterTierManagerValidationException, IllegalStateException {
- while (true) {
- try {
- entityFactory.create(entityIdentifier, configuration.getServerConfiguration());
- } catch (ClusterTierManagerCreationException e) {
- throw new IllegalStateException("Could not create the cluster tier manager '" + entityIdentifier + "'.", e);
- } catch (EntityAlreadyExistsException | EntityBusyException e) {
- //ignore - entity already exists - try to retrieve
- }
- try {
- return entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration());
- } catch (DestroyInProgressException e) {
- silentDestroy();
- } catch (EntityNotFoundException e) {
- //ignore - loop and try to create
- } catch (TimeoutException e) {
- throw new RuntimeException("Could not connect to the cluster tier manager '" + entityIdentifier
- + "'; retrieve operation timed out", e);
- }
- }
- }
-
- private void silentDestroy() {
- LOGGER.debug("Found a broken ClusterTierManager - trying to clean it up");
- try {
- // Random sleep to enable racing clients to have a window to do the cleanup
- Thread.sleep(new Random().nextInt(1000));
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- }
- try {
- entityFactory.destroy(entityIdentifier);
- } catch (EntityBusyException e) {
- // Ignore - we have a racy client
- LOGGER.debug("ClusterTierManager {} marked busy when trying to clean it up", entityIdentifier);
- }
- }
-
- @Override
- public void stop() {
- LOGGER.info("Closing connection to cluster {}", this.clusterUri);
-
- /*
- * Entity close() operations must *not* be called; if the server connection is disconnected, the entity
- * close operations will stall attempting to communicate with the server. (EntityClientEndpointImpl.close()
- * calls a "closeHook" method provided by ClientEntityManagerImpl which ultimately winds up in
- * InFlightMessage.waitForAcks -- a method that can wait forever.) Theoretically, the connection close will
- * take care of server-side cleanup in the event the server is connected.
- */
- entityFactory = null;
- inMaintenance = false;
-
- clusterTierEntities.clear();
- entity = null;
-
- closeConnection();
- }
-
- @Override
- public void destroyAll() throws CachePersistenceException {
- if (!inMaintenance) {
- throw new IllegalStateException("Maintenance mode required");
- }
- LOGGER.info("destroyAll called for cluster tiers on {}", this.clusterUri);
-
- try {
- entityFactory.destroy(entityIdentifier);
- } catch (EntityBusyException e) {
- throw new CachePersistenceException("Can not delete cluster tiers on " + this.clusterUri, e);
- }
- }
-
- @Override
- public boolean handlesResourceType(ResourceType> resourceType) {
- return (Arrays.asList(ClusteredResourceType.Types.values()).contains(resourceType));
- }
-
- @Override
- public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration, ?> config) throws CachePersistenceException {
- ClusteredSpace clusteredSpace = knownPersistenceSpaces.get(name);
- if(clusteredSpace != null) {
- return clusteredSpace.identifier;
- } else {
- ClusteredCacheIdentifier cacheIdentifier = new DefaultClusterCacheIdentifier(name);
- clusteredSpace = knownPersistenceSpaces.putIfAbsent(name, new ClusteredSpace(cacheIdentifier));
- if(clusteredSpace == null) {
- return cacheIdentifier;
- } else {
- return clusteredSpace.identifier;
- }
- }
- }
-
- @Override
- public void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier> identifier) throws CachePersistenceException {
- ClusteredCacheIdentifier clusterCacheIdentifier = (ClusteredCacheIdentifier) identifier;
- if (knownPersistenceSpaces.remove(clusterCacheIdentifier.getId()) == null) {
- throw new CachePersistenceException("Unknown identifier: " + clusterCacheIdentifier);
- }
- }
-
- @Override
- public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier> identifier, String name) throws CachePersistenceException {
- ClusteredCacheIdentifier clusterCacheIdentifier = (ClusteredCacheIdentifier) identifier;
- ClusteredSpace clusteredSpace = knownPersistenceSpaces.get(clusterCacheIdentifier.getId());
- if (clusteredSpace == null) {
- throw new CachePersistenceException("Clustered space not found for identifier: " + clusterCacheIdentifier);
- }
- ConcurrentMap stateRepositories = clusteredSpace.stateRepositories;
- ClusterStateRepository currentRepo = stateRepositories.get(name);
- if(currentRepo != null) {
- return currentRepo;
- } else {
- ClusterStateRepository newRepo = new ClusterStateRepository(clusterCacheIdentifier, name, clusterTierEntities.get(clusterCacheIdentifier.getId()));
- currentRepo = stateRepositories.putIfAbsent(name, newRepo);
- if (currentRepo == null) {
- return newRepo;
- } else {
- return currentRepo;
- }
- }
- }
-
- private void checkStarted() {
- if(!isStarted()) {
- throw new IllegalStateException(getClass().getName() + " should be started to call destroy");
- }
- }
-
- @Override
- public void destroy(String name) throws CachePersistenceException {
- checkStarted();
-
- // will happen when in maintenance mode
- if(entity == null) {
- try {
- entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration());
- } catch (EntityNotFoundException e) {
- // No entity on the server, so no need to destroy anything
- } catch (TimeoutException e) {
- throw new CachePersistenceException("Could not connect to the cluster tier manager '" + entityIdentifier
- + "'; retrieve operation timed out", e);
- } catch (DestroyInProgressException e) {
- silentDestroy();
- // Nothing left to do
- return;
- }
- }
-
- try {
- if (entity != null) {
- entityFactory.destroyClusteredStoreEntity(entityIdentifier, name);
- }
- } catch (EntityNotFoundException e) {
- // Ignore - does not exist, nothing to destroy
- LOGGER.debug("Destruction of cluster tier {} failed as it does not exist", name);
- }
- }
-
- protected boolean isStarted() {
- return entityFactory != null;
- }
-
- @Override
- public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifier cacheIdentifier,
- final Store.Configuration storeConfig,
- Consistency configuredConsistency,
- ServerCallback invalidation) throws CachePersistenceException {
- final String cacheId = cacheIdentifier.getId();
-
- if (configuredConsistency == null) {
- throw new NullPointerException("Consistency cannot be null");
- }
-
- /*
- * This method is expected to be called with exactly ONE ClusteredResourcePool specified.
- */
- ClusteredResourcePool clusteredResourcePool = null;
- for (ClusteredResourceType> type : ClusteredResourceType.Types.values()) {
- ClusteredResourcePool pool = storeConfig.getResourcePools().getPoolForResource(type);
- if (pool != null) {
- if (clusteredResourcePool != null) {
- throw new IllegalStateException("At most one clustered resource supported for a cache");
- }
- clusteredResourcePool = pool;
- }
- }
- if (clusteredResourcePool == null) {
- throw new IllegalStateException("A clustered resource is required for a clustered cache");
- }
-
- final ServerStoreConfiguration clientStoreConfiguration = new ServerStoreConfiguration(
- clusteredResourcePool.getPoolAllocation(),
- storeConfig.getKeyType().getName(),
- storeConfig.getValueType().getName(),
- (storeConfig.getKeySerializer() == null ? null : storeConfig.getKeySerializer().getClass().getName()),
- (storeConfig.getValueSerializer() == null ? null : storeConfig.getValueSerializer().getClass().getName()),
- configuredConsistency
- );
-
- ClusterTierClientEntity storeClientEntity;
- try {
- storeClientEntity = entityFactory.fetchOrCreateClusteredStoreEntity(entityIdentifier, cacheId,
- clientStoreConfiguration, configuration.isAutoCreate());
- clusterTierEntities.put(cacheId, storeClientEntity);
- } catch (EntityNotFoundException e) {
- throw new CachePersistenceException("Cluster tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "' does not exist.", e);
- }
-
-
- ServerStoreProxy serverStoreProxy;
- switch (configuredConsistency) {
- case STRONG:
- serverStoreProxy = new StrongServerStoreProxy(cacheId, storeClientEntity, invalidation);
- break;
- case EVENTUAL:
- serverStoreProxy = new EventualServerStoreProxy(cacheId, storeClientEntity, invalidation);
- break;
- default:
- throw new AssertionError("Unknown consistency : " + configuredConsistency);
- }
-
- try {
- storeClientEntity.validate(clientStoreConfiguration);
- } catch (ClusterTierException e) {
- serverStoreProxy.close();
- throw new CachePersistenceException("Unable to create cluster tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "'", e);
- } catch (TimeoutException e) {
- serverStoreProxy.close();
- throw new CachePersistenceException("Unable to create cluster tier proxy '"
- + cacheIdentifier.getId() + "' for entity '" + entityIdentifier
- + "'; validate operation timed out", e);
- }
-
- return serverStoreProxy;
- }
-
- @Override
- public void releaseServerStoreProxy(ServerStoreProxy storeProxy) {
- clusterTierEntities.remove(storeProxy.getCacheId());
- storeProxy.close();
- }
-
- private void closeConnection() {
- Connection conn = clusterConnection;
- clusterConnection = null;
- if(conn != null) {
- try {
- conn.close();
- } catch (IOException e) {
- LOGGER.warn("Error closing cluster connection: " + e);
- }
- }
- }
-
- /**
- * Supplies the identifier to use for identifying a client-side cache to its server counterparts.
- */
- private static class DefaultClusterCacheIdentifier implements ClusteredCacheIdentifier {
-
- private final String id;
-
- DefaultClusterCacheIdentifier(final String id) {
- this.id = id;
- }
-
- @Override
- public String getId() {
- return this.id;
- }
-
- @Override
- public Class getServiceType() {
- return ClusteringService.class;
- }
-
- @Override
- public String toString() {
- return getClass().getSimpleName() + "@" + id;
- }
- }
-
- private static class ClusteredSpace {
-
- private final ClusteredCacheIdentifier identifier;
- private final ConcurrentMap stateRepositories;
-
- ClusteredSpace(final ClusteredCacheIdentifier identifier) {
- this.identifier = identifier;
- this.stateRepositories = new ConcurrentHashMap<>();
- }
- }
-
-}
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ChainBuilder.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ChainBuilder.java
deleted file mode 100644
index 068e7edc33..0000000000
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ChainBuilder.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.ehcache.clustered.client.internal.store;
-
-import org.ehcache.clustered.common.internal.store.Chain;
-import org.ehcache.clustered.common.internal.store.Util;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Builds {@link Chain}s
- */
-public class ChainBuilder {
-
- private List buffers = new ArrayList<>();
-
- public ChainBuilder() {
- }
-
- private ChainBuilder(List buffers) {
- this.buffers = buffers;
- }
-
- //TODO: optimize this & make this mutable
- public ChainBuilder add(final ByteBuffer payload) {
- List newList = new ArrayList<>();
- newList.addAll(this.buffers);
- newList.add(payload);
- return new ChainBuilder(newList);
- }
-
- public Chain build() {
- ByteBuffer[] elements = new ByteBuffer[buffers.size()];
- buffers.toArray(elements);
- return Util.getChain(false, elements);
- }
-
-}
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java
deleted file mode 100644
index 44cd04eaa9..0000000000
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java
+++ /dev/null
@@ -1,819 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.clustered.client.internal.store;
-
-import org.ehcache.Cache;
-import org.ehcache.CachePersistenceException;
-import org.ehcache.clustered.client.config.ClusteredResourceType;
-import org.ehcache.clustered.client.config.ClusteredStoreConfiguration;
-import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback;
-import org.ehcache.clustered.client.internal.store.operations.ChainResolver;
-import org.ehcache.clustered.client.internal.store.operations.EternalChainResolver;
-import org.ehcache.clustered.client.internal.store.operations.ConditionalRemoveOperation;
-import org.ehcache.clustered.client.internal.store.operations.ConditionalReplaceOperation;
-import org.ehcache.clustered.client.internal.store.operations.ExpiryChainResolver;
-import org.ehcache.clustered.client.internal.store.operations.PutIfAbsentOperation;
-import org.ehcache.clustered.client.internal.store.operations.PutOperation;
-import org.ehcache.clustered.client.internal.store.operations.RemoveOperation;
-import org.ehcache.clustered.client.internal.store.operations.ReplaceOperation;
-import org.ehcache.clustered.client.internal.store.operations.Result;
-import org.ehcache.clustered.client.internal.store.operations.codecs.OperationsCodec;
-import org.ehcache.clustered.client.service.ClusteringService;
-import org.ehcache.clustered.client.service.ClusteringService.ClusteredCacheIdentifier;
-import org.ehcache.clustered.common.Consistency;
-import org.ehcache.clustered.common.internal.store.Chain;
-import org.ehcache.config.ResourceType;
-import org.ehcache.core.CacheConfigurationChangeListener;
-import org.ehcache.core.Ehcache;
-import org.ehcache.core.events.CacheEventListenerConfiguration;
-import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap;
-import org.ehcache.core.spi.store.Store;
-import org.ehcache.core.spi.store.StoreAccessTimeoutException;
-import org.ehcache.core.spi.store.events.StoreEventSource;
-import org.ehcache.core.spi.store.StoreAccessException;
-import org.ehcache.core.spi.store.tiering.AuthoritativeTier;
-import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes;
-import org.ehcache.core.statistics.StoreOperationOutcomes;
-import org.ehcache.core.spi.time.TimeSource;
-import org.ehcache.core.spi.time.TimeSourceService;
-import org.ehcache.core.statistics.StoreOperationOutcomes.EvictionOutcome;
-import org.ehcache.core.statistics.TierOperationOutcomes;
-import org.ehcache.expiry.Expirations;
-import org.ehcache.expiry.Expiry;
-import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration;
-import org.ehcache.core.events.NullStoreEventDispatcher;
-import org.ehcache.impl.store.HashUtils;
-import org.ehcache.spi.persistence.StateRepository;
-import org.ehcache.spi.serialization.Serializer;
-import org.ehcache.spi.serialization.StatefulSerializer;
-import org.ehcache.spi.service.ServiceDependencies;
-import org.ehcache.spi.service.ServiceProvider;
-import org.ehcache.spi.service.Service;
-import org.ehcache.spi.service.ServiceConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.terracotta.statistics.MappedOperationStatistic;
-import org.terracotta.statistics.StatisticsManager;
-import org.terracotta.statistics.observer.OperationObserver;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeoutException;
-import java.util.function.BiFunction;
-import java.util.function.Function;
-import java.util.function.Supplier;
-
-import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException;
-import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst;
-import static org.terracotta.statistics.StatisticBuilder.operation;
-
-/**
- * Supports a {@link Store} in a clustered environment.
- */
-public class ClusteredStore implements AuthoritativeTier {
-
- private static final String STATISTICS_TAG = "Clustered";
- private static final int TIER_HEIGHT = ClusteredResourceType.Types.UNKNOWN.getTierHeight(); //TierHeight is the same for all ClusteredResourceType.Types
- static final String CHAIN_COMPACTION_THRESHOLD_PROP = "ehcache.client.chain.compaction.threshold";
- static final int DEFAULT_CHAIN_COMPACTION_THRESHOLD = 4;
-
- private final int chainCompactionLimit;
- private final OperationsCodec codec;
- private final ChainResolver resolver;
-
- private final TimeSource timeSource;
-
- private volatile ServerStoreProxy storeProxy;
- private volatile InvalidationValve invalidationValve;
-
- private final OperationObserver getObserver;
- private final OperationObserver putObserver;
- private final OperationObserver removeObserver;
- private final OperationObserver putIfAbsentObserver;
- private final OperationObserver conditionalRemoveObserver;
- private final OperationObserver replaceObserver;
- private final OperationObserver conditionalReplaceObserver;
- // Needed for JSR-107 compatibility even if unused
- private final OperationObserver evictionObserver;
- private final OperationObserver getAndFaultObserver;
-
-
- private ClusteredStore(final OperationsCodec codec, final ChainResolver resolver, TimeSource timeSource) {
- this.chainCompactionLimit = Integer.getInteger(CHAIN_COMPACTION_THRESHOLD_PROP, DEFAULT_CHAIN_COMPACTION_THRESHOLD);
- this.codec = codec;
- this.resolver = resolver;
- this.timeSource = timeSource;
-
- this.getObserver = operation(StoreOperationOutcomes.GetOutcome.class).of(this).named("get").tag(STATISTICS_TAG).build();
- this.putObserver = operation(StoreOperationOutcomes.PutOutcome.class).of(this).named("put").tag(STATISTICS_TAG).build();
- this.removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).of(this).named("remove").tag(STATISTICS_TAG).build();
- this.putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).of(this).named("putIfAbsent").tag(STATISTICS_TAG).build();
- this.conditionalRemoveObserver = operation(StoreOperationOutcomes.ConditionalRemoveOutcome.class).of(this).named("conditionalRemove").tag(STATISTICS_TAG).build();
- this.replaceObserver = operation(StoreOperationOutcomes.ReplaceOutcome.class).of(this).named("replace").tag(STATISTICS_TAG).build();
- this.conditionalReplaceObserver = operation(StoreOperationOutcomes.ConditionalReplaceOutcome.class).of(this).named("conditionalReplace").tag(STATISTICS_TAG).build();
- this.evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).of(this).named("eviction").tag(STATISTICS_TAG).build();
- this.getAndFaultObserver = operation(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class).of(this).named("getAndFault").tag(STATISTICS_TAG).build();
-
- Set tags = new HashSet<>(Arrays.asList(STATISTICS_TAG, "tier"));
- StatisticsManager.createPassThroughStatistic(this, "mappings", tags, () -> -1L);
- StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, () -> -1L);
- StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, () -> -1L);
- StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, () -> -1L);
-
- }
-
- /**
- * For tests
- */
- ClusteredStore(OperationsCodec codec, EternalChainResolver resolver, ServerStoreProxy proxy, TimeSource timeSource) {
- this(codec, resolver, timeSource);
- this.storeProxy = proxy;
- }
-
- @Override
- public ValueHolder get(final K key) throws StoreAccessException {
- getObserver.begin();
- ValueHolder value;
- try {
- value = getInternal(key);
- } catch (TimeoutException e) {
- getObserver.end(StoreOperationOutcomes.GetOutcome.TIMEOUT);
- return null;
- }
- if(value == null) {
- getObserver.end(StoreOperationOutcomes.GetOutcome.MISS);
- return null;
- } else {
- getObserver.end(StoreOperationOutcomes.GetOutcome.HIT);
- return value;
- }
- }
-
- private ValueHolder getInternal(K key) throws StoreAccessException, TimeoutException {
- ClusteredValueHolder holder = null;
- try {
- Chain chain = storeProxy.get(extractLongKey(key));
- if(!chain.isEmpty()) {
- ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis());
-
- if (resolvedChain.isCompacted()) {
- Chain compactedChain = resolvedChain.getCompactedChain();
- storeProxy.replaceAtHead(extractLongKey(key), chain, compactedChain);
- }
-
- Result resolvedResult = resolvedChain.getResolvedResult(key);
- if (resolvedResult != null) {
- V value = resolvedResult.getValue();
- long expirationTime = resolvedChain.getExpirationTime();
- if (expirationTime == Long.MAX_VALUE) {
- holder = new ClusteredValueHolder<>(value);
- } else {
- holder = new ClusteredValueHolder<>(value, expirationTime);
- }
- }
- }
- } catch (RuntimeException re) {
- throw handleRuntimeException(re);
- }
- return holder;
- }
-
- private long extractLongKey(K key) {
- return HashUtils.intHashToLong(key.hashCode());
- }
-
- @Override
- public boolean containsKey(final K key) throws StoreAccessException {
- try {
- return getInternal(key) != null;
- } catch (TimeoutException e) {
- return false;
- }
- }
-
- @Override
- public PutStatus put(final K key, final V value) throws StoreAccessException {
- putObserver.begin();
- PutStatus status = silentPut(key, value);
- switch (status) {
- case PUT:
- putObserver.end(StoreOperationOutcomes.PutOutcome.PUT);
- break;
- case NOOP:
- putObserver.end(StoreOperationOutcomes.PutOutcome.NOOP);
- break;
- default:
- throw new AssertionError("Invalid put status: " + status);
- }
- return status;
- }
-
- private PutStatus silentPut(final K key, final V value) throws StoreAccessException {
- try {
- PutOperation operation = new PutOperation<>(key, value, timeSource.getTimeMillis());
- ByteBuffer payload = codec.encode(operation);
- long extractedKey = extractLongKey(key);
- storeProxy.append(extractedKey, payload);
- return PutStatus.PUT;
- } catch (RuntimeException re) {
- throw handleRuntimeException(re);
- } catch (TimeoutException e) {
- throw new StoreAccessTimeoutException(e);
- }
- }
-
- @Override
- public ValueHolder putIfAbsent(final K key, final V value) throws StoreAccessException {
- putIfAbsentObserver.begin();
- try {
- PutIfAbsentOperation operation = new PutIfAbsentOperation<>(key, value, timeSource.getTimeMillis());
- ByteBuffer payload = codec.encode(operation);
- long extractedKey = extractLongKey(key);
- Chain chain = storeProxy.getAndAppend(extractedKey, payload);
- ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis());
-
- if (resolvedChain.getCompactionCount() > chainCompactionLimit) {
- Chain compactedChain = resolvedChain.getCompactedChain();
- storeProxy.replaceAtHead(extractedKey, chain, compactedChain);
- }
-
- Result result = resolvedChain.getResolvedResult(key);
- if(result == null) {
- putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.PUT);
- return null;
- } else {
- putIfAbsentObserver.end(StoreOperationOutcomes.PutIfAbsentOutcome.HIT);
- return new ClusteredValueHolder<>(result.getValue());
- }
- } catch (RuntimeException re) {
- throw handleRuntimeException(re);
- } catch (TimeoutException e) {
- throw new StoreAccessTimeoutException(e);
- }
- }
-
- @Override
- public boolean remove(final K key) throws StoreAccessException {
- removeObserver.begin();
- if(silentRemove(key)) {
- removeObserver.end(StoreOperationOutcomes.RemoveOutcome.REMOVED);
- return true;
- } else {
- removeObserver.end(StoreOperationOutcomes.RemoveOutcome.MISS);
- return false;
- }
- }
-
- private boolean silentRemove(final K key) throws StoreAccessException {
- try {
- RemoveOperation operation = new RemoveOperation<>(key, timeSource.getTimeMillis());
- ByteBuffer payload = codec.encode(operation);
- long extractedKey = extractLongKey(key);
- Chain chain = storeProxy.getAndAppend(extractedKey, payload);
- ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis());
-
- if(resolvedChain.getResolvedResult(key) != null) {
- storeProxy.replaceAtHead(extractedKey, chain, resolvedChain.getCompactedChain());
- return true;
- } else {
- return false;
- }
- } catch (RuntimeException re) {
- throw handleRuntimeException(re);
- } catch (TimeoutException e) {
- throw new StoreAccessTimeoutException(e);
- }
- }
-
- @Override
- public RemoveStatus remove(final K key, final V value) throws StoreAccessException {
- conditionalRemoveObserver.begin();
- try {
- ConditionalRemoveOperation operation = new ConditionalRemoveOperation<>(key, value, timeSource.getTimeMillis());
- ByteBuffer payload = codec.encode(operation);
- long extractedKey = extractLongKey(key);
- Chain chain = storeProxy.getAndAppend(extractedKey, payload);
- ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis());
-
- Result result = resolvedChain.getResolvedResult(key);
- if(result != null) {
- if(value.equals(result.getValue())) {
- storeProxy.replaceAtHead(extractedKey, chain, resolvedChain.getCompactedChain());
-
- conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.REMOVED);
- return RemoveStatus.REMOVED;
- } else {
- conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS);
- return RemoveStatus.KEY_PRESENT;
- }
- } else {
- conditionalRemoveObserver.end(StoreOperationOutcomes.ConditionalRemoveOutcome.MISS);
- return RemoveStatus.KEY_MISSING;
- }
- } catch (RuntimeException re) {
- throw handleRuntimeException(re);
- } catch (TimeoutException e) {
- throw new StoreAccessTimeoutException(e);
- }
- }
-
- @Override
- public ValueHolder replace(final K key, final V value) throws StoreAccessException {
- replaceObserver.begin();
- try {
- ReplaceOperation operation = new ReplaceOperation<>(key, value, timeSource.getTimeMillis());
- ByteBuffer payload = codec.encode(operation);
- long extractedKey = extractLongKey(key);
- Chain chain = storeProxy.getAndAppend(extractedKey, payload);
- ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis());
-
- if (resolvedChain.getCompactionCount() > chainCompactionLimit) {
- Chain compactedChain = resolvedChain.getCompactedChain();
- storeProxy.replaceAtHead(extractedKey, chain, compactedChain);
- }
-
- Result result = resolvedChain.getResolvedResult(key);
- if(result == null) {
- replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.MISS);
- return null;
- } else {
- replaceObserver.end(StoreOperationOutcomes.ReplaceOutcome.REPLACED);
- return new ClusteredValueHolder<>(result.getValue());
- }
- } catch (RuntimeException re) {
- throw handleRuntimeException(re);
- } catch (TimeoutException e) {
- throw new StoreAccessTimeoutException(e);
- }
- }
-
- @Override
- public ReplaceStatus replace(final K key, final V oldValue, final V newValue) throws StoreAccessException {
- conditionalReplaceObserver.begin();
- try {
- ConditionalReplaceOperation operation = new ConditionalReplaceOperation<>(key, oldValue, newValue, timeSource
- .getTimeMillis());
- ByteBuffer payload = codec.encode(operation);
- long extractedKey = extractLongKey(key);
- Chain chain = storeProxy.getAndAppend(extractedKey, payload);
- ResolvedChain resolvedChain = resolver.resolve(chain, key, timeSource.getTimeMillis());
-
- if (resolvedChain.getCompactionCount() > chainCompactionLimit) {
- Chain compactedChain = resolvedChain.getCompactedChain();
- storeProxy.replaceAtHead(extractedKey, chain, compactedChain);
- }
-
- Result result = resolvedChain.getResolvedResult(key);
- if(result != null) {
- if(oldValue.equals(result.getValue())) {
- conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.REPLACED);
- return ReplaceStatus.HIT;
- } else {
- conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS);
- return ReplaceStatus.MISS_PRESENT;
- }
- } else {
- conditionalReplaceObserver.end(StoreOperationOutcomes.ConditionalReplaceOutcome.MISS);
- return ReplaceStatus.MISS_NOT_PRESENT;
- }
- } catch (RuntimeException re) {
- throw handleRuntimeException(re);
- } catch (TimeoutException e) {
- throw new StoreAccessTimeoutException(e);
- }
- }
-
- @Override
- public void clear() throws StoreAccessException {
- try {
- storeProxy.clear();
- } catch (RuntimeException re) {
- throw handleRuntimeException(re);
- } catch (TimeoutException e) {
- throw new StoreAccessTimeoutException(e);
- }
- }
-
- @Override
- public StoreEventSource getStoreEventSource() {
- // TODO: Is there a StoreEventSource for a ServerStore?
- return new NullStoreEventDispatcher<>();
- }
-
- @Override
- public Iterator>> iterator() {
- // TODO: Make appropriate ServerStoreProxy call
- throw new UnsupportedOperationException("Implement me");
- }
-
- @Override
- public ValueHolder compute(final K key, final BiFunction super K, ? super V, ? extends V> mappingFunction)
- throws StoreAccessException {
- // TODO: Make appropriate ServerStoreProxy call
- throw new UnsupportedOperationException("Implement me");
- }
-
- @Override
- public ValueHolder compute(final K key, final BiFunction super K, ? super V, ? extends V> mappingFunction, final Supplier replaceEqual)
- throws StoreAccessException {
- // TODO: Make appropriate ServerStoreProxy call
- throw new UnsupportedOperationException("Implement me");
- }
-
- @Override
- public ValueHolder computeIfAbsent(final K key, final Function super K, ? extends V> mappingFunction)
- throws StoreAccessException {
- // TODO: Make appropriate ServerStoreProxy call
- throw new UnsupportedOperationException("Implement me");
- }
-
- /**
- * The assumption is that this method will be invoked only by cache.putAll and cache.removeAll methods.
- */
- @Override
- public Map> bulkCompute(final Set extends K> keys, final Function>, Iterable extends Map.Entry extends K, ? extends V>>> remappingFunction)
- throws StoreAccessException {
- Map> valueHolderMap = new HashMap<>();
- if(remappingFunction instanceof Ehcache.PutAllFunction) {
- Ehcache.PutAllFunction putAllFunction = (Ehcache.PutAllFunction)remappingFunction;
- Map entriesToRemap = putAllFunction.getEntriesToRemap();
- for(Map.Entry entry: entriesToRemap.entrySet()) {
- PutStatus putStatus = silentPut(entry.getKey(), entry.getValue());
- if(putStatus == PutStatus.PUT) {
- putAllFunction.getActualPutCount().incrementAndGet();
- valueHolderMap.put(entry.getKey(), new ClusteredValueHolder<>(entry.getValue()));
- }
- }
- } else if(remappingFunction instanceof Ehcache.RemoveAllFunction) {
- Ehcache.RemoveAllFunction removeAllFunction = (Ehcache.RemoveAllFunction)remappingFunction;
- for (K key : keys) {
- boolean removed = silentRemove(key);
- if(removed) {
- removeAllFunction.getActualRemoveCount().incrementAndGet();
- }
- }
- } else {
- throw new UnsupportedOperationException("This compute method is not yet capable of handling generic computation functions");
- }
- return valueHolderMap;
- }
-
- @Override
- public Map> bulkCompute(final Set extends K> keys, final Function>, Iterable extends Map.Entry extends K, ? extends V>>> remappingFunction, final Supplier replaceEqual)
- throws StoreAccessException {
- // TODO: Make appropriate ServerStoreProxy call
- throw new UnsupportedOperationException("Implement me");
- }
-
- /**
- * The assumption is that this method will be invoked only by cache.getAll method.
- */
- @Override
- public Map> bulkComputeIfAbsent(final Set extends K> keys, final Function, Iterable extends Map.Entry extends K, ? extends V>>> mappingFunction)
- throws StoreAccessException {
- if(mappingFunction instanceof Ehcache.GetAllFunction) {
- Map> map = new HashMap<>();
- for (K key : keys) {
- ValueHolder value;
- try {
- value = getInternal(key);
- } catch (TimeoutException e) {
- // This timeout handling is safe **only** in the context of a get/read operation!
- value = null;
- }
- ValueHolder holder = (value != null) ? value : null;
- map.put(key, holder);
- }
- return map;
- } else {
- throw new UnsupportedOperationException("This compute method is not yet capable of handling generic computation functions");
- }
- }
-
- @Override
- public List getConfigurationChangeListeners() {
- // TODO: Make appropriate ServerStoreProxy call
- return Collections.emptyList();
- }
-
- @Override
- public ValueHolder getAndFault(K key) throws StoreAccessException {
- getAndFaultObserver.begin();
- ValueHolder value;
- try {
- value = getInternal(key);
- } catch (TimeoutException e) {
- getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT);
- return null;
- }
- if(value == null) {
- getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS);
- return null;
- } else {
- getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT);
- return value;
- }
- }
-
- @Override
- public ValueHolder computeIfAbsentAndFault(K key, Function super K, ? extends V> mappingFunction) throws StoreAccessException {
- return computeIfAbsent(key, mappingFunction);
- }
-
- @Override
- public boolean flush(K key, ValueHolder valueHolder) {
- // TODO wire this once metadata are maintained
- return true;
- }
-
- @Override
- public void setInvalidationValve(InvalidationValve valve) {
- this.invalidationValve = valve;
- }
-
-
- /**
- * Provider of {@link ClusteredStore} instances.
- */
- @ServiceDependencies({TimeSourceService.class, ClusteringService.class})
- public static class Provider implements Store.Provider, AuthoritativeTier.Provider {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(Provider.class);
-
- private static final Set> CLUSTER_RESOURCES;
- static {
- Set> resourceTypes = new HashSet<>();
- Collections.addAll(resourceTypes, ClusteredResourceType.Types.values());
- CLUSTER_RESOURCES = Collections.unmodifiableSet(resourceTypes);
- }
-
- private volatile ServiceProvider serviceProvider;
- private volatile ClusteringService clusteringService;
-
- private final Map, StoreConfig> createdStores = new ConcurrentWeakIdentityHashMap<>();
- private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>();
-
- @Override
- public ClusteredStore createStore(final Configuration storeConfig, final ServiceConfiguration>... serviceConfigs) {
- ClusteredStore store = createStoreInternal(storeConfig, serviceConfigs);
- Collection> tieredOps = new ArrayList<>();
-
- MappedOperationStatistic get =
- new MappedOperationStatistic<>(
- store, TierOperationOutcomes.GET_TRANSLATION, "get", TIER_HEIGHT, "get", STATISTICS_TAG);
- StatisticsManager.associate(get).withParent(store);
- tieredOps.add(get);
-
- MappedOperationStatistic evict =
- new MappedOperationStatistic<>(
- store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG);
- StatisticsManager.associate(evict).withParent(store);
- tieredOps.add(evict);
-
- tierOperationStatistics.put(store, tieredOps);
- return store;
- }
-
- private ClusteredStore createStoreInternal(Configuration storeConfig, Object[] serviceConfigs) {
- DefaultCacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, serviceConfigs);
- if (loaderWriterConfiguration != null) {
- throw new IllegalStateException("CacheLoaderWriter is not supported with clustered tiers");
- }
-
- CacheEventListenerConfiguration eventListenerConfiguration = findSingletonAmongst(CacheEventListenerConfiguration.class, serviceConfigs);
- if (eventListenerConfiguration != null) {
- throw new IllegalStateException("CacheEventListener is not supported with clustered tiers");
- }
-
- if (clusteringService == null) {
- throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore called without ClusteringServiceConfiguration");
- }
-
- final HashSet> clusteredResourceTypes =
- new HashSet<>(storeConfig.getResourcePools().getResourceTypeSet());
- clusteredResourceTypes.retainAll(CLUSTER_RESOURCES);
-
- if (clusteredResourceTypes.isEmpty()) {
- throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore called without ClusteredResourcePools");
- }
- if (clusteredResourceTypes.size() != 1) {
- throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore can not create clustered tier with multiple clustered resources");
- }
-
- ClusteredStoreConfiguration clusteredStoreConfiguration = findSingletonAmongst(ClusteredStoreConfiguration.class, serviceConfigs);
- if (clusteredStoreConfiguration == null) {
- clusteredStoreConfiguration = new ClusteredStoreConfiguration();
- }
- ClusteredCacheIdentifier cacheId = findSingletonAmongst(ClusteredCacheIdentifier.class, serviceConfigs);
-
- TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource();
-
- OperationsCodec codec = new OperationsCodec<>(storeConfig.getKeySerializer(), storeConfig.getValueSerializer());
-
- ChainResolver resolver;
- Expiry super K, ? super V> expiry = storeConfig.getExpiry();
- if (Expirations.noExpiration().equals(expiry)) {
- resolver = new EternalChainResolver<>(codec);
- } else {
- resolver = new ExpiryChainResolver<>(codec, expiry);
- }
-
-
- ClusteredStore store = new ClusteredStore<>(codec, resolver, timeSource);
-
- createdStores.put(store, new StoreConfig(cacheId, storeConfig, clusteredStoreConfiguration.getConsistency()));
- return store;
- }
-
- @Override
- public void releaseStore(final Store, ?> resource) {
- if (createdStores.remove(resource) == null) {
- throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource);
- }
- ClusteredStore clusteredStore = (ClusteredStore)resource;
- this.clusteringService.releaseServerStoreProxy(clusteredStore.storeProxy);
- StatisticsManager.nodeFor(clusteredStore).clean();
- tierOperationStatistics.remove(clusteredStore);
- }
-
- @Override
- public void initStore(final Store, ?> resource) {
- StoreConfig storeConfig = createdStores.get(resource);
- if (storeConfig == null) {
- throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource);
- }
- final ClusteredStore, ?> clusteredStore = (ClusteredStore, ?>) resource;
- ClusteredCacheIdentifier cacheIdentifier = storeConfig.getCacheIdentifier();
- try {
- clusteredStore.storeProxy = clusteringService.getServerStoreProxy(cacheIdentifier, storeConfig.getStoreConfig(), storeConfig.getConsistency(),
- new ServerCallback() {
- @Override
- public void onInvalidateHash(long hash) {
- EvictionOutcome result = EvictionOutcome.SUCCESS;
- clusteredStore.evictionObserver.begin();
- if (clusteredStore.invalidationValve != null) {
- try {
- LOGGER.debug("CLIENT: calling invalidation valve for hash {}", hash);
- clusteredStore.invalidationValve.invalidateAllWithHash(hash);
- } catch (StoreAccessException sae) {
- //TODO: what should be done here? delegate to resilience strategy?
- LOGGER.error("Error invalidating hash {}", hash, sae);
- result = StoreOperationOutcomes.EvictionOutcome.FAILURE;
- }
- }
- clusteredStore.evictionObserver.end(result);
- }
-
- @Override
- public void onInvalidateAll() {
- if (clusteredStore.invalidationValve != null) {
- try {
- LOGGER.debug("CLIENT: calling invalidation valve for all");
- clusteredStore.invalidationValve.invalidateAll();
- } catch (StoreAccessException sae) {
- //TODO: what should be done here? delegate to resilience strategy?
- LOGGER.error("Error invalidating all", sae);
- }
- }
- }
-
- @Override
- public Chain compact(Chain chain) {
- return clusteredStore.resolver.applyOperation(chain, clusteredStore.timeSource.getTimeMillis());
- }
- });
- } catch (CachePersistenceException e) {
- throw new RuntimeException("Unable to create cluster tier proxy - " + cacheIdentifier, e);
- }
-
- Serializer keySerializer = clusteredStore.codec.getKeySerializer();
- if (keySerializer instanceof StatefulSerializer) {
- StateRepository stateRepository = null;
- try {
- stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Key");
- } catch (CachePersistenceException e) {
- throw new RuntimeException(e);
- }
- ((StatefulSerializer)keySerializer).init(stateRepository);
- }
- Serializer valueSerializer = clusteredStore.codec.getValueSerializer();
- if (valueSerializer instanceof StatefulSerializer) {
- StateRepository stateRepository = null;
- try {
- stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Value");
- } catch (CachePersistenceException e) {
- throw new RuntimeException(e);
- }
- ((StatefulSerializer)valueSerializer).init(stateRepository);
- }
- }
-
- @Override
- public int rank(final Set> resourceTypes, final Collection> serviceConfigs) {
- if (clusteringService == null || resourceTypes.size() > 1 || Collections.disjoint(resourceTypes, CLUSTER_RESOURCES)) {
- // A ClusteredStore requires a ClusteringService *and* ClusteredResourcePool instances
- return 0;
- }
- return 1;
- }
-
- @Override
- public int rankAuthority(ResourceType> authorityResource, Collection> serviceConfigs) {
- if (clusteringService == null) {
- return 0;
- } else {
- return CLUSTER_RESOURCES.contains(authorityResource) ? 1 : 0;
- }
- }
-
- @Override
- public void start(final ServiceProvider serviceProvider) {
- this.serviceProvider = serviceProvider;
- this.clusteringService = this.serviceProvider.getService(ClusteringService.class);
- }
-
- @Override
- public void stop() {
- this.serviceProvider = null;
- createdStores.clear();
- }
-
- @Override
- public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration>... serviceConfigs) {
- ClusteredStore authoritativeTier = createStoreInternal(storeConfig, serviceConfigs);
- Collection> tieredOps = new ArrayList<>();
-
- MappedOperationStatistic get =
- new MappedOperationStatistic<>(
- authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", TIER_HEIGHT, "getAndFault", STATISTICS_TAG);
- StatisticsManager.associate(get).withParent(authoritativeTier);
- tieredOps.add(get);
-
- MappedOperationStatistic evict =
- new MappedOperationStatistic<>(
- authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG);
- StatisticsManager.associate(evict).withParent(authoritativeTier);
- tieredOps.add(evict);
-
- tierOperationStatistics.put(authoritativeTier, tieredOps);
- return authoritativeTier;
- }
-
- @Override
- public void releaseAuthoritativeTier(AuthoritativeTier, ?> resource) {
- releaseStore(resource);
- }
-
- @Override
- public void initAuthoritativeTier(AuthoritativeTier, ?> resource) {
- initStore(resource);
- }
- }
-
- private static class StoreConfig {
-
- private final ClusteredCacheIdentifier cacheIdentifier;
- private final Store.Configuration, ?> storeConfig;
- private final Consistency consistency;
-
- StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration, ?> storeConfig, Consistency consistency) {
- this.cacheIdentifier = cacheIdentifier;
- this.storeConfig = storeConfig;
- this.consistency = consistency;
- }
-
- public Configuration, ?> getStoreConfig() {
- return this.storeConfig;
- }
-
- public ClusteredCacheIdentifier getCacheIdentifier() {
- return this.cacheIdentifier;
- }
-
- public Consistency getConsistency() {
- return consistency;
- }
- }
-}
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java
deleted file mode 100644
index 72bf749aa7..0000000000
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.ehcache.clustered.client.internal.store;
-
-import org.ehcache.clustered.client.internal.store.ClusterTierClientEntity.ResponseListener;
-import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse;
-import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateAll;
-import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateHash;
-import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ServerInvalidateHash;
-import org.ehcache.clustered.common.internal.messages.EhcacheResponseType;
-import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage;
-import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage;
-import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck;
-import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAllAck;
-import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage;
-import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage;
-import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage;
-import org.ehcache.clustered.common.internal.store.Chain;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.nio.ByteBuffer;
-import java.util.Objects;
-import java.util.concurrent.TimeoutException;
-
-import static java.util.Objects.requireNonNull;
-
-/**
- * Provides client-side access to the services of a {@code ServerStore}.
- */
-class CommonServerStoreProxy implements ServerStoreProxy {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(CommonServerStoreProxy.class);
-
- private final String cacheId;
- private final ClusterTierClientEntity entity;
-
- CommonServerStoreProxy(final String cacheId, final ClusterTierClientEntity entity, final ServerCallback invalidation) {
- this.cacheId = requireNonNull(cacheId, "Cache-ID must be non-null");
- this.entity = requireNonNull(entity, "ClusterTierClientEntity must be non-null");
- requireNonNull(invalidation, "ServerCallback must be non-null");
-
- entity.addResponseListener(ServerInvalidateHash.class, response -> {
- long key = response.getKey();
- LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated", cacheId, key);
- invalidation.onInvalidateHash(key);
- });
- entity.addResponseListener(ClientInvalidateHash.class, response -> {
- long key = response.getKey();
- int invalidationId = response.getInvalidationId();
-
- LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId);
- invalidation.onInvalidateHash(key);
-
- try {
- LOGGER.debug("CLIENT: ack'ing invalidation of hash {} from cache {} (ID {})", key, cacheId, invalidationId);
- entity.invokeAndWaitForSend(new ClientInvalidationAck(key, invalidationId), false);
- } catch (Exception e) {
- //TODO: what should be done here?
- LOGGER.error("error acking client invalidation of hash {} on cache {}", key, cacheId, e);
- }
- });
- entity.addResponseListener(ClientInvalidateAll.class, response -> {
- int invalidationId = response.getInvalidationId();
-
- LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId);
- invalidation.onInvalidateAll();
-
- try {
- LOGGER.debug("CLIENT: ack'ing invalidation of all from cache {} (ID {})", cacheId, invalidationId);
- entity.invokeAndWaitForSend(new ClientInvalidationAllAck(invalidationId), false);
- } catch (Exception e) {
- //TODO: what should be done here?
- LOGGER.error("error acking client invalidation of all on cache {}", cacheId, e);
- }
- });
- }
-
- @Override
- public String getCacheId() {
- return cacheId;
- }
-
- void addResponseListener(Class listenerClass, SimpleClusterTierClientEntity.ResponseListener