From d7a28a42610bbaef6e1354a004ba7c0c2b8306bf Mon Sep 17 00:00:00 2001 From: dan-s1 Date: Mon, 6 Oct 2025 20:30:50 +0000 Subject: [PATCH 1/2] NIFI-15020 Aligned property names with their human-friendly display names for processors and controller services in the extension bundles nifi-hadoop-bundle, nifi-hashicorp-vault-bundle, nifi-hazelcast-bundle, nifi-hl7-bundle and nifi-hubspot-bundle. --- .../hadoop/CreateHadoopSequenceFile.java | 10 +++- .../nifi/processors/hadoop/DeleteHDFS.java | 14 ++++-- .../processors/hadoop/GetHDFSFileInfo.java | 45 ++++++++++-------- .../nifi/processors/hadoop/ListHDFS.java | 22 +++++---- .../nifi/processors/hadoop/PutHDFS.java | 10 +++- .../HashiCorpVaultClientService.java | 32 +++++++------ .../cacheclient/HazelcastMapCacheClient.java | 17 ++++--- .../EmbeddedHazelcastCacheManager.java | 31 +++++++------ .../ExternalHazelcastCacheManager.java | 46 +++++++++---------- .../IMapBasedHazelcastCacheManager.java | 9 +++- .../cachemanager/TestHazelcastProcessor.java | 3 +- .../processors/hl7/ExtractHL7Attributes.java | 21 +++++---- .../nifi/processors/hubspot/GetHubSpot.java | 33 +++++++------ 13 files changed, 173 insertions(+), 120 deletions(-) diff --git a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java index b25c3d3291f0..047c6577f7f7 100644 --- a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java +++ b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java @@ -30,6 +30,7 @@ import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.flowfile.attributes.StandardFlowFileMediaType; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; @@ -86,8 +87,7 @@ public class CreateHadoopSequenceFile extends AbstractHadoopProcessor { // Optional Properties. 
static final PropertyDescriptor COMPRESSION_TYPE = new PropertyDescriptor.Builder() - .displayName("Compression type") - .name("compression type") + .name("Compression Type") .description("Type of compression to use when creating Sequence File") .allowableValues(SequenceFile.CompressionType.values()) .build(); @@ -180,4 +180,10 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro } } + + @Override + public void migrateProperties(PropertyConfiguration config) { + super.migrateProperties(config); + config.renameProperty("compression type", COMPRESSION_TYPE.getName()); + } } diff --git a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java index f19faab9957c..fc09cdf10bc3 100644 --- a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java +++ b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java @@ -32,6 +32,7 @@ import org.apache.nifi.components.RequiredPermission; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; @@ -86,8 +87,7 @@ public class DeleteHDFS extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor FILE_OR_DIRECTORY = new PropertyDescriptor.Builder() - .name("file_or_directory") - .displayName("Path") + .name("Path") .description("The HDFS file or directory to delete. 
A wildcard expression may be used to only delete certain files") .required(true) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) @@ -95,8 +95,7 @@ public class DeleteHDFS extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor RECURSIVE = new PropertyDescriptor.Builder() - .name("recursive") - .displayName("Recursive") + .name("Recursive") .description("Remove contents of a non-empty directory recursively") .allowableValues("true", "false") .required(true) @@ -223,6 +222,13 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro } + @Override + public void migrateProperties(PropertyConfiguration config) { + super.migrateProperties(config); + config.renameProperty("file_or_directory", FILE_OR_DIRECTORY.getName()); + config.renameProperty("recursive", RECURSIVE.getName()); + } + protected Relationship getSuccessRelationship() { return REL_SUCCESS; } diff --git a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSFileInfo.java b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSFileInfo.java index 134062c4dd95..3cb3aa8bfe3f 100644 --- a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSFileInfo.java +++ b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSFileInfo.java @@ -49,6 +49,7 @@ import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.ProcessorInitializationContext; @@ -97,8 +98,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { public static final String APPLICATION_JSON = "application/json"; public static final PropertyDescriptor FULL_PATH = new PropertyDescriptor.Builder() - .displayName("Full path") - .name("gethdfsfileinfo-full-path") + .name("Full Path") .description("A directory to start listing from, or a file's full path.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) @@ -107,8 +107,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor RECURSE_SUBDIRS = new PropertyDescriptor.Builder() - .displayName("Recurse Subdirectories") - .name("gethdfsfileinfo-recurse-subdirs") + .name("Recurse Subdirectories") .description("Indicates whether to list files from subdirectories of the HDFS directory") .required(true) .allowableValues("true", "false") @@ -117,8 +116,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor DIR_FILTER = new PropertyDescriptor.Builder() - .displayName("Directory Filter") - .name("gethdfsfileinfo-dir-filter") + .name("Directory Filter") .description("Regex. Only directories whose names match the given regular expression will be picked up. 
If not provided, no filter will be applied (performance considerations).") .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .required(false) @@ -126,8 +124,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor FILE_FILTER = new PropertyDescriptor.Builder() - .displayName("File Filter") - .name("gethdfsfileinfo-file-filter") + .name("File Filter") .description("Regex. Only files whose names match the given regular expression will be picked up. If not provided, no filter will be applied (performance considerations).") .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .required(false) @@ -135,8 +132,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor FILE_EXCLUDE_FILTER = new PropertyDescriptor.Builder() - .displayName("Exclude Files") - .name("gethdfsfileinfo-file-exclude-filter") + .name("Exclude Files") .description("Regex. Files whose names match the given regular expression will not be picked up. If not provided, no exclusion filter will be applied (performance considerations).") .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .required(false) @@ -144,8 +140,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor IGNORE_DOTTED_DIRS = new PropertyDescriptor.Builder() - .displayName("Ignore Dotted Directories") - .name("gethdfsfileinfo-ignore-dotted-dirs") + .name("Ignore Dotted Directories") .description("If true, directories whose names begin with a dot (\".\") will be ignored") .required(true) .addValidator(StandardValidators.BOOLEAN_VALIDATOR) @@ -154,8 +149,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor IGNORE_DOTTED_FILES = new PropertyDescriptor.Builder() - .displayName("Ignore Dotted Files") - .name("gethdfsfileinfo-ignore-dotted-files") + .name("Ignore Dotted Files") .description("If true, files whose names begin with a dot (\".\") will be ignored") .required(true) .addValidator(StandardValidators.BOOLEAN_VALIDATOR) @@ -174,8 +168,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { "Don't group results. Generate a FlowFile for each HDFS object."); public static final PropertyDescriptor GROUPING = new PropertyDescriptor.Builder() - .displayName("Group Results") - .name("gethdfsfileinfo-group") + .name("Group Results") .description("Groups HDFS objects") .required(true) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) @@ -184,8 +177,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder() - .displayName("Batch Size") - .name("gethdfsfileinfo-batch-size") + .name("Batch Size") .description("Number of records to put into an output flowfile when 'Destination' is set to 'Content'" + " and 'Group Results' is set to 'None'") .required(false) @@ -201,8 +193,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor { "Details of the given HDFS object will be stored in the content in JSON format"); public static final PropertyDescriptor DESTINATION = new PropertyDescriptor.Builder() - .displayName("Destination") - .name("gethdfsfileinfo-destination") + .name("Destination") .description("Sets the destination for the results. When set to 'Content', attributes of flowfile won't be used for storing results. 
") .required(true) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) @@ -344,6 +335,20 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } } + @Override + public void migrateProperties(PropertyConfiguration config) { + super.migrateProperties(config); + config.renameProperty("gethdfsfileinfo-full-path", FULL_PATH.getName()); + config.renameProperty("gethdfsfileinfo-recurse-subdirs", RECURSE_SUBDIRS.getName()); + config.renameProperty("gethdfsfileinfo-dir-filter", DIR_FILTER.getName()); + config.renameProperty("gethdfsfileinfo-file-filter", FILE_FILTER.getName()); + config.renameProperty("gethdfsfileinfo-file-exclude-filter", FILE_EXCLUDE_FILTER.getName()); + config.renameProperty("gethdfsfileinfo-ignore-dotted-dirs", IGNORE_DOTTED_DIRS.getName()); + config.renameProperty("gethdfsfileinfo-ignore-dotted-files", IGNORE_DOTTED_FILES.getName()); + config.renameProperty("gethdfsfileinfo-group", GROUPING.getName()); + config.renameProperty("gethdfsfileinfo-batch-size", BATCH_SIZE.getName()); + config.renameProperty("gethdfsfileinfo-destination", DESTINATION.getName()); + } /* * Walks thru HDFS tree. This method will return null to the main if there is no provided path existing. diff --git a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java index ac500a6658c1..10e2aa6cc29d 100644 --- a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java +++ b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java @@ -38,6 +38,7 @@ import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.state.Scope; import org.apache.nifi.components.state.StateMap; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; @@ -107,8 +108,7 @@ public class ListHDFS extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder() - .name("record-writer") - .displayName("Record Writer") + .name("Record Writer") .description("Specifies the Record Writer to use for creating the listing. If not specified, one FlowFile will be created for each " + "entity that is listed. 
If the Record Writer is specified, all entities will be written to a single FlowFile.") .required(false) @@ -124,8 +124,7 @@ public class ListHDFS extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor FILE_FILTER_MODE = new PropertyDescriptor.Builder() - .name("file-filter-mode") - .displayName("File Filter Mode") + .name("File Filter Mode") .description("Determines how the regular expression in " + FILE_FILTER.getDisplayName() + " will be used when retrieving listings.") .required(true) .allowableValues(FilterMode.class) @@ -134,8 +133,7 @@ public class ListHDFS extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor MINIMUM_FILE_AGE = new PropertyDescriptor.Builder() - .name("minimum-file-age") - .displayName("Minimum File Age") + .name("Minimum File Age") .description("The minimum age that a file must be in order to be pulled; any file younger than this " + "amount of time (based on last modification date) will be ignored") .required(false) @@ -143,8 +141,7 @@ public class ListHDFS extends AbstractHadoopProcessor { .build(); public static final PropertyDescriptor MAXIMUM_FILE_AGE = new PropertyDescriptor.Builder() - .name("maximum-file-age") - .displayName("Maximum File Age") + .name("Maximum File Age") .description("The maximum age that a file must be in order to be pulled; any file older than this " + "amount of time (based on last modification date) will be ignored. Minimum value is 100ms.") .required(false) @@ -319,6 +316,15 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } + @Override + public void migrateProperties(PropertyConfiguration config) { + super.migrateProperties(config); + config.renameProperty("record-writer", RECORD_WRITER.getName()); + config.renameProperty("file-filter-mode", FILE_FILTER_MODE.getName()); + config.renameProperty("minimum-file-age", MINIMUM_FILE_AGE.getName()); + config.renameProperty("maximum-file-age", MAXIMUM_FILE_AGE.getName()); + } + private PathFilter createPathFilter(final ProcessContext context) { final FilterMode filterMode = FilterMode.forName(context.getProperty(FILE_FILTER_MODE).getValue()); final boolean recursive = context.getProperty(RECURSE_SUBDIRS).asBoolean(); diff --git a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java index 42ffef97f5a7..7b9ca0b54c90 100644 --- a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java +++ b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java @@ -56,6 +56,7 @@ import org.apache.nifi.fileresource.service.api.FileResource; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; @@ -162,8 +163,7 @@ public class PutHDFS extends AbstractHadoopProcessor { .build(); protected static final PropertyDescriptor WRITING_STRATEGY = new PropertyDescriptor.Builder() - .name("writing-strategy") - .displayName("Writing Strategy") + .name("Writing Strategy") .description("Defines the approach for writing the FlowFile data.") .required(true) 
.defaultValue(WRITE_AND_RENAME_AV) @@ -554,6 +554,12 @@ private AclStatus getAclStatus(final Path dirPath) { }); } + @Override + public void migrateProperties(PropertyConfiguration config) { + super.migrateProperties(config); + config.renameProperty("writing-strategy", WRITING_STRATEGY.getName()); + } + protected Relationship getSuccessRelationship() { return REL_SUCCESS; } diff --git a/nifi-extension-bundles/nifi-hashicorp-vault-bundle/nifi-hashicorp-vault-client-service-api/src/main/java/org/apache/nifi/vault/hashicorp/HashiCorpVaultClientService.java b/nifi-extension-bundles/nifi-hashicorp-vault-bundle/nifi-hashicorp-vault-client-service-api/src/main/java/org/apache/nifi/vault/hashicorp/HashiCorpVaultClientService.java index 3858e37116ca..c8fbcc887f2d 100644 --- a/nifi-extension-bundles/nifi-hashicorp-vault-bundle/nifi-hashicorp-vault-client-service-api/src/main/java/org/apache/nifi/vault/hashicorp/HashiCorpVaultClientService.java +++ b/nifi-extension-bundles/nifi-hashicorp-vault-bundle/nifi-hashicorp-vault-client-service-api/src/main/java/org/apache/nifi/vault/hashicorp/HashiCorpVaultClientService.java @@ -23,6 +23,7 @@ import org.apache.nifi.controller.ControllerService; import org.apache.nifi.controller.VerifiableControllerService; import org.apache.nifi.expression.ExpressionLanguageScope; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.ssl.SSLContextService; @@ -37,8 +38,7 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab "Use one or more '.properties' files to configure the client"); PropertyDescriptor CONFIGURATION_STRATEGY = new PropertyDescriptor.Builder() - .displayName("Configuration Strategy") - .name("configuration-strategy") + .name("Configuration Strategy") .required(true) .allowableValues(DIRECT_PROPERTIES, PROPERTIES_FILES) .defaultValue(DIRECT_PROPERTIES.getValue()) @@ -46,8 +46,7 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab .build(); PropertyDescriptor VAULT_URI = new PropertyDescriptor.Builder() - .name("vault.uri") - .displayName("Vault URI") + .name("Vault URI") .description("The URI of the HashiCorp Vault server (e.g., http://localhost:8200). 
Required if not specified in the " + "Bootstrap HashiCorp Vault Configuration File.") .required(true) @@ -57,8 +56,7 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab .build(); PropertyDescriptor VAULT_AUTHENTICATION = new PropertyDescriptor.Builder() - .name("vault.authentication") - .displayName("Vault Authentication") + .name("Vault Authentication") .description("Vault authentication method, as described in the Spring Vault Environment Configuration documentation " + "(https://docs.spring.io/spring-vault/docs/2.3.x/reference/html/#vault.core.environment-vault-configuration).") .required(true) @@ -68,8 +66,7 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab .build(); PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder() - .name("vault.ssl.context.service") - .displayName("SSL Context Service") + .name("SSL Context Service") .description("The SSL Context Service used to provide client certificate information for TLS/SSL connections to the " + "HashiCorp Vault server.") .required(false) @@ -78,8 +75,7 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab .build(); PropertyDescriptor VAULT_PROPERTIES_FILES = new PropertyDescriptor.Builder() - .name("vault.properties.files") - .displayName("Vault Properties Files") + .name("Vault Properties Files") .description("A comma-separated list of files containing HashiCorp Vault configuration properties, as described in the Spring Vault " + "Environment Configuration documentation (https://docs.spring.io/spring-vault/docs/2.3.x/reference/html/#vault.core.environment-vault-configuration). " + "All of the Spring property keys and authentication-specific property keys are supported.") @@ -89,8 +85,7 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab .build(); PropertyDescriptor CONNECTION_TIMEOUT = new PropertyDescriptor.Builder() - .name("vault.connection.timeout") - .displayName("Connection Timeout") + .name("Connection Timeout") .description("The connection timeout for the HashiCorp Vault client") .required(true) .defaultValue("5 sec") @@ -98,8 +93,7 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab .build(); PropertyDescriptor READ_TIMEOUT = new PropertyDescriptor.Builder() - .name("vault.read.timeout") - .displayName("Read Timeout") + .name("Read Timeout") .description("The read timeout for the HashiCorp Vault client") .required(true) .defaultValue("15 sec") @@ -112,4 +106,14 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab */ HashiCorpVaultCommunicationService getHashiCorpVaultCommunicationService(); + @Override + default void migrateProperties(PropertyConfiguration config) { + config.renameProperty("configuration-strategy", CONFIGURATION_STRATEGY.getName()); + config.renameProperty("vault.uri", VAULT_URI.getName()); + config.renameProperty("vault.authentication", VAULT_AUTHENTICATION.getName()); + config.renameProperty("vault.ssl.context.service", SSL_CONTEXT_SERVICE.getName()); + config.renameProperty("vault.properties.files", VAULT_PROPERTIES_FILES.getName()); + config.renameProperty("vault.connection.timeout", CONNECTION_TIMEOUT.getName()); + config.renameProperty("vault.read.timeout", READ_TIMEOUT.getName()); + } } diff --git a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java 
b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java index 143f9a238edc..ad069efa3015 100644 --- a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java +++ b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cacheclient/HazelcastMapCacheClient.java @@ -30,6 +30,7 @@ import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.hazelcast.services.cache.HazelcastCache; import org.apache.nifi.hazelcast.services.cachemanager.HazelcastCacheManager; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.util.StandardValidators; import java.io.ByteArrayOutputStream; @@ -54,8 +55,7 @@ public class HazelcastMapCacheClient extends AbstractControllerService implements AtomicDistributedMapCacheClient { public static final PropertyDescriptor HAZELCAST_CACHE_MANAGER = new PropertyDescriptor.Builder() - .name("hazelcast-cache-manager") - .displayName("Hazelcast Cache Manager") + .name("Hazelcast Cache Manager") .description("A Hazelcast Cache Manager which manages connections to Hazelcast and provides cache instances.") .identifiesControllerService(HazelcastCacheManager.class) .required(true) @@ -63,8 +63,7 @@ public class HazelcastMapCacheClient extends AbstractControllerService implement .build(); public static final PropertyDescriptor HAZELCAST_CACHE_NAME = new PropertyDescriptor.Builder() - .name("hazelcast-cache-name") - .displayName("Hazelcast Cache Name") + .name("Hazelcast Cache Name") .description("The name of a given cache. A Hazelcast cluster may handle multiple independent caches, each identified by a name." + " Clients using caches with the same name are working on the same data structure within Hazelcast.") .required(true) @@ -73,8 +72,7 @@ public class HazelcastMapCacheClient extends AbstractControllerService implement .build(); public static final PropertyDescriptor HAZELCAST_ENTRY_TTL = new PropertyDescriptor.Builder() - .name("hazelcast-entry-ttl") - .displayName("Hazelcast Entry Lifetime") + .name("Hazelcast Entry Lifetime") .description("Indicates how long the written entries should exist in Hazelcast. Setting it to '0 secs' means that the data " + "will exist until its deletion or until the Hazelcast server is shut down. 
Using `EmbeddedHazelcastCacheManager` as " + "cache manager will not provide policies to limit the size of the cache.") @@ -190,6 +188,13 @@ public void close() { getLogger().debug("Closing {}", getClass().getSimpleName()); } + @Override + public void migrateProperties(PropertyConfiguration config) { + config.renameProperty("hazelcast-cache-manager", HAZELCAST_CACHE_MANAGER.getName()); + config.renameProperty("hazelcast-cache-name", HAZELCAST_CACHE_NAME.getName()); + config.renameProperty("hazelcast-entry-ttl", HAZELCAST_ENTRY_TTL.getName()); + } + @Override protected List getSupportedPropertyDescriptors() { return PROPERTY_DESCRIPTORS; diff --git a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/EmbeddedHazelcastCacheManager.java b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/EmbeddedHazelcastCacheManager.java index 4578d44234ef..8a68f7cbec1e 100644 --- a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/EmbeddedHazelcastCacheManager.java +++ b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/EmbeddedHazelcastCacheManager.java @@ -30,12 +30,12 @@ import org.apache.nifi.context.PropertyContext; import org.apache.nifi.controller.ConfigurationContext; import org.apache.nifi.expression.ExpressionLanguageScope; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -63,8 +63,7 @@ public class EmbeddedHazelcastCacheManager extends IMapBasedHazelcastCacheManage " is allowed. NiFi nodes that are not listed will join the Hazelcast cluster as clients."); private static final PropertyDescriptor HAZELCAST_PORT = new PropertyDescriptor.Builder() - .name("hazelcast-port") - .displayName("Hazelcast Port") + .name("Hazelcast Port") .description("Port for the Hazelcast instance to use.") .required(true) .defaultValue(String.valueOf(DEFAULT_HAZELCAST_PORT)) @@ -73,8 +72,7 @@ public class EmbeddedHazelcastCacheManager extends IMapBasedHazelcastCacheManage .build(); private static final PropertyDescriptor HAZELCAST_CLUSTERING_STRATEGY = new PropertyDescriptor.Builder() - .name("hazelcast-clustering-strategy") - .displayName("Hazelcast Clustering Strategy") + .name("Hazelcast Clustering Strategy") .description("Specifies with what strategy the Hazelcast cluster should be created.") .required(true) .allowableValues(CLUSTER_NONE, CLUSTER_ALL_NODES, CLUSTER_EXPLICIT) @@ -82,8 +80,7 @@ public class EmbeddedHazelcastCacheManager extends IMapBasedHazelcastCacheManage .build(); private static final PropertyDescriptor HAZELCAST_INSTANCES = new PropertyDescriptor.Builder() - .name("hazelcast-instances") - .displayName("Hazelcast Instances") + .name("Hazelcast Instances") .description("Only used with \"Explicit\" Clustering Strategy!" + " List of NiFi instance host names which should be part of the Hazelcast cluster. Host names are separated by comma." + " The port specified in the \"Hazelcast Port\" property will be used as server port." 
+ @@ -94,15 +91,19 @@ public class EmbeddedHazelcastCacheManager extends IMapBasedHazelcastCacheManage .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .build(); - private static final List PROPERTY_DESCRIPTORS; + private static final List PROPERTY_DESCRIPTORS = List.of( + HAZELCAST_CLUSTER_NAME, + HAZELCAST_PORT, + HAZELCAST_CLUSTERING_STRATEGY, + HAZELCAST_INSTANCES + ); - static { - PROPERTY_DESCRIPTORS = Collections.unmodifiableList(Arrays.asList( - HAZELCAST_CLUSTER_NAME, - HAZELCAST_PORT, - HAZELCAST_CLUSTERING_STRATEGY, - HAZELCAST_INSTANCES - )); + @Override + public void migrateProperties(PropertyConfiguration config) { + super.migrateProperties(config); + config.renameProperty("hazelcast-port", HAZELCAST_PORT.getName()); + config.renameProperty("hazelcast-clustering-strategy", HAZELCAST_CLUSTERING_STRATEGY.getName()); + config.renameProperty("hazelcast-instances", HAZELCAST_INSTANCES.getName()); } @Override diff --git a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/ExternalHazelcastCacheManager.java b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/ExternalHazelcastCacheManager.java index 19ceeeefebb5..6c52988b2f77 100644 --- a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/ExternalHazelcastCacheManager.java +++ b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/ExternalHazelcastCacheManager.java @@ -24,12 +24,11 @@ import org.apache.nifi.components.ValidationResult; import org.apache.nifi.controller.ConfigurationContext; import org.apache.nifi.expression.ExpressionLanguageScope; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.util.StandardValidators; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -40,8 +39,7 @@ public class ExternalHazelcastCacheManager extends IMapBasedHazelcastCacheManager { public static final PropertyDescriptor HAZELCAST_SERVER_ADDRESS = new PropertyDescriptor.Builder() - .name("hazelcast-server-address") - .displayName("Hazelcast Server Address") + .name("Hazelcast Server Address") .description("Addresses of one or more of the Hazelcast instances, using {host:port} format, separated by comma.") .required(true) .addValidator(StandardValidators.HOSTNAME_PORT_LIST_VALIDATOR) @@ -49,8 +47,7 @@ public class ExternalHazelcastCacheManager extends IMapBasedHazelcastCacheManage .build(); public static final PropertyDescriptor HAZELCAST_RETRY_BACKOFF_INITIAL = new PropertyDescriptor.Builder() - .name("hazelcast-retry-backoff-initial") - .displayName("Hazelcast Initial Backoff") + .name("Hazelcast Initial Backoff") .description("The amount of time the client waits before it tries to reestablish connection for the first time.") .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .required(true) @@ -58,8 +55,7 @@ public class ExternalHazelcastCacheManager extends IMapBasedHazelcastCacheManage .build(); public static final PropertyDescriptor HAZELCAST_RETRY_BACKOFF_MAXIMUM = new PropertyDescriptor.Builder() - .name("hazelcast-retry-backoff-maximum") - .displayName("Hazelcast Maximum Backoff") + .name("Hazelcast Maximum Backoff") .description("The 
maximum amount of time the client waits before it tries to reestablish connection.") .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .required(true) @@ -67,8 +63,7 @@ public class ExternalHazelcastCacheManager extends IMapBasedHazelcastCacheManage .build(); public static final PropertyDescriptor HAZELCAST_RETRY_BACKOFF_MULTIPLIER = new PropertyDescriptor.Builder() - .name("hazelcast-retry-backoff-multiplier") - .displayName("Hazelcast Backoff Multiplier") + .name("Hazelcast Backoff Multiplier") .description("A multiplier by which the wait time is increased before each attempt to reestablish connection.") .addValidator(StandardValidators.NUMBER_VALIDATOR) .required(true) @@ -76,25 +71,30 @@ public class ExternalHazelcastCacheManager extends IMapBasedHazelcastCacheManage .build(); public static final PropertyDescriptor HAZELCAST_CONNECTION_TIMEOUT = new PropertyDescriptor.Builder() - .name("hazelcast-connection-timeout") - .displayName("Hazelcast Connection Timeout") + .name("Hazelcast Connection Timeout") .description("The maximum amount of time the client tries to connect or reconnect before giving up.") .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .required(true) .defaultValue(DEFAULT_CLIENT_TIMEOUT_MAXIMUM_IN_SEC + " secs") .build(); - private static final List PROPERTY_DESCRIPTORS; - - static { - final List properties = new ArrayList<>(); - properties.add(HAZELCAST_CLUSTER_NAME); - properties.add(HAZELCAST_SERVER_ADDRESS); - properties.add(HAZELCAST_RETRY_BACKOFF_INITIAL); - properties.add(HAZELCAST_RETRY_BACKOFF_MAXIMUM); - properties.add(HAZELCAST_RETRY_BACKOFF_MULTIPLIER); - properties.add(HAZELCAST_CONNECTION_TIMEOUT); - PROPERTY_DESCRIPTORS = Collections.unmodifiableList(properties); + private static final List PROPERTY_DESCRIPTORS = List.of( + HAZELCAST_CLUSTER_NAME, + HAZELCAST_SERVER_ADDRESS, + HAZELCAST_RETRY_BACKOFF_INITIAL, + HAZELCAST_RETRY_BACKOFF_MAXIMUM, + HAZELCAST_RETRY_BACKOFF_MULTIPLIER, + HAZELCAST_CONNECTION_TIMEOUT + ); + + @Override + public void migrateProperties(PropertyConfiguration config) { + super.migrateProperties(config); + config.renameProperty("hazelcast-server-address", HAZELCAST_SERVER_ADDRESS.getName()); + config.renameProperty("hazelcast-retry-backoff-initial", HAZELCAST_RETRY_BACKOFF_INITIAL.getName()); + config.renameProperty("hazelcast-retry-backoff-maximum", HAZELCAST_RETRY_BACKOFF_MAXIMUM.getName()); + config.renameProperty("hazelcast-retry-backoff-multiplier", HAZELCAST_RETRY_BACKOFF_MULTIPLIER.getName()); + config.renameProperty("hazelcast-connection-timeout", HAZELCAST_CONNECTION_TIMEOUT.getName()); } @Override diff --git a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/IMapBasedHazelcastCacheManager.java b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/IMapBasedHazelcastCacheManager.java index fc22c020ef6a..7800cd248094 100644 --- a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/IMapBasedHazelcastCacheManager.java +++ b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/main/java/org/apache/nifi/hazelcast/services/cachemanager/IMapBasedHazelcastCacheManager.java @@ -28,6 +28,7 @@ import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.hazelcast.services.cache.HazelcastCache; import 
org.apache.nifi.hazelcast.services.cache.IMapBasedHazelcastCache; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.reporting.InitializationException; @@ -47,8 +48,7 @@ abstract class IMapBasedHazelcastCacheManager extends AbstractControllerService protected static final double DEFAULT_CLIENT_BACKOFF_MULTIPLIER = 1.5; public static final PropertyDescriptor HAZELCAST_CLUSTER_NAME = new PropertyDescriptor.Builder() - .name("hazelcast-cluster-name") - .displayName("Hazelcast Cluster Name") + .name("Hazelcast Cluster Name") .description("Name of the Hazelcast cluster.") .defaultValue("nifi") // Hazelcast's default is "dev", "nifi" overwrites this. .required(true) @@ -88,6 +88,11 @@ public void shutdown() { } } + @Override + public void migrateProperties(PropertyConfiguration config) { + config.renameProperty("hazelcast-cluster-name", HAZELCAST_CLUSTER_NAME.getName()); + } + protected HazelcastInstance getClientInstance( final String clusterName, final List serverAddresses, diff --git a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/test/java/org/apache/nifi/hazelcast/services/cachemanager/TestHazelcastProcessor.java b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/test/java/org/apache/nifi/hazelcast/services/cachemanager/TestHazelcastProcessor.java index 76a1474fa9c1..5797ea99a08a 100644 --- a/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/test/java/org/apache/nifi/hazelcast/services/cachemanager/TestHazelcastProcessor.java +++ b/nifi-extension-bundles/nifi-hazelcast-bundle/nifi-hazelcast-services/src/test/java/org/apache/nifi/hazelcast/services/cachemanager/TestHazelcastProcessor.java @@ -48,8 +48,7 @@ class TestHazelcastProcessor extends AbstractProcessor { private static final DummyStringSerializer SERIALIZER = new DummyStringSerializer(); public static final PropertyDescriptor TEST_HAZELCAST_MAP_CACHE_CLIENT = new PropertyDescriptor.Builder() - .name("test-hazelcast-map-cache-client") - .displayName("Test Hazelcast Map Cache Client") + .name("Test Hazelcast Map Cache Client") .identifiesControllerService(HazelcastMapCacheClient.class) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .required(true) diff --git a/nifi-extension-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java b/nifi-extension-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java index 17039ba5eaa3..1b52a831faba 100644 --- a/nifi-extension-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java +++ b/nifi-extension-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java @@ -55,6 +55,7 @@ import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; @@ -87,8 +88,7 @@ public class ExtractHL7Attributes extends AbstractProcessor { .build(); public static final PropertyDescriptor USE_SEGMENT_NAMES = new PropertyDescriptor.Builder() - .name("use-segment-names") - .displayName("Use Segment Names") + .name("Use Segment Names") 
.description("Whether or not to use HL7 segment names in attributes") .required(true) .allowableValues("true", "false") @@ -97,8 +97,7 @@ public class ExtractHL7Attributes extends AbstractProcessor { .build(); public static final PropertyDescriptor PARSE_SEGMENT_FIELDS = new PropertyDescriptor.Builder() - .name("parse-segment-fields") - .displayName("Parse Segment Fields") + .name("Parse Segment Fields") .description("Whether or not to parse HL7 segment fields into attributes") .required(true) .allowableValues("true", "false") @@ -107,8 +106,7 @@ public class ExtractHL7Attributes extends AbstractProcessor { .build(); public static final PropertyDescriptor SKIP_VALIDATION = new PropertyDescriptor.Builder() - .name("skip-validation") - .displayName("Skip Validation") + .name("Skip Validation") .description("Whether or not to validate HL7 message values") .required(true) .allowableValues("true", "false") @@ -117,8 +115,7 @@ public class ExtractHL7Attributes extends AbstractProcessor { .build(); public static final PropertyDescriptor HL7_INPUT_VERSION = new PropertyDescriptor.Builder() - .name("hl7-input-version") - .displayName("HL7 Input Version") + .name("HL7 Input Version") .description("The HL7 version to use for parsing and validation") .required(true) .allowableValues("autodetect", "2.2", "2.3", "2.3.1", "2.4", "2.5", "2.5.1", "2.6") @@ -200,6 +197,14 @@ public void onTrigger(final ProcessContext context, final ProcessSession session session.transfer(flowFile, REL_SUCCESS); } + @Override + public void migrateProperties(PropertyConfiguration config) { + config.renameProperty("use-segment-names", USE_SEGMENT_NAMES.getName()); + config.renameProperty("parse-segment-fields", PARSE_SEGMENT_FIELDS.getName()); + config.renameProperty("skip-validation", SKIP_VALIDATION.getName()); + config.renameProperty("hl7-input-version", HL7_INPUT_VERSION.getName()); + } + public static Map getAttributes(final Group group, final boolean useNames, final boolean parseFields) throws HL7Exception { final Map attributes = new TreeMap<>(); if (!isEmpty(group)) { diff --git a/nifi-extension-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java b/nifi-extension-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java index 1c350c4f8373..6cad84049374 100644 --- a/nifi-extension-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java +++ b/nifi-extension-bundles/nifi-hubspot-bundle/nifi-hubspot-processors/src/main/java/org/apache/nifi/processors/hubspot/GetHubSpot.java @@ -42,6 +42,7 @@ import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; +import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; @@ -87,16 +88,14 @@ public class GetHubSpot extends AbstractProcessor { static final PropertyDescriptor OBJECT_TYPE = new PropertyDescriptor.Builder() - .name("object-type") - .displayName("Object Type") + .name("Object Type") .description("The HubSpot Object Type requested") .required(true) .allowableValues(HubSpotObjectType.class) .build(); static final PropertyDescriptor ACCESS_TOKEN = new PropertyDescriptor.Builder() - .name("access-token") - .displayName("Access Token") + .name("Access 
Token") .description("Access Token to authenticate requests") .required(true) .sensitive(true) @@ -105,8 +104,7 @@ public class GetHubSpot extends AbstractProcessor { .build(); static final PropertyDescriptor RESULT_LIMIT = new PropertyDescriptor.Builder() - .name("result-limit") - .displayName("Result Limit") + .name("Result Limit") .description("The maximum number of results to request for each invocation of the Processor") .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .required(false) @@ -114,8 +112,7 @@ public class GetHubSpot extends AbstractProcessor { .build(); static final PropertyDescriptor IS_INCREMENTAL = new PropertyDescriptor.Builder() - .name("is-incremental") - .displayName("Incremental Loading") + .name("Incremental Loading") .description("The processor can incrementally load the queried objects so that each object is queried exactly once." + " For each query, the processor queries objects within a time window where the objects were modified between" + " the previous run time and the current time (optionally adjusted by the Incremental Delay property).") @@ -125,8 +122,7 @@ public class GetHubSpot extends AbstractProcessor { .build(); static final PropertyDescriptor INCREMENTAL_DELAY = new PropertyDescriptor.Builder() - .name("incremental-delay") - .displayName("Incremental Delay") + .name("Incremental Delay") .description(("The ending timestamp of the time window will be adjusted earlier by the amount configured in this property." + " For example, with a property value of 10 seconds, an ending timestamp of 12:30:45 would be changed to 12:30:35." + " Set this property to avoid missing objects when the clock of your local machines and HubSpot servers' clock are not in sync" + @@ -139,8 +135,7 @@ public class GetHubSpot extends AbstractProcessor { .build(); static final PropertyDescriptor INCREMENTAL_INITIAL_START_TIME = new PropertyDescriptor.Builder() - .name("incremental-initial-start-time") - .displayName("Incremental Initial Start Time") + .name("Incremental Initial Start Time") .description("This property specifies the start time that the processor applies when running the first request." 
+ " The expected format is a UTC date-time such as '2011-12-03T10:15:30Z'") .required(false) @@ -150,8 +145,7 @@ public class GetHubSpot extends AbstractProcessor { .build(); static final PropertyDescriptor WEB_CLIENT_SERVICE_PROVIDER = new PropertyDescriptor.Builder() - .name("web-client-service-provider") - .displayName("Web Client Service Provider") + .name("Web Client Service Provider") .description("Controller service for HTTP client operations") .identifiesControllerService(WebClientServiceProvider.class) .required(true) @@ -256,6 +250,17 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } } + @Override + public void migrateProperties(PropertyConfiguration config) { + config.renameProperty("object-type", OBJECT_TYPE.getName()); + config.renameProperty("access-token", ACCESS_TOKEN.getName()); + config.renameProperty("result-limit", RESULT_LIMIT.getName()); + config.renameProperty("is-incremental", IS_INCREMENTAL.getName()); + config.renameProperty("incremental-delay", INCREMENTAL_DELAY.getName()); + config.renameProperty("incremental-initial-start-time", INCREMENTAL_INITIAL_START_TIME.getName()); + config.renameProperty("web-client-service-provider", WEB_CLIENT_SERVICE_PROVIDER.getName()); + } + private String getResponseBodyAsString(final ProcessContext context, final HttpResponseEntity response, final URI uri) { try { return IOUtils.toString(response.body(), StandardCharsets.UTF_8); From a985ca6e06af00d8afe34177803b01ea3dc2ddf7 Mon Sep 17 00:00:00 2001 From: dan-s1 Date: Thu, 16 Oct 2025 16:13:57 +0000 Subject: [PATCH 2/2] NIFI-15020 Reverted property change name on VAULT_URI property. --- .../nifi/vault/hashicorp/HashiCorpVaultClientService.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nifi-extension-bundles/nifi-hashicorp-vault-bundle/nifi-hashicorp-vault-client-service-api/src/main/java/org/apache/nifi/vault/hashicorp/HashiCorpVaultClientService.java b/nifi-extension-bundles/nifi-hashicorp-vault-bundle/nifi-hashicorp-vault-client-service-api/src/main/java/org/apache/nifi/vault/hashicorp/HashiCorpVaultClientService.java index c8fbcc887f2d..c4787c353992 100644 --- a/nifi-extension-bundles/nifi-hashicorp-vault-bundle/nifi-hashicorp-vault-client-service-api/src/main/java/org/apache/nifi/vault/hashicorp/HashiCorpVaultClientService.java +++ b/nifi-extension-bundles/nifi-hashicorp-vault-bundle/nifi-hashicorp-vault-client-service-api/src/main/java/org/apache/nifi/vault/hashicorp/HashiCorpVaultClientService.java @@ -47,6 +47,8 @@ public interface HashiCorpVaultClientService extends ControllerService, Verifiab PropertyDescriptor VAULT_URI = new PropertyDescriptor.Builder() .name("Vault URI") + .name("vault.uri") + .displayName("Vault URI") .description("The URI of the HashiCorp Vault server (e.g., http://localhost:8200). Required if not specified in the " + "Bootstrap HashiCorp Vault Configuration File.") .required(true)