Commit 2e90fe1

NIFI-15020 Standardized property names in Hadoop, Hazelcast, HL7, and HubSpot bundles (#10398)
Signed-off-by: David Handermann <[email protected]>
1 parent 7992d06 commit 2e90fe1

13 files changed, +173 −118 lines changed

nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java

Lines changed: 8 additions & 2 deletions
@@ -30,6 +30,7 @@
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.flowfile.attributes.StandardFlowFileMediaType;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
@@ -86,8 +87,7 @@ public class CreateHadoopSequenceFile extends AbstractHadoopProcessor {

     // Optional Properties.
     static final PropertyDescriptor COMPRESSION_TYPE = new PropertyDescriptor.Builder()
-            .displayName("Compression type")
-            .name("compression type")
+            .name("Compression Type")
             .description("Type of compression to use when creating Sequence File")
             .allowableValues(SequenceFile.CompressionType.values())
             .build();
@@ -180,4 +180,10 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro
         }

     }
+
+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("compression type", COMPRESSION_TYPE.getName());
+    }
 }
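
When an existing flow saved with the old key is loaded, the framework runs migrateProperties, so the stored value is re-keyed to the standardized name. Below is a minimal verification sketch, not part of the commit; it assumes nifi-mock's MockPropertyConfiguration (a wrapper around a raw property map) and PropertyConfiguration.getPropertyValue returning an Optional. Any PropertyConfiguration implementation would demonstrate the same rename:

    // Verification sketch only; MockPropertyConfiguration and its Map constructor
    // are assumptions taken from nifi-mock, not code from this commit.
    import static org.junit.jupiter.api.Assertions.assertEquals;

    import java.util.Map;
    import java.util.Optional;

    import org.apache.nifi.processors.hadoop.CreateHadoopSequenceFile;
    import org.apache.nifi.util.MockPropertyConfiguration;
    import org.junit.jupiter.api.Test;

    class CreateHadoopSequenceFileMigrationSketch {

        @Test
        void legacyCompressionTypeKeyIsRenamed() {
            // A flow saved before this commit stored the value under the old key.
            final MockPropertyConfiguration config =
                    new MockPropertyConfiguration(Map.of("compression type", "BLOCK"));

            new CreateHadoopSequenceFile().migrateProperties(config);

            // After migration, the same value resolves under the standardized name.
            assertEquals(Optional.of("BLOCK"), config.getPropertyValue("Compression Type"));
        }
    }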

nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java

Lines changed: 10 additions & 4 deletions
@@ -32,6 +32,7 @@
 import org.apache.nifi.components.RequiredPermission;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
@@ -86,17 +87,15 @@ public class DeleteHDFS extends AbstractHadoopProcessor {
             .build();

     public static final PropertyDescriptor FILE_OR_DIRECTORY = new PropertyDescriptor.Builder()
-            .name("file_or_directory")
-            .displayName("Path")
+            .name("Path")
             .description("The HDFS file or directory to delete. A wildcard expression may be used to only delete certain files")
             .required(true)
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .build();

     public static final PropertyDescriptor RECURSIVE = new PropertyDescriptor.Builder()
-            .name("recursive")
-            .displayName("Recursive")
+            .name("Recursive")
             .description("Remove contents of a non-empty directory recursively")
             .allowableValues("true", "false")
             .required(true)
@@ -223,6 +222,13 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws Pro

     }

+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("file_or_directory", FILE_OR_DIRECTORY.getName());
+        config.renameProperty("recursive", RECURSIVE.getName());
+    }
+
     protected Relationship getSuccessRelationship() {
         return REL_SUCCESS;
     }

nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSFileInfo.java

Lines changed: 25 additions & 20 deletions
@@ -49,6 +49,7 @@
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
@@ -97,8 +98,7 @@
 public class GetHDFSFileInfo extends AbstractHadoopProcessor {
     public static final String APPLICATION_JSON = "application/json";
     public static final PropertyDescriptor FULL_PATH = new PropertyDescriptor.Builder()
-            .displayName("Full path")
-            .name("gethdfsfileinfo-full-path")
+            .name("Full Path")
             .description("A directory to start listing from, or a file's full path.")
             .required(true)
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
@@ -107,8 +107,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             .build();

     public static final PropertyDescriptor RECURSE_SUBDIRS = new PropertyDescriptor.Builder()
-            .displayName("Recurse Subdirectories")
-            .name("gethdfsfileinfo-recurse-subdirs")
+            .name("Recurse Subdirectories")
             .description("Indicates whether to list files from subdirectories of the HDFS directory")
             .required(true)
             .allowableValues("true", "false")
@@ -117,35 +116,31 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             .build();

     public static final PropertyDescriptor DIR_FILTER = new PropertyDescriptor.Builder()
-            .displayName("Directory Filter")
-            .name("gethdfsfileinfo-dir-filter")
+            .name("Directory Filter")
             .description("Regex. Only directories whose names match the given regular expression will be picked up. If not provided, any filter would be apply (performance considerations).")
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .required(false)
             .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
             .build();

     public static final PropertyDescriptor FILE_FILTER = new PropertyDescriptor.Builder()
-            .displayName("File Filter")
-            .name("gethdfsfileinfo-file-filter")
+            .name("File Filter")
             .description("Regex. Only files whose names match the given regular expression will be picked up. If not provided, any filter would be apply (performance considerations).")
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .required(false)
             .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
             .build();

     public static final PropertyDescriptor FILE_EXCLUDE_FILTER = new PropertyDescriptor.Builder()
-            .displayName("Exclude Files")
-            .name("gethdfsfileinfo-file-exclude-filter")
+            .name("Exclude Files")
             .description("Regex. Files whose names match the given regular expression will not be picked up. If not provided, any filter won't be apply (performance considerations).")
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .required(false)
             .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
             .build();

     public static final PropertyDescriptor IGNORE_DOTTED_DIRS = new PropertyDescriptor.Builder()
-            .displayName("Ignore Dotted Directories")
-            .name("gethdfsfileinfo-ignore-dotted-dirs")
+            .name("Ignore Dotted Directories")
             .description("If true, directories whose names begin with a dot (\".\") will be ignored")
             .required(true)
             .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -154,8 +149,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             .build();

     public static final PropertyDescriptor IGNORE_DOTTED_FILES = new PropertyDescriptor.Builder()
-            .displayName("Ignore Dotted Files")
-            .name("gethdfsfileinfo-ignore-dotted-files")
+            .name("Ignore Dotted Files")
             .description("If true, files whose names begin with a dot (\".\") will be ignored")
             .required(true)
             .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -174,8 +168,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             "Don't group results. Generate flowfile per each HDFS object.");

     public static final PropertyDescriptor GROUPING = new PropertyDescriptor.Builder()
-            .displayName("Group Results")
-            .name("gethdfsfileinfo-group")
+            .name("Group Results")
             .description("Groups HDFS objects")
             .required(true)
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -184,8 +177,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             .build();

     public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder()
-            .displayName("Batch Size")
-            .name("gethdfsfileinfo-batch-size")
+            .name("Batch Size")
             .description("Number of records to put into an output flowfile when 'Destination' is set to 'Content'"
                     + " and 'Group Results' is set to 'None'")
             .required(false)
@@ -201,8 +193,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             "Details of given HDFS object will be stored in a content in JSON format");

     public static final PropertyDescriptor DESTINATION = new PropertyDescriptor.Builder()
-            .displayName("Destination")
-            .name("gethdfsfileinfo-destination")
+            .name("Destination")
             .description("Sets the destination for the resutls. When set to 'Content', attributes of flowfile won't be used for storing results. ")
             .required(true)
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -344,6 +335,20 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         }
     }

+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("gethdfsfileinfo-full-path", FULL_PATH.getName());
+        config.renameProperty("gethdfsfileinfo-recurse-subdirs", RECURSE_SUBDIRS.getName());
+        config.renameProperty("gethdfsfileinfo-dir-filter", DIR_FILTER.getName());
+        config.renameProperty("gethdfsfileinfo-file-filter", FILE_FILTER.getName());
+        config.renameProperty("gethdfsfileinfo-file-exclude-filter", FILE_EXCLUDE_FILTER.getName());
+        config.renameProperty("gethdfsfileinfo-ignore-dotted-dirs", IGNORE_DOTTED_DIRS.getName());
+        config.renameProperty("gethdfsfileinfo-ignore-dotted-files", IGNORE_DOTTED_FILES.getName());
+        config.renameProperty("gethdfsfileinfo-group", GROUPING.getName());
+        config.renameProperty("gethdfsfileinfo-batch-size", BATCH_SIZE.getName());
+        config.renameProperty("gethdfsfileinfo-destination", DESTINATION.getName());
+    }

     /*
      * Walks thru HDFS tree. This method will return null to the main if there is no provided path existing.
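
GetHDFSFileInfo alone renames ten properties, so the override is pure boilerplate. The same mapping could be expressed as a table; the sketch below is a hypothetical alternative shape inside GetHDFSFileInfo, not what the commit does. The straight-line calls the commit uses arguably diff more cleanly and keep each legacy key beside its descriptor:

    // Hypothetical alternative, not the commit's code; requires java.util.Map
    // in the imports.
    private static final Map<String, PropertyDescriptor> LEGACY_NAMES = Map.ofEntries(
            Map.entry("gethdfsfileinfo-full-path", FULL_PATH),
            Map.entry("gethdfsfileinfo-recurse-subdirs", RECURSE_SUBDIRS),
            Map.entry("gethdfsfileinfo-dir-filter", DIR_FILTER),
            Map.entry("gethdfsfileinfo-file-filter", FILE_FILTER),
            Map.entry("gethdfsfileinfo-file-exclude-filter", FILE_EXCLUDE_FILTER),
            Map.entry("gethdfsfileinfo-ignore-dotted-dirs", IGNORE_DOTTED_DIRS),
            Map.entry("gethdfsfileinfo-ignore-dotted-files", IGNORE_DOTTED_FILES),
            Map.entry("gethdfsfileinfo-group", GROUPING),
            Map.entry("gethdfsfileinfo-batch-size", BATCH_SIZE),
            Map.entry("gethdfsfileinfo-destination", DESTINATION));

    @Override
    public void migrateProperties(PropertyConfiguration config) {
        super.migrateProperties(config);
        // Renaming a key that is absent is expected to be a no-op, so this is
        // safe for flows created after the rename as well.
        LEGACY_NAMES.forEach((legacy, descriptor) ->
                config.renameProperty(legacy, descriptor.getName()));
    }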

nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java

Lines changed: 14 additions & 8 deletions
@@ -38,6 +38,7 @@
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.state.Scope;
 import org.apache.nifi.components.state.StateMap;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
@@ -107,8 +108,7 @@ public class ListHDFS extends AbstractHadoopProcessor {
             .build();

     public static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder()
-            .name("record-writer")
-            .displayName("Record Writer")
+            .name("Record Writer")
             .description("Specifies the Record Writer to use for creating the listing. If not specified, one FlowFile will be created for each "
                     + "entity that is listed. If the Record Writer is specified, all entities will be written to a single FlowFile.")
             .required(false)
@@ -124,8 +124,7 @@ public class ListHDFS extends AbstractHadoopProcessor {
             .build();

     public static final PropertyDescriptor FILE_FILTER_MODE = new PropertyDescriptor.Builder()
-            .name("file-filter-mode")
-            .displayName("File Filter Mode")
+            .name("File Filter Mode")
             .description("Determines how the regular expression in " + FILE_FILTER.getDisplayName() + " will be used when retrieving listings.")
             .required(true)
             .allowableValues(FilterMode.class)
@@ -134,17 +133,15 @@ public class ListHDFS extends AbstractHadoopProcessor {
             .build();

     public static final PropertyDescriptor MINIMUM_FILE_AGE = new PropertyDescriptor.Builder()
-            .name("minimum-file-age")
-            .displayName("Minimum File Age")
+            .name("Minimum File Age")
             .description("The minimum age that a file must be in order to be pulled; any file younger than this "
                     + "amount of time (based on last modification date) will be ignored")
             .required(false)
             .addValidator(StandardValidators.createTimePeriodValidator(0, TimeUnit.MILLISECONDS, Long.MAX_VALUE, TimeUnit.NANOSECONDS))
             .build();

     public static final PropertyDescriptor MAXIMUM_FILE_AGE = new PropertyDescriptor.Builder()
-            .name("maximum-file-age")
-            .displayName("Maximum File Age")
+            .name("Maximum File Age")
             .description("The maximum age that a file must be in order to be pulled; any file older than this "
                     + "amount of time (based on last modification date) will be ignored. Minimum value is 100ms.")
             .required(false)
@@ -319,6 +316,15 @@ public void onTrigger(final ProcessContext context, final ProcessSession session

     }

+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("record-writer", RECORD_WRITER.getName());
+        config.renameProperty("file-filter-mode", FILE_FILTER_MODE.getName());
+        config.renameProperty("minimum-file-age", MINIMUM_FILE_AGE.getName());
+        config.renameProperty("maximum-file-age", MAXIMUM_FILE_AGE.getName());
+    }
+
     private PathFilter createPathFilter(final ProcessContext context) {
         final FilterMode filterMode = FilterMode.forName(context.getProperty(FILE_FILTER_MODE).getValue());
         final boolean recursive = context.getProperty(RECURSE_SUBDIRS).asBoolean();

nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java

Lines changed: 8 additions & 2 deletions
@@ -56,6 +56,7 @@
 import org.apache.nifi.fileresource.service.api.FileResource;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
@@ -162,8 +163,7 @@ public class PutHDFS extends AbstractHadoopProcessor {
             .build();

     protected static final PropertyDescriptor WRITING_STRATEGY = new PropertyDescriptor.Builder()
-            .name("writing-strategy")
-            .displayName("Writing Strategy")
+            .name("Writing Strategy")
             .description("Defines the approach for writing the FlowFile data.")
             .required(true)
             .defaultValue(WRITE_AND_RENAME_AV)
@@ -554,6 +554,12 @@ private AclStatus getAclStatus(final Path dirPath) {
         });
     }

+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("writing-strategy", WRITING_STRATEGY.getName());
+    }
+
     protected Relationship getSuccessRelationship() {
         return REL_SUCCESS;
     }
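
Every file in this commit follows the same two-step recipe: drop the machine-oriented name, promote the former displayName to the canonical name, and map the legacy key forward in migrateProperties. A condensed, self-contained sketch of that recipe follows; ExampleProcessor and its property are hypothetical stand-ins, not code from the commit:

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.migration.PropertyConfiguration;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.util.StandardValidators;

    public class ExampleProcessor extends AbstractProcessor {

        // Before this commit's convention: .name("example-prop").displayName("Example Property")
        // After: the human-readable value is the canonical name; no separate displayName.
        static final PropertyDescriptor EXAMPLE = new PropertyDescriptor.Builder()
                .name("Example Property")
                .description("Illustrative property for the renaming pattern")
                .required(false)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();

        @Override
        public void migrateProperties(final PropertyConfiguration config) {
            // The default implementation is a no-op; the Hadoop processors above call
            // super first because AbstractHadoopProcessor migrates shared properties of its own.
            config.renameProperty("example-prop", EXAMPLE.getName());
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) {
            // Irrelevant to the migration pattern.
        }
    }

Once the override runs, a flow definition that predates the rename behaves exactly like one created afterward.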
