CreateHadoopSequenceFile.java
@@ -30,6 +30,7 @@
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.flowfile.attributes.StandardFlowFileMediaType;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
@@ -86,8 +87,7 @@ public class CreateHadoopSequenceFile extends AbstractHadoopProcessor {

     // Optional Properties.
     static final PropertyDescriptor COMPRESSION_TYPE = new PropertyDescriptor.Builder()
-            .displayName("Compression type")
-            .name("compression type")
+            .name("Compression Type")
             .description("Type of compression to use when creating Sequence File")
             .allowableValues(SequenceFile.CompressionType.values())
             .build();
@@ -180,4 +180,10 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
         }
 
     }
+
+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("compression type", COMPRESSION_TYPE.getName());
+    }
 }
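
A quick sketch of how this migration could be exercised in a unit test, assuming the MockPropertyConfiguration helper from nifi-mock and JUnit 5 (the test class below is illustrative, not part of this change, and the helper's exact API should be double-checked):

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;

import java.util.Map;
import org.apache.nifi.util.MockPropertyConfiguration;
import org.junit.jupiter.api.Test;

class CreateHadoopSequenceFileMigrationTest {

    @Test
    void renamesLegacyCompressionTypeProperty() {
        // Simulate a flow saved before this change, where the property was
        // stored under the old internal name.
        final MockPropertyConfiguration config =
                new MockPropertyConfiguration(Map.of("compression type", "BLOCK"));

        new CreateHadoopSequenceFile().migrateProperties(config);

        // After migration, the value should only be reachable under the new name.
        assertFalse(config.hasProperty("compression type"));
        assertEquals("BLOCK", config.getPropertyValue("Compression Type").orElse(null));
    }
}

The same pattern repeats for each processor in this PR; only the old/new name pairs differ.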
DeleteHDFS.java
@@ -32,6 +32,7 @@
 import org.apache.nifi.components.RequiredPermission;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
@@ -86,17 +87,15 @@ public class DeleteHDFS extends AbstractHadoopProcessor {
             .build();
 
     public static final PropertyDescriptor FILE_OR_DIRECTORY = new PropertyDescriptor.Builder()
-            .name("file_or_directory")
-            .displayName("Path")
+            .name("Path")
             .description("The HDFS file or directory to delete. A wildcard expression may be used to only delete certain files")
             .required(true)
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .build();
 
     public static final PropertyDescriptor RECURSIVE = new PropertyDescriptor.Builder()
-            .name("recursive")
-            .displayName("Recursive")
+            .name("Recursive")
             .description("Remove contents of a non-empty directory recursively")
             .allowableValues("true", "false")
             .required(true)
@@ -223,6 +222,13 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {

     }
 
+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("file_or_directory", FILE_OR_DIRECTORY.getName());
+        config.renameProperty("recursive", RECURSIVE.getName());
+    }
+
     protected Relationship getSuccessRelationship() {
         return REL_SUCCESS;
     }
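
Worth spelling out why dropping .displayName(...) is safe across these files: PropertyDescriptor.Builder falls back to the name when no display name is set, so the label shown in the UI is unchanged. A standalone illustration of that fallback (not from this PR):

import org.apache.nifi.components.PropertyDescriptor;

class DisplayNameFallbackDemo {
    public static void main(String[] args) {
        final PropertyDescriptor pd = new PropertyDescriptor.Builder()
                .name("Path")
                .description("The HDFS file or directory to delete.")
                .required(true)
                .build();
        // With no explicit displayName, the builder falls back to the name.
        System.out.println(pd.getDisplayName()); // prints: Path
    }
}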
GetHDFSFileInfo.java
@@ -49,6 +49,7 @@
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
@@ -97,8 +98,7 @@
 public class GetHDFSFileInfo extends AbstractHadoopProcessor {
     public static final String APPLICATION_JSON = "application/json";
     public static final PropertyDescriptor FULL_PATH = new PropertyDescriptor.Builder()
-            .displayName("Full path")
-            .name("gethdfsfileinfo-full-path")
+            .name("Full Path")
             .description("A directory to start listing from, or a file's full path.")
             .required(true)
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
@@ -107,8 +107,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             .build();
 
     public static final PropertyDescriptor RECURSE_SUBDIRS = new PropertyDescriptor.Builder()
-            .displayName("Recurse Subdirectories")
-            .name("gethdfsfileinfo-recurse-subdirs")
+            .name("Recurse Subdirectories")
             .description("Indicates whether to list files from subdirectories of the HDFS directory")
             .required(true)
             .allowableValues("true", "false")
@@ -117,35 +116,31 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             .build();
 
     public static final PropertyDescriptor DIR_FILTER = new PropertyDescriptor.Builder()
-            .displayName("Directory Filter")
-            .name("gethdfsfileinfo-dir-filter")
+            .name("Directory Filter")
             .description("Regex. Only directories whose names match the given regular expression will be picked up. If not provided, no filter will be applied (performance considerations).")
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .required(false)
             .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
             .build();
 
     public static final PropertyDescriptor FILE_FILTER = new PropertyDescriptor.Builder()
-            .displayName("File Filter")
-            .name("gethdfsfileinfo-file-filter")
+            .name("File Filter")
             .description("Regex. Only files whose names match the given regular expression will be picked up. If not provided, no filter will be applied (performance considerations).")
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .required(false)
             .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
             .build();
 
     public static final PropertyDescriptor FILE_EXCLUDE_FILTER = new PropertyDescriptor.Builder()
-            .displayName("Exclude Files")
-            .name("gethdfsfileinfo-file-exclude-filter")
+            .name("Exclude Files")
             .description("Regex. Files whose names match the given regular expression will not be picked up. If not provided, no exclusion filter will be applied (performance considerations).")
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .required(false)
             .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
             .build();
 
     public static final PropertyDescriptor IGNORE_DOTTED_DIRS = new PropertyDescriptor.Builder()
-            .displayName("Ignore Dotted Directories")
-            .name("gethdfsfileinfo-ignore-dotted-dirs")
+            .name("Ignore Dotted Directories")
             .description("If true, directories whose names begin with a dot (\".\") will be ignored")
             .required(true)
             .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -154,8 +149,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             .build();
 
     public static final PropertyDescriptor IGNORE_DOTTED_FILES = new PropertyDescriptor.Builder()
-            .displayName("Ignore Dotted Files")
-            .name("gethdfsfileinfo-ignore-dotted-files")
+            .name("Ignore Dotted Files")
             .description("If true, files whose names begin with a dot (\".\") will be ignored")
             .required(true)
             .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -174,8 +168,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
"Don't group results. Generate flowfile per each HDFS object.");

public static final PropertyDescriptor GROUPING = new PropertyDescriptor.Builder()
.displayName("Group Results")
.name("gethdfsfileinfo-group")
.name("Group Results")
.description("Groups HDFS objects")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -184,8 +177,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
             .build();
 
     public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder()
-            .displayName("Batch Size")
-            .name("gethdfsfileinfo-batch-size")
+            .name("Batch Size")
             .description("Number of records to put into an output flowfile when 'Destination' is set to 'Content'"
                     + " and 'Group Results' is set to 'None'")
             .required(false)
@@ -201,8 +193,7 @@ public class GetHDFSFileInfo extends AbstractHadoopProcessor {
"Details of given HDFS object will be stored in a content in JSON format");

public static final PropertyDescriptor DESTINATION = new PropertyDescriptor.Builder()
.displayName("Destination")
.name("gethdfsfileinfo-destination")
.name("Destination")
.description("Sets the destination for the resutls. When set to 'Content', attributes of flowfile won't be used for storing results. ")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -344,6 +335,20 @@ public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
         }
     }
 
+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("gethdfsfileinfo-full-path", FULL_PATH.getName());
+        config.renameProperty("gethdfsfileinfo-recurse-subdirs", RECURSE_SUBDIRS.getName());
+        config.renameProperty("gethdfsfileinfo-dir-filter", DIR_FILTER.getName());
+        config.renameProperty("gethdfsfileinfo-file-filter", FILE_FILTER.getName());
+        config.renameProperty("gethdfsfileinfo-file-exclude-filter", FILE_EXCLUDE_FILTER.getName());
+        config.renameProperty("gethdfsfileinfo-ignore-dotted-dirs", IGNORE_DOTTED_DIRS.getName());
+        config.renameProperty("gethdfsfileinfo-ignore-dotted-files", IGNORE_DOTTED_FILES.getName());
+        config.renameProperty("gethdfsfileinfo-group", GROUPING.getName());
+        config.renameProperty("gethdfsfileinfo-batch-size", BATCH_SIZE.getName());
+        config.renameProperty("gethdfsfileinfo-destination", DESTINATION.getName());
+    }
 
     /*
      * Walks through the HDFS tree. This method will return null to the caller if the provided path does not exist.
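
Since GetHDFSFileInfo carries ten renames, the same override could also be written table-driven. A hypothetical alternative sketch (a class fragment, not what this PR does), using only the renameProperty call shown above:

// Old internal names mapped to the new descriptor names.
private static final Map<String, String> LEGACY_PROPERTY_NAMES = Map.of(
        "gethdfsfileinfo-full-path", "Full Path",
        "gethdfsfileinfo-recurse-subdirs", "Recurse Subdirectories",
        "gethdfsfileinfo-dir-filter", "Directory Filter",
        "gethdfsfileinfo-file-filter", "File Filter",
        "gethdfsfileinfo-file-exclude-filter", "Exclude Files",
        "gethdfsfileinfo-ignore-dotted-dirs", "Ignore Dotted Directories",
        "gethdfsfileinfo-ignore-dotted-files", "Ignore Dotted Files",
        "gethdfsfileinfo-group", "Group Results",
        "gethdfsfileinfo-batch-size", "Batch Size",
        "gethdfsfileinfo-destination", "Destination");

@Override
public void migrateProperties(PropertyConfiguration config) {
    super.migrateProperties(config);
    // Assumes renameProperty is a no-op when the old name is absent.
    LEGACY_PROPERTY_NAMES.forEach(config::renameProperty);
}

The explicit per-property calls in the PR keep each rename tied to its descriptor constant, which is easier to grep; the map variant trades that for brevity.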
ListHDFS.java
@@ -38,6 +38,7 @@
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.state.Scope;
 import org.apache.nifi.components.state.StateMap;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
@@ -107,8 +108,7 @@ public class ListHDFS extends AbstractHadoopProcessor {
             .build();
 
     public static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder()
-            .name("record-writer")
-            .displayName("Record Writer")
+            .name("Record Writer")
             .description("Specifies the Record Writer to use for creating the listing. If not specified, one FlowFile will be created for each "
                     + "entity that is listed. If the Record Writer is specified, all entities will be written to a single FlowFile.")
             .required(false)
@@ -124,8 +124,7 @@ public class ListHDFS extends AbstractHadoopProcessor {
             .build();
 
     public static final PropertyDescriptor FILE_FILTER_MODE = new PropertyDescriptor.Builder()
-            .name("file-filter-mode")
-            .displayName("File Filter Mode")
+            .name("File Filter Mode")
             .description("Determines how the regular expression in " + FILE_FILTER.getDisplayName() + " will be used when retrieving listings.")
             .required(true)
             .allowableValues(FilterMode.class)
@@ -134,17 +133,15 @@ public class ListHDFS extends AbstractHadoopProcessor {
             .build();
 
     public static final PropertyDescriptor MINIMUM_FILE_AGE = new PropertyDescriptor.Builder()
-            .name("minimum-file-age")
-            .displayName("Minimum File Age")
+            .name("Minimum File Age")
             .description("The minimum age that a file must be in order to be pulled; any file younger than this "
                     + "amount of time (based on last modification date) will be ignored")
             .required(false)
             .addValidator(StandardValidators.createTimePeriodValidator(0, TimeUnit.MILLISECONDS, Long.MAX_VALUE, TimeUnit.NANOSECONDS))
             .build();
 
     public static final PropertyDescriptor MAXIMUM_FILE_AGE = new PropertyDescriptor.Builder()
-            .name("maximum-file-age")
-            .displayName("Maximum File Age")
+            .name("Maximum File Age")
             .description("The maximum age that a file must be in order to be pulled; any file older than this "
                     + "amount of time (based on last modification date) will be ignored. Minimum value is 100ms.")
             .required(false)
@@ -319,6 +316,15 @@ public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

     }
 
+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("record-writer", RECORD_WRITER.getName());
+        config.renameProperty("file-filter-mode", FILE_FILTER_MODE.getName());
+        config.renameProperty("minimum-file-age", MINIMUM_FILE_AGE.getName());
+        config.renameProperty("maximum-file-age", MAXIMUM_FILE_AGE.getName());
+    }
+
     private PathFilter createPathFilter(final ProcessContext context) {
         final FilterMode filterMode = FilterMode.forName(context.getProperty(FILE_FILTER_MODE).getValue());
         final boolean recursive = context.getProperty(RECURSE_SUBDIRS).asBoolean();
PutHDFS.java
@@ -56,6 +56,7 @@
 import org.apache.nifi.fileresource.service.api.FileResource;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
@@ -162,8 +163,7 @@ public class PutHDFS extends AbstractHadoopProcessor {
             .build();
 
     protected static final PropertyDescriptor WRITING_STRATEGY = new PropertyDescriptor.Builder()
-            .name("writing-strategy")
-            .displayName("Writing Strategy")
+            .name("Writing Strategy")
             .description("Defines the approach for writing the FlowFile data.")
             .required(true)
             .defaultValue(WRITE_AND_RENAME_AV)
@@ -554,6 +554,12 @@ private AclStatus getAclStatus(final Path dirPath) {
         });
     }
 
+    @Override
+    public void migrateProperties(PropertyConfiguration config) {
+        super.migrateProperties(config);
+        config.renameProperty("writing-strategy", WRITING_STRATEGY.getName());
+    }
+
     protected Relationship getSuccessRelationship() {
         return REL_SUCCESS;
     }