diff --git a/DataFileUtil.html b/DataFileUtil.html index a025f6c..3c46184 100644 --- a/DataFileUtil.html +++ b/DataFileUtil.html @@ -1 +1 @@ -DataFileUtil
/*
*Contains utilities for saving and retrieving data to and from KBase data
*services. Requires Shock 0.9.6+ and Workspace Service 0.4.1+.
*
*Note that some calls may create files or directories in the root of the scratch space (typically
*/kb/module/work/tmp). For this reason client programmers should not request that DFU archive from
*the root of the scratch space - always create a new directory (e.g. using a UUID for a name or a
*standard library temporary directory utility) and add the target files to that directory when
*archiving.
*/
moduleDataFileUtil{

/*
*A boolean - 0 for false, 1 for true.
*@range(0, 1)
*/
typedefintboolean;

/*
*A handle for a file stored in Shock.
*hid - the id of the handle in the Handle Service that references this
*shock node
*id - the id for the shock node
*url - the url of the shock server
*type - the type of the handle. This should always be shock.
*file_name - the name of the file
*remote_md5 - the md5 digest of the file.
*/
typedefstructure{
stringhid;
stringfile_name;
stringid;
stringurl;
stringtype;
stringremote_md5;
}
Handle;

/*
*Input for the shock_to_file function.
*
*Required parameters:
*shock_id | handle_id - the ID of the Shock node, or the Handle to a shock node.
*file_path - the location to save the file output. If this is a
*directory, the file will be named as per the filename in Shock.
*
*Optional parameters:
*unpack - either null, 'uncompress', or 'unpack'. 'uncompress' will cause
*any bzip or gzip files to be uncompressed. 'unpack' will behave the
*same way, but it will also unpack tar and zip archive files
*(uncompressing gzipped or bzipped archive files if necessary). If
*'uncompress' is specified and an archive file is encountered, an
*error will be thrown. If the file is an archive, it will be
*unbundled into the directory containing the original output file.
*
*Note that if the file name (either as provided by the user or by
*Shock) without the decompression extension (e.g. .gz, .zip or
*.tgz -> .tar) points to an existing file and unpack is specified,
*that file will be overwritten by the decompressed Shock file.
*/
typedefstructure{
stringshock_id;
stringhandle_id;
stringfile_path;
stringunpack;
}
ShockToFileParams;

/*
*Output from the shock_to_file function.
*
*node_file_name - the filename of the file as stored in Shock.
*file_path - the path to the downloaded file. If a directory was
*specified in the input, this will be the directory appended with the
*shock file name. If a file was specified, it will be that file path.
*In either case, if the file is uncompressed any compression file
*extensions will be removed (e.g. .gz) and/or altered (e.g. .tgz ->
*.tar) as appropriate.
*size - the size of the file in bytes as stored in Shock, prior to
*unpacking.
*attributes - the file attributes, if any, stored in Shock.
*/
typedefstructure{
stringnode_file_name;
stringfile_path;
intsize;
mapping<string,UnspecifiedObject>attributes;
}
ShockToFileOutput;

/*
*Download a file from Shock.
*/
funcdefshock_to_file(ShockToFileParamsparams)returns(ShockToFileOutputout)authenticationrequired;

/*
*Download multiple files from Shock.
*/
funcdefshock_to_file_mass(list<ShockToFileParams>params)returns(list<ShockToFileOutput>out)authenticationrequired;

/*
*Input for the file_to_shock function.
*
*Required parameters:
*file_path - the location of the file (or directory if using the
*pack parameter) to load to Shock.
*
*Optional parameters:
*attributes - DEPRECATED: attributes are currently ignored by the upload function and
*will be removed entirely in a future version. User-specified attributes to save to the
*Shock node along with the file.
*make_handle - make a Handle Service handle for the shock node. Default
*false.
*pack - compress a file or archive a directory before loading to Shock.
*The file_path argument will be appended with the appropriate file
*extension prior to writing. For gzips only, if the file extension
*denotes that the file is already compressed, it will be skipped. If
*file_path is a directory and tarring or zipping is specified, the
*created file name will be set to the directory name, possibly
*overwriting an existing file. Attempting to pack the root directory
*is an error. Do not attempt to pack the scratch space root as noted
*in the module description.
*
*The allowed values are:
*gzip - gzip the file given by file_path.
*targz - tar and gzip the directory specified by the directory
*portion of the file_path into the file specified by the
*file_path.
*zip - as targz but zip the directory.
*/
typedefstructure{
stringfile_path;
mapping<string,UnspecifiedObject>attributes;
booleanmake_handle;
stringpack;
}
FileToShockParams;

/*
*Output of the file_to_shock function.
*
*shock_id - the ID of the new Shock node.
*handle - the new handle, if created. Null otherwise.
*node_file_name - the name of the file stored in Shock.
*size - the size of the file stored in shock.
*/
typedefstructure{
stringshock_id;
Handlehandle;
stringnode_file_name;
stringsize;
}
FileToShockOutput;

/*
*Load a file to Shock.
*/
funcdeffile_to_shock(FileToShockParamsparams)returns(FileToShockOutputout)authenticationrequired;

typedefstructure{
stringfile_path;
}
UnpackFileParams;

typedefstructure{
stringfile_path;
}
UnpackFileResult;

/*
*Using the same logic as unpacking a Shock file, this method will cause
*any bzip or gzip files to be uncompressed, and then unpack tar and zip
*archive files (uncompressing gzipped or bzipped archive files if
*necessary). If the file is an archive, it will be unbundled into the
*directory containing the original output file.
*/
funcdefunpack_file(UnpackFileParamsparams)returns(UnpackFileResultout)authenticationrequired;

/*
*Input for the pack_file function.
*
*Required parameters:
*file_path - the location of the file (or directory if using the
*pack parameter) to load to Shock.
*pack - The format into which the file or files will be packed.
*The file_path argument will be appended with the appropriate file
*extension prior to writing. For gzips only, if the file extension
*denotes that the file is already compressed, it will be skipped. If
*file_path is a directory and tarring or zipping is specified, the
*created file name will be set to the directory name, possibly
*overwriting an existing file. Attempting to pack the root directory
*is an error. Do not attempt to pack the scratch space root as noted
*in the module description.
*
*The allowed values are:
*gzip - gzip the file given by file_path.
*targz - tar and gzip the directory specified by the directory
*portion of the file_path into the file specified by the
*file_path.
*zip - as targz but zip the directory.
*/
typedefstructure{
stringfile_path;
stringpack;
}
PackFileParams;

/*
*Output from the pack_file function.
*
*file_path - the path to the packed file.
*/
typedefstructure{
stringfile_path;
}
PackFileResult;

/*
*Pack a file or directory into gzip, targz, or zip archives.
*/
funcdefpack_file(PackFileParamsparams)returns(PackFileResultout)authenticationrequired;

/*
*Input for the package_for_download function.
*
*Required parameters:
*file_path - the location of the directory to compress as zip archive
*before loading to Shock. This argument will be appended with the
*'.zip' file extension prior to writing. If it is a directory, file
*name of the created archive will be set to the directory name
*followed by '.zip', possibly overwriting an existing file.
*Attempting to pack the root directory is an error. Do not attempt
*to pack the scratch space root as noted in the module description.
*ws_refs - list of references to workspace objects which will be used to
*produce info-files in JSON format containing workspace metadata and
*provenance structures. It produces new files in folder pointed
*by file_path (or folder containing file pointed by file_path if
*it's not folder).
*Optional parameters:
*attributes - DEPRECATED: attributes are currently ignored by the upload function and
*will be removed entirely in a future version. User-specified attributes to save to the
*Shock node along with the file.
*/
typedefstructure{
stringfile_path;
mapping<string,UnspecifiedObject>attributes;
list<string>ws_refs;
}
PackageForDownloadParams;

/*
*Output of the package_for_download function.
*
*shock_id - the ID of the new Shock node.
*node_file_name - the name of the file stored in Shock.
*size - the size of the file stored in shock.
*/
typedefstructure{
stringshock_id;
stringnode_file_name;
stringsize;
}
PackageForDownloadOutput;

funcdefpackage_for_download(PackageForDownloadParamsparams)returns(PackageForDownloadOutput)authenticationrequired;

/*
*Load multiple files to Shock.
*/
funcdeffile_to_shock_mass(list<FileToShockParams>params)returns(list<FileToShockOutput>out)authenticationrequired;

/*
*Input for the copy_shock_node function.
*
*Required parameters:
*shock_id - the id of the node to copy.
*
*Optional parameters:
*make_handle - make a Handle Service handle for the shock node. Default
*false.
*/
typedefstructure{
stringshock_id;
booleanmake_handle;
}
CopyShockNodeParams;

/*
*Output of the copy_shock_node function.
*
*shock_id - the id of the new Shock node.
*handle - the new handle, if created. Null otherwise.
*/
typedefstructure{
stringshock_id;
Handlehandle;
}
CopyShockNodeOutput;

/*
*Copy a Shock node.
*/
funcdefcopy_shock_node(CopyShockNodeParamsparams)returns(CopyShockNodeOutputout)authenticationrequired;

/*
*Input for the own_shock_node function.
*
*Required parameters:
*shock_id - the id of the node for which the user needs ownership.
*
*Optional parameters:
*make_handle - make or find a Handle Service handle for the shock node.
*Default false.
*/
typedefstructure{
stringshock_id;
booleanmake_handle;
}
OwnShockNodeParams;

/*
*Output of the own_shock_node function.
*
*shock_id - the id of the (possibly new) Shock node.
*handle - the handle, if requested. Null otherwise.
*/
typedefstructure{
stringshock_id;
Handlehandle;
}
OwnShockNodeOutput;

/*
*Gain ownership of a Shock node.
*
*Returns a shock node id which is owned by the caller, given a shock
*node id.
*
*If the shock node is already owned by the caller, returns the same
*shock node ID. If not, the ID of a copy of the original node will be
*returned.
*
*If a handle is requested, the node is already owned by the caller, and
*a handle already exists, that handle will be returned. Otherwise a new
*handle will be created and returned.
*/
funcdefown_shock_node(OwnShockNodeParamsparams)returns(OwnShockNodeOutputout)authenticationrequired;

/*
*Translate a workspace name to a workspace ID.
*/
funcdefws_name_to_id(stringname)returns(intid)authenticationrequired;

/*
*Information about an object, including user provided metadata.
*
*objid - the numerical id of the object.
*name - the name of the object.
*type - the type of the object.
*save_date - the save date of the object.
*version - the version of the object.
*saved_by - the user that saved or copied the object.
*wsid - the id of the workspace containing the object.
*workspace - the name of the workspace containing the object.
*chsum - the md5 checksum of the object.
*size - the size of the object in bytes.
*meta - arbitrary user-supplied metadata about
*the object.
*/
typedeftuple<intobjid,stringname,stringtype,stringsave_date,intversion,stringsaved_by,intwsid,stringworkspace,stringchsum,intsize,mapping<string,string>meta>object_info;

/*
*An object and associated data required for saving.
*
*Required parameters:
*type - the workspace type string for the object. Omit the version
*information to use the latest version.
*data - the object data.
*
*One of an object name or id:
*name - the name of the object.
*objid - the id of the object to save over.
*
*Optional parameters:
*meta - arbitrary user-supplied metadata for the object,
*not to exceed 16kb; if the object type specifies automatic
*metadata extraction with the 'meta ws' annotation, and your
*metadata name conflicts, then your metadata will be silently
*overwritten.
*hidden - true if this object should not be listed when listing
*workspace objects.
*extra_provenance_input_refs - (optional) if set, these refs will
*be appended to the primary ProvenanceAction input_ws_objects
*reference list. In general, if the input WS object ref was
*passed in from a narrative App, this will be set for you.
*However, there are cases where the object ref passed to
*the App is a container, and you are operating on a member
*or subobject of the container, in which case to maintain
*that direct mapping to those subobjects in the provenance
*of new objects, you can provide additional object refs
*here. For example, if the input is a ReadsSet, and your
*App creates a new WS object for each read library in the
*set, you may want a direct reference from each new WS
*object not only to the set, but also to the individual
*read library.
*/
typedefstructure{
stringtype;
UnspecifiedObjectdata;
stringname;
intobjid;
mapping<string,string>meta;
booleanhidden;
list<string>extra_provenance_input_refs;
}
ObjectSaveData;

/*
*Input parameters for the "save_objects" function.
*
*Required parameters:
*id - the numerical ID of the workspace.
*objects - the objects to save.
*
*The object provenance is automatically pulled from the SDK runner.
*/
typedefstructure{
intid;
list<ObjectSaveData>objects;
}
SaveObjectsParams;

/*
*Save objects to the workspace.
*
*The objects will be sorted prior to saving to avoid the Workspace sort memory limit.
*Note that if the object contains workspace object refs in mapping keys that may cause
*the Workspace to resort the data. To avoid this, convert any refs in mapping keys to UPA
*format (e.g. #/#/#, where # is a positive integer).
*
*If the data is very large, using the WSLargeDataIO SDK module is advised.
*
*Saving over a deleted object undeletes it.
*/
funcdefsave_objects(SaveObjectsParamsparams)returns(list<object_info>info)authenticationrequired;

/*
*Input parameters for the "get_objects" function.
*
*Required parameters:
*object_refs - a list of object references in the form X/Y/Z, where X is
*the workspace name or id, Y is the object name or id, and Z is the
*(optional) object version. In general, always use ids rather than
*names if possible to avoid race conditions.
*A reference path may be specified by separating references by a semicolon, e.g.
*4/5/6;5/7/2;8/9/4 specifies that the user wishes to retrieve the fourth version of
*the object with id 9 in workspace 8, and that there exists a reference path from
*the sixth version of the object with id 5 in workspace 4, to which the user has access.
*The user may or may not have access to workspaces 5 and 8.
*
*
*Optional parameters:
*ignore_errors - ignore any errors that occur when fetching an object
*and instead insert a null into the returned list.
*/
typedefstructure{
list<string>object_refs;
booleanignore_errors;
}
GetObjectsParams;

/*
*The data and supplemental info for an object.
*
*UnspecifiedObject data - the object's data or subset data.
*object_info info - information about the object.
*/
typedefstructure{
UnspecifiedObjectdata;
object_infoinfo;
}
ObjectData;

/*
*Results from the get_objects function.
*
*list<ObjectData> data - the returned objects.
*/
typedefstructure{
list<ObjectData>data;
}
GetObjectsResults;

/*
*Get objects from the workspace.
*/
funcdefget_objects(GetObjectsParamsparams)returns(GetObjectsResultsresults)authenticationrequired;

/*
*Get the versions of the Workspace service and Shock service.
*/
funcdefversions()returns(stringwsver,stringshockver)authenticationrequired;

/*
*Input parameters for the "download_staging_file" function.
*
*Required parameters:
*staging_file_subdir_path: subdirectory file path
*e.g.
*for file: /data/bulk/user_name/file_name
*staging_file_subdir_path is file_name
*for file: /data/bulk/user_name/subdir_1/subdir_2/file_name
*staging_file_subdir_path is subdir_1/subdir_2/file_name
*/
typedefstructure{
stringstaging_file_subdir_path;
}
DownloadStagingFileParams;

/*
*Results from the download_staging_file function.
*
*copy_file_path: copied file scratch area path
*/
typedefstructure{
stringcopy_file_path;
}
DownloadStagingFileOutput;

/*
*Download a staging area file to scratch area
*/
funcdefdownload_staging_file(DownloadStagingFileParamsparams)returns(DownloadStagingFileOutputresults)authenticationrequired;

/*
*Input parameters for the "download_web_file" function.
*
*Required parameters:
*file_url: file URL
*download_type: one of ['Direct Download', 'FTP', 'DropBox', 'Google Drive']
*/
typedefstructure{
stringfile_url;
stringdownload_type;
}
DownloadWebFileParams;

/*
*Results from the download_web_file function.
*
*copy_file_path: copied file scratch area path
*/
typedefstructure{
stringcopy_file_path;
}
DownloadWebFileOutput;

/*
*Download a web file to scratch area
*/
funcdefdownload_web_file(DownloadWebFileParamsparams)returns(DownloadWebFileOutputresults)authenticationrequired;
};

Function Index

copy_shock_node
download_staging_file
download_web_file
file_to_shock
file_to_shock_mass
get_objects
own_shock_node
pack_file
package_for_download
save_objects
shock_to_file
shock_to_file_mass
unpack_file
versions
ws_name_to_id

Type Index

boolean
CopyShockNodeOutput
CopyShockNodeParams
DownloadStagingFileOutput
DownloadStagingFileParams
DownloadWebFileOutput
DownloadWebFileParams
FileToShockOutput
FileToShockParams
GetObjectsParams
GetObjectsResults
Handle
object_info
ObjectData
ObjectSaveData
OwnShockNodeOutput
OwnShockNodeParams
PackageForDownloadOutput
PackageForDownloadParams
PackFileParams
PackFileResult
SaveObjectsParams
ShockToFileOutput
ShockToFileParams
UnpackFileParams
UnpackFileResult
\ No newline at end of file +DataFileUtil
/*
*Contains utilities for saving and retrieving data to and from KBase data
*services. Requires Shock 0.9.6+ and Workspace Service 0.4.1+.
*
*Note that some calls may create files or directories in the root of the scratch space (typically
*/kb/module/work/tmp). For this reason client programmers should not request that DFU archive from
*the root of the scratch space - always create a new directory (e.g. using a UUID for a name or a
*standard library temporary directory utility) and add the target files to that directory when
*archiving.
*/
moduleDataFileUtil{

/*
*A boolean - 0 for false, 1 for true.
*@range(0, 1)
*/
typedefintboolean;

/*
*A handle for a file stored in Shock.
*hid - the id of the handle in the Handle Service that references this
*shock node
*id - the id for the shock node
*url - the url of the shock server
*type - the type of the handle. This should always be shock.
*file_name - the name of the file
*remote_md5 - the md5 digest of the file.
*/
typedefstructure{
stringhid;
stringfile_name;
stringid;
stringurl;
stringtype;
stringremote_md5;
}
Handle;

/*
*Input for the shock_to_file function.
*
*Required parameters:
*shock_id | handle_id - the ID of the Shock node, or the Handle to a shock node.
*file_path - the location to save the file output. If this is a
*directory, the file will be named as per the filename in Shock.
*
*Optional parameters:
*unpack - either null, 'uncompress', or 'unpack'. 'uncompress' will cause
*any bzip or gzip files to be uncompressed. 'unpack' will behave the
*same way, but it will also unpack tar and zip archive files
*(uncompressing gzipped or bzipped archive files if necessary). If
*'uncompress' is specified and an archive file is encountered, an
*error will be thrown. If the file is an archive, it will be
*unbundled into the directory containing the original output file.
*
*Note that if the file name (either as provided by the user or by
*Shock) without the decompression extension (e.g. .gz, .zip or
*.tgz -> .tar) points to an existing file and unpack is specified,
*that file will be overwritten by the decompressed Shock file.
*/
typedefstructure{
stringshock_id;
stringhandle_id;
stringfile_path;
stringunpack;
}
ShockToFileParams;

/*
*Output from the shock_to_file function.
*
*node_file_name - the filename of the file as stored in Shock.
*file_path - the path to the downloaded file. If a directory was
*specified in the input, this will be the directory appended with the
*shock file name. If a file was specified, it will be that file path.
*In either case, if the file is uncompressed any compression file
*extensions will be removed (e.g. .gz) and/or altered (e.g. .tgz ->
*.tar) as appropriate.
*size - the size of the file in bytes as stored in Shock, prior to
*unpacking.
*attributes - the file attributes, if any, stored in Shock.
*/
typedefstructure{
stringnode_file_name;
stringfile_path;
intsize;
mapping<string,UnspecifiedObject>attributes;
}
ShockToFileOutput;

/*
*Download a file from Shock.
*/
funcdefshock_to_file(ShockToFileParamsparams)returns(ShockToFileOutputout)authenticationrequired;

/*
*Download multiple files from Shock.
*/
funcdefshock_to_file_mass(list<ShockToFileParams>params)returns(list<ShockToFileOutput>out)authenticationrequired;

/*
*Input for the file_to_shock function.
*
*Required parameters:
*file_path - the location of the file (or directory if using the
*pack parameter) to load to Shock.
*
*Optional parameters:
*attributes - DEPRECATED: attributes are currently ignored by the upload function and
*will be removed entirely in a future version. User-specified attributes to save to the
*Shock node along with the file.
*make_handle - make a Handle Service handle for the shock node. Default
*false.
*pack - compress a file or archive a directory before loading to Shock.
*The file_path argument will be appended with the appropriate file
*extension prior to writing. For gzips only, if the file extension
*denotes that the file is already compressed, it will be skipped. If
*file_path is a directory and tarring or zipping is specified, the
*created file name will be set to the directory name, possibly
*overwriting an existing file. Attempting to pack the root directory
*is an error. Do not attempt to pack the scratch space root as noted
*in the module description.
*
*The allowed values are:
*gzip - gzip the file given by file_path.
*targz - tar and gzip the directory specified by the directory
*portion of the file_path into the file specified by the
*file_path.
*zip - as targz but zip the directory.
*/
typedefstructure{
stringfile_path;
mapping<string,UnspecifiedObject>attributes;
booleanmake_handle;
stringpack;
}
FileToShockParams;

/*
*Output of the file_to_shock function.
*
*shock_id - the ID of the new Shock node.
*handle - the new handle, if created. Null otherwise.
*node_file_name - the name of the file stored in Shock.
*size - the size of the file stored in shock.
*/
typedefstructure{
stringshock_id;
Handlehandle;
stringnode_file_name;
stringsize;
}
FileToShockOutput;

/*
*Load a file to Shock.
*/
funcdeffile_to_shock(FileToShockParamsparams)returns(FileToShockOutputout)authenticationrequired;

typedefstructure{
stringfile_path;
}
UnpackFileParams;

typedefstructure{
stringfile_path;
}
UnpackFileResult;

/*
*Using the same logic as unpacking a Shock file, this method will cause
*any bzip or gzip files to be uncompressed, and then unpack tar and zip
*archive files (uncompressing gzipped or bzipped archive files if
*necessary). If the file is an archive, it will be unbundled into the
*directory containing the original output file.
*/
funcdefunpack_file(UnpackFileParamsparams)returns(UnpackFileResultout)authenticationrequired;

/*
*Input parameters for the unpack_files function.
*
*Required parameter:
*file_path - the path to the file to unpack. The file will be unpacked into the file's
*parent directory.
*Optional parameter:
*unpack - either 'uncompress' or 'unpack'. 'uncompress' will cause
*any bzip or gzip files to be uncompressed. 'unpack' will behave the
*same way, but it will also unpack tar and zip archive files
*(uncompressing gzipped or bzipped archive files if necessary). If
*'uncompress' is specified and an archive file is encountered, an
*error will be thrown. If the file is an archive, it will be
*unbundled into the directory containing the original output file.
*
*Defaults to 'unpack'.
*
*Note that if the file name (either as provided by the user or by
*Shock) without the decompression extension (e.g. .gz, .zip or
*.tgz -> .tar) points to an existing file and unpack is specified,
*that file will be overwritten by the decompressed Shock file.
*/
typedefstructure{
stringfile_path;
stringunpack;
}
UnpackFilesParams;

/*
*Output parameters for the unpack_files function.
*
*file_path - the path to either
*a) the unpacked file or
*b) in the case of archive files, the path to the original archive file, possibly
*uncompressed, or
*c) in the case of regular files that don't need processing, the path to the input
*file.
*/
typedefstructure{
stringfile_path;
}
UnpackFilesResult;

/*
*Using the same logic as unpacking a Shock file, this method will cause
*any bzip or gzip files to be uncompressed, and then unpack tar and zip
*archive files (uncompressing gzipped or bzipped archive files if
*necessary). If the file is an archive, it will be unbundled into the
*directory containing the original output file.
*
*The ordering of the input and output files is preserved in the input and output lists.
*/
funcdefunpack_files(list<UnpackFilesParams>params)returns(list<UnpackFilesResult>out)authenticationrequired;

/*
*Input for the pack_file function.
*
*Required parameters:
*file_path - the location of the file (or directory if using the
*pack parameter) to load to Shock.
*pack - The format into which the file or files will be packed.
*The file_path argument will be appended with the appropriate file
*extension prior to writing. For gzips only, if the file extension
*denotes that the file is already compressed, it will be skipped. If
*file_path is a directory and tarring or zipping is specified, the
*created file name will be set to the directory name, possibly
*overwriting an existing file. Attempting to pack the root directory
*is an error. Do not attempt to pack the scratch space root as noted
*in the module description.
*
*The allowed values are:
*gzip - gzip the file given by file_path.
*targz - tar and gzip the directory specified by the directory
*portion of the file_path into the file specified by the
*file_path.
*zip - as targz but zip the directory.
*/
typedefstructure{
stringfile_path;
stringpack;
}
PackFileParams;

/*
*Output from the pack_file function.
*
*file_path - the path to the packed file.
*/
typedefstructure{
stringfile_path;
}
PackFileResult;

/*
*Pack a file or directory into gzip, targz, or zip archives.
*/
funcdefpack_file(PackFileParamsparams)returns(PackFileResultout)authenticationrequired;

/*
*Input for the package_for_download function.
*
*Required parameters:
*file_path - the location of the directory to compress as zip archive
*before loading to Shock. This argument will be appended with the
*'.zip' file extension prior to writing. If it is a directory, file
*name of the created archive will be set to the directory name
*followed by '.zip', possibly overwriting an existing file.
*Attempting to pack the root directory is an error. Do not attempt
*to pack the scratch space root as noted in the module description.
*ws_refs - list of references to workspace objects which will be used to
*produce info-files in JSON format containing workspace metadata and
*provenance structures. It produces new files in folder pointed
*by file_path (or folder containing file pointed by file_path if
*it's not folder).
*Optional parameters:
*attributes - DEPRECATED: attributes are currently ignored by the upload function and
*will be removed entirely in a future version. User-specified attributes to save to the
*Shock node along with the file.
*/
typedefstructure{
stringfile_path;
mapping<string,UnspecifiedObject>attributes;
list<string>ws_refs;
}
PackageForDownloadParams;

/*
*Output of the package_for_download function.
*
*shock_id - the ID of the new Shock node.
*node_file_name - the name of the file stored in Shock.
*size - the size of the file stored in shock.
*/
typedefstructure{
stringshock_id;
stringnode_file_name;
stringsize;
}
PackageForDownloadOutput;

funcdefpackage_for_download(PackageForDownloadParamsparams)returns(PackageForDownloadOutput)authenticationrequired;

/*
*Load multiple files to Shock.
*/
funcdeffile_to_shock_mass(list<FileToShockParams>params)returns(list<FileToShockOutput>out)authenticationrequired;

/*
*Input for the copy_shock_node function.
*
*Required parameters:
*shock_id - the id of the node to copy.
*
*Optional parameters:
*make_handle - make a Handle Service handle for the shock node. Default
*false.
*/
typedefstructure{
stringshock_id;
booleanmake_handle;
}
CopyShockNodeParams;

/*
*Output of the copy_shock_node function.
*
*shock_id - the id of the new Shock node.
*handle - the new handle, if created. Null otherwise.
*/
typedefstructure{
stringshock_id;
Handlehandle;
}
CopyShockNodeOutput;

/*
*Copy a Shock node.
*/
funcdefcopy_shock_node(CopyShockNodeParamsparams)returns(CopyShockNodeOutputout)authenticationrequired;

/*
*Input for the own_shock_node function.
*
*Required parameters:
*shock_id - the id of the node for which the user needs ownership.
*
*Optional parameters:
*make_handle - make or find a Handle Service handle for the shock node.
*Default false.
*/
typedefstructure{
stringshock_id;
booleanmake_handle;
}
OwnShockNodeParams;

/*
*Output of the own_shock_node function.
*
*shock_id - the id of the (possibly new) Shock node.
*handle - the handle, if requested. Null otherwise.
*/
typedefstructure{
stringshock_id;
Handlehandle;
}
OwnShockNodeOutput;

/*
*Gain ownership of a Shock node.
*
*Returns a shock node id which is owned by the caller, given a shock
*node id.
*
*If the shock node is already owned by the caller, returns the same
*shock node ID. If not, the ID of a copy of the original node will be
*returned.
*
*If a handle is requested, the node is already owned by the caller, and
*a handle already exists, that handle will be returned. Otherwise a new
*handle will be created and returned.
*/
funcdefown_shock_node(OwnShockNodeParamsparams)returns(OwnShockNodeOutputout)authenticationrequired;

/*
*Translate a workspace name to a workspace ID.
*/
funcdefws_name_to_id(stringname)returns(intid)authenticationrequired;

/*
*Information about an object, including user provided metadata.
*
*objid - the numerical id of the object.
*name - the name of the object.
*type - the type of the object.
*save_date - the save date of the object.
*version - the version of the object.
*saved_by - the user that saved or copied the object.
*wsid - the id of the workspace containing the object.
*workspace - the name of the workspace containing the object.
*chsum - the md5 checksum of the object.
*size - the size of the object in bytes.
*meta - arbitrary user-supplied metadata about
*the object.
*/
typedeftuple<intobjid,stringname,stringtype,stringsave_date,intversion,stringsaved_by,intwsid,stringworkspace,stringchsum,intsize,mapping<string,string>meta>object_info;

/*
*An object and associated data required for saving.
*
*Required parameters:
*type - the workspace type string for the object. Omit the version
*information to use the latest version.
*data - the object data.
*
*One of an object name or id:
*name - the name of the object.
*objid - the id of the object to save over.
*
*Optional parameters:
*meta - arbitrary user-supplied metadata for the object,
*not to exceed 16kb; if the object type specifies automatic
*metadata extraction with the 'meta ws' annotation, and your
*metadata name conflicts, then your metadata will be silently
*overwritten.
*hidden - true if this object should not be listed when listing
*workspace objects.
*extra_provenance_input_refs - (optional) if set, these refs will
*be appended to the primary ProvenanceAction input_ws_objects
*reference list. In general, if the input WS object ref was
*passed in from a narrative App, this will be set for you.
*However, there are cases where the object ref passed to
*the App is a container, and you are operating on a member
*or subobject of the container, in which case to maintain
*that direct mapping to those subobjects in the provenance
*of new objects, you can provide additional object refs
*here. For example, if the input is a ReadsSet, and your
*App creates a new WS object for each read library in the
*set, you may want a direct reference from each new WS
*object not only to the set, but also to the individual
*read library.
*/
typedefstructure{
stringtype;
UnspecifiedObjectdata;
stringname;
intobjid;
mapping<string,string>meta;
booleanhidden;
list<string>extra_provenance_input_refs;
}
ObjectSaveData;

/*
 * Input parameters for the "save_objects" function.
 *
 * Required parameters:
 * id - the numerical ID of the workspace.
 * objects - the objects to save.
 *
 * The object provenance is automatically pulled from the SDK runner.
 */
typedef structure {
    int id;
    list<ObjectSaveData> objects;
} SaveObjectsParams;

/*
 * Save objects to the workspace.
 *
 * The objects will be sorted prior to saving to avoid the Workspace sort memory limit.
 * Note that if the object contains workspace object refs in mapping keys that may cause
 * the Workspace to resort the data. To avoid this, convert any refs in mapping keys to UPA
 * format (e.g. #/#/#, where # is a positive integer).
 *
 * If the data is very large, using the WSLargeDataIO SDK module is advised.
 *
 * Saving over a deleted object undeletes it.
 */
funcdef save_objects(SaveObjectsParams params) returns (list<object_info> info) authentication required;

/*
 * Input parameters for the "get_objects" function.
 *
 * Required parameters:
 * object_refs - a list of object references in the form X/Y/Z, where X is
 *     the workspace name or id, Y is the object name or id, and Z is the
 *     (optional) object version. In general, always use ids rather than
 *     names if possible to avoid race conditions.
 *     A reference path may be specified by separating references by a semicolon, e.g.
 *     4/5/6;5/7/2;8/9/4 specifies that the user wishes to retrieve the fourth version of
 *     the object with id 9 in workspace 8, and that there exists a reference path from
 *     the sixth version of the object with id 5 in workspace 4, to which the user has access.
 *     The user may or may not have access to workspaces 5 and 8.
 *
 * Optional parameters:
 * ignore_errors - ignore any errors that occur when fetching an object
 *     and instead insert a null into the returned list.
 */
typedef structure {
    list<string> object_refs;
    boolean ignore_errors;
} GetObjectsParams;

/*
 * The data and supplemental info for an object.
 *
 * UnspecifiedObject data - the object's data or subset data.
 * object_info info - information about the object.
 */
typedef structure {
    UnspecifiedObject data;
    object_info info;
} ObjectData;

/*
 * Results from the get_objects function.
 *
 * list<ObjectData> data - the returned objects.
 */
typedef structure {
    list<ObjectData> data;
} GetObjectsResults;

/*
 * Get objects from the workspace.
 */
funcdef get_objects(GetObjectsParams params) returns (GetObjectsResults results) authentication required;

/*
 * Get the versions of the Workspace service and Shock service.
 */
funcdef versions() returns (string wsver, string shockver) authentication required;

/*
 * Input parameters for the "download_staging_file" function.
 *
 * Required parameters:
 * staging_file_subdir_path: subdirectory file path
 *     e.g.
 *     for file: /data/bulk/user_name/file_name
 *         staging_file_subdir_path is file_name
 *     for file: /data/bulk/user_name/subdir_1/subdir_2/file_name
 *         staging_file_subdir_path is subdir_1/subdir_2/file_name
 */
typedef structure {
    string staging_file_subdir_path;
} DownloadStagingFileParams;

/*
 * Results from the download_staging_file function.
 *
 * copy_file_path: copied file scratch area path
 */
typedef structure {
    string copy_file_path;
} DownloadStagingFileOutput;

/*
 * Download a staging area file to scratch area
 */
funcdef download_staging_file(DownloadStagingFileParams params) returns (DownloadStagingFileOutput results) authentication required;

/*
 * Input parameters for the "download_web_file" function.
 *
 * Required parameters:
 * file_url: file URL
 * download_type: one of ['Direct Download', 'FTP', 'DropBox', 'Google Drive']
 */
typedef structure {
    string file_url;
    string download_type;
} DownloadWebFileParams;

/*
 * Results from the download_web_file function.
 *
 * copy_file_path: copied file scratch area path
 */
typedef structure {
    string copy_file_path;
} DownloadWebFileOutput;

/*
 * Download a web file to scratch area
 */
funcdef download_web_file(DownloadWebFileParams params) returns (DownloadWebFileOutput results) authentication required;
};

Function Index

copy_shock_node
download_staging_file
download_web_file
file_to_shock
file_to_shock_mass
get_objects
own_shock_node
pack_file
package_for_download
save_objects
shock_to_file
shock_to_file_mass
unpack_file
unpack_files
versions
ws_name_to_id

Type Index

boolean
CopyShockNodeOutput
CopyShockNodeParams
DownloadStagingFileOutput
DownloadStagingFileParams
DownloadWebFileOutput
DownloadWebFileParams
FileToShockOutput
FileToShockParams
GetObjectsParams
GetObjectsResults
Handle
object_info
ObjectData
ObjectSaveData
OwnShockNodeOutput
OwnShockNodeParams
PackageForDownloadOutput
PackageForDownloadParams
PackFileParams
PackFileResult
SaveObjectsParams
ShockToFileOutput
ShockToFileParams
UnpackFileParams
UnpackFileResult
UnpackFilesParams
UnpackFilesResult
\ No newline at end of file diff --git a/Makefile b/Makefile index 3f5136f..66df5bb 100644 --- a/Makefile +++ b/Makefile @@ -20,11 +20,11 @@ all: compile build build-startup-script build-executable-script build-test-scrip compile: kb-sdk compile $(SPEC_FILE) \ --out $(LIB_DIR) \ - --clasyncver \ - --html \ + --clasyncver release \ --pyclname $(SERVICE_CAPS).$(SERVICE_CAPS)Client \ --pyimplname $(SERVICE_CAPS).$(SERVICE_CAPS)Impl \ --pysrvname $(SERVICE_CAPS).$(SERVICE_CAPS)Server + kb-sdk compile $(SPEC_FILE) --html build: chmod +x $(SCRIPTS_DIR)/entrypoint.sh diff --git a/lib/DataFileUtil/DataFileUtilClient.py b/lib/DataFileUtil/DataFileUtilClient.py index 00c8db1..fe2d0b8 100644 --- a/lib/DataFileUtil/DataFileUtilClient.py +++ b/lib/DataFileUtil/DataFileUtilClient.py @@ -24,7 +24,7 @@ def __init__( password=None, token=None, ignore_authrc=False, trust_all_ssl_certificates=False, auth_svc='https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login', - service_ver='--html', + service_ver='release', async_job_check_time_ms=100, async_job_check_time_scale_percent=150, async_job_check_max_time_ms=300000): if url is None: diff --git a/lib/DataFileUtil/DataFileUtilImpl.py b/lib/DataFileUtil/DataFileUtilImpl.py index 23d1856..5cb8640 100644 --- a/lib/DataFileUtil/DataFileUtilImpl.py +++ b/lib/DataFileUtil/DataFileUtilImpl.py @@ -63,7 +63,7 @@ class DataFileUtil: ######################################### noqa VERSION = "0.2.2" GIT_URL = "https://github.com/kbaseapps/DataFileUtil.git" - GIT_COMMIT_HASH = "0d855560a37f8787ab3bb67e464b9c94b299764e" + GIT_COMMIT_HASH = "8645c467e40937a79fd8ebea3aecb853f5fec048" #BEGIN_CLASS_HEADER