diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..d5e2fb8 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,94 @@ +FROM ubuntu:24.04 + +USER root + +RUN apt-get update --quiet && \ + export DEBIAN_FRONTEND=noninteractive && \ + apt-get install --quiet --yes \ + apt-transport-https \ + apt-utils \ + sudo \ + git \ + less \ + wget \ + curl \ + tree \ + graphviz \ + vim \ + software-properties-common && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# Install Apptainer (Singularity) +RUN add-apt-repository -y ppa:apptainer/ppa && \ + apt-get update --quiet && \ + apt-get install -y apptainer && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# Set up directories +RUN mkdir -p /workspaces/.nextflow + +# Install Seqera Platform "Tower Agent" +RUN curl -fSL https://github.com/seqeralabs/tower-agent/releases/latest/download/tw-agent-linux-x86_64 > tw-agent && \ + chmod +x tw-agent && \ + mv tw-agent /usr/local/bin/tw-agent + +# Uncomment if we need to pin the Nextflow version +ENV NXF_EDGE=0 +ENV NXF_VER=24.10.4 +ENV NXF_HOME=/workspaces/.nextflow + +ENV NFNEURO_RAW_REPOSITORY="https://raw.githubusercontent.com/scilus/nf-neuro/main" +ENV NFCORE_MODULES_GIT_REMOTE=https://github.com/scilus/nf-neuro.git +ENV NFCORE_MODULES_BRANCH=main +ENV NFCORE_SUBWORKFLOWS_GIT_REMOTE=https://github.com/scilus/nf-neuro.git +ENV NFCORE_SUBWORKFLOWS_BRANCH=main + +# Install conda +ENV PATH="/root/miniconda3/bin:${PATH}" +RUN arch=$(uname -m) && \ + if [ "$arch" = "x86_64" ]; then \ + MINICONDA_URL="https://repo.anaconda.com/miniconda/Miniconda3-py312_25.3.1-1-Linux-x86_64.sh"; \ + elif [ "$arch" = "aarch64" ]; then \ + MINICONDA_URL="https://repo.anaconda.com/miniconda/Miniconda3-py312_25.3.1-1-Linux-aarch64.sh"; \ + else \ + echo "Unsupported architecture: $arch"; \ + exit 1; \ + fi && \ + wget $MINICONDA_URL -O miniconda.sh && \ + mkdir -p /root/.conda && \ + bash miniconda.sh -b -p /root/miniconda3 && \ + rm -f 
miniconda.sh + +# Install nextflow, nf-core, Mamba, and pytest-workflow +RUN conda install --name=base conda-anaconda-tos && \ + conda config --add channels bioconda && \ + conda config --add channels conda-forge && \ + conda config --set channel_priority strict && \ + conda tos accept && \ + conda update --quiet --yes --all && \ + conda install --quiet --yes --name base \ + mamba \ + nextflow=$NXF_VER \ + nf-core=2.14.1 \ + nf-test=0.9.2 \ + black \ + prettier \ + pre-commit \ + linkify-it-py \ + pytest-workflow && \ + conda clean --all --force-pkgs-dirs --yes + +# Cleanup +RUN printf 'unset JAVA_TOOL_OPTIONS\n' >> $HOME/.bashrc + +# Update Nextflow +RUN nextflow self-update && nextflow -version + +RUN unset JAVA_TOOL_OPTIONS +ENV PS1='\[\e[3;36m\]${PWD/*//} ->\[\e[0m\] ' + +# Get history +RUN SNIPPET="export PROMPT_COMMAND='history -a' && export HISTFILE=/commandhistory/.bash_history" \ + && echo "$SNIPPET" >> "/root/.bashrc" diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..f0970df --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,50 @@ +{ + "name": "nextflow-training", + "build": { "dockerfile": "Dockerfile", "context": ".." }, + "forwardPorts": [3000], + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": { + "dockerDashComposeVersion": "none", + "installDockerComposeSwitch": false + } + }, + "postCreateCommand": { + "identify": "if [ -z \"$CODESPACES\" ]; then echo \"Devcontainers Development\"; else echo \"Codespaces Development\"; fi", + "trust": "git config --global --add safe.directory /workspaces" + }, + "mounts": [ + { + "target": "/tmp", + "type": "volume" + }, + { + "target": "/root/.vscode-server", + "type": "volume" + }, + { + "target": "/commandhistory", + "type": "volume", + "source": "nf-neuro-tutorial-${devcontainerId}-bashhistory" + } + ], + // Configure tool-specific properties. + "customizations": { + // Configure properties specific to VS Code. 
+ "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/root/miniconda3/bin/python" + }, + // Add the IDs of extensions you want installed when the container is created. + "extensions": [ + "ms-python.python", + "ms-python.vscode-pylance", + "nf-core.nf-core-extensionpack", // nf-core recommended extensions + "nextflow.nextflow", // Nextflow VS Code extension + "codezombiech.gitignore", // Language support for .gitignore files + "cssho.vscode-svgviewer", // SVG viewer + "nf-neuro.nf-neuro-extensionpack" + ] + } + } +} \ No newline at end of file diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..314766e --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +* text=auto eol=lf +*.{cmd,[cC][mM][dD]} text eol=crlf +*.{bat,[bB][aA][tT]} text eol=crlf diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5ffcfca --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +.nextflow.log* +.vscode +.nextflow +.nf-test.log + +tests/.runs/ +work/ +.nf-test/ +extractor_templates/ \ No newline at end of file diff --git a/.nf-core.yml b/.nf-core.yml new file mode 100644 index 0000000..1315ffa --- /dev/null +++ b/.nf-core.yml @@ -0,0 +1,2 @@ +--- +repository_type: pipeline \ No newline at end of file diff --git a/README.md b/README.md index 193eec8..ca154f6 100644 --- a/README.md +++ b/README.md @@ -1,41 +1,118 @@ +# Extractor Flow: streamline filtering pipeline -[![GitHub release (latest by date)](https://img.shields.io/github/v/release/scilus/extractor_flow)](https://github.com/scilus/extractor_flow/releases) -[![Documentation Status](https://readthedocs.org/projects/extractor_flow/badge/?version=latest)](https://extractor_flow.readthedocs.io/en/latest/?badge=latest) -[![Docker container badge](https://img.shields.io/docker/v/scilus/scilus-flows?label=docker&logo=docker&logoColor=white)](https://hub.docker.com/r/scilus/scilus-flows) +ExtractorFlow is a 
streamline filtering pipeline written in nextflow. In essence, you provide any tractogram file (i.e. `*.trk`) as an input and this pipeline will output which streamlines are deemed as anatomically **plausible** and **implausible** as separate `.trk` files. +Additionally, as described below, you can use this pipeline to extract recognized (i.e. plausible) bundles of streamlines. -ExtractorFlow pipeline -====================== +When using this work, please cite using the following: -Usage ------ +``` +PETIT, Laurent, ALI, Kariem Mahdy, RHEAULT, François, et al. The structural connectivity of the human angular gyrus as revealed by microdissection and diffusion tractography. Brain Structure and Function, 2023, vol. 228, no 1, p. 103-120. +``` -See *USAGE* or run `nextflow run main.nf --help` +## Requirements +- [Nextflow](https://www.nextflow.io/docs/latest/install.html) +- [Docker](https://www.docker.com/get-started/) (recommended) or [Apptainer](https://apptainer.org/docs/admin/main/installation.html) depending on the runtime you choose. -Singularity ----------- +You should be able to run the following commands without any errors being printed: +```bash +# 1. Make sure Nextflow is installed. +nextflow -v -If you are on Linux, we recommend using the Singularity container to run ExtractorFlow +# 2a. Make sure Docker is installed. +docker ps -Run the command (your working directory has to be the "containers" directory): +# 2b. Make sure Apptainer is installed. +apptainer version +``` -`sudo singularity build singularity_extractorflow.sif singularity_extractorflow.def` +## Getting started +### Understand the input +This nextflow pipeline has **two** mandatory arguments that any user has to provide in order to run the pipeline. +1. `--input` +2. 
`--templates_dir` -Then you can start the pipeline using this command line: +Both of these arguments point to two different directories that have their own particular structure: +#### `--input` +This argument points to the directory holding all the tractograms you wish to filter. Also, within this directory, each tractogram should probably be associated with a T1w image (in diffusion space). This T1w image is used to register your tractograms to MNI space, which is the space where the filtering is performed. **If no T1w image is provided, your tractograms are assumed to be already in the appropriate template (MNI) space**. This said, your directory structure should look like the following: ``` -nextflow main.nf --root=/path_to_your_data/ -with-singularity singularity_extractorflow.sif -resume +input_example +├── subject-01 +│   ├── subject-01__t1.nii.gz +│   └── subject-01_tracking.trk +├── subject-02 +│   ├── subject-02__t1.nii.gz +│   └── subject-02_tracking.trk +├── ... +└── subject-n + ├── subject-n__t1.nii.gz + └── subject-n_tracking.trk ``` +If your tractograms are already in the right space, keep the same structure, but omit the `*t1.nii.gz` files. + +#### `--templates_dir` +To simplify the usage of this pipeline and to avoid cluttering the containers used, the user must download extract the different templates used and the lists that are used during filtering in this pipeline (don't worry, it's a simple process). +All you have to do is execute the following commands to download and extract the needed files into a new directory that we called `extractor_templates`. -Docker ------- -If you are on MacOS or Windows, we recommend using the Docker container to run ExtractorFlow. 
+```bash +wget https://github.com/scilus/extractor_flow/raw/refs/heads/master/containers/filtering_lists.tar.bz2 && wget https://github.com/scilus/extractor_flow/raw/refs/heads/master/containers/templates_and_ROIs.tar.bz2 + +mkdir -p extractor_templates + +tar -xjf filtering_lists.tar.bz2 -C extractor_templates && rm filtering_lists.tar.bz2 +tar -xjf templates_and_ROIs.tar.bz2 -C extractor_templates && rm templates_and_ROIs.tar.bz2 +``` -You can build docker image using this command (your working directory has to be the "containers" directory): +> If you don't have the `wget` and/or the `tar` commands and you don't want to (or can't) install them, you can always download the two archives manually from [here](https://github.com/scilus/extractor_flow/raw/refs/heads/master/containers/filtering_lists.tar.bz2) and [here](https://github.com/scilus/extractor_flow/raw/refs/heads/master/containers/templates_and_ROIs.tar.bz2) and extract them into a new directory called `extractor_templates`. Just make sure that you have the following directory structure. -`docker build -t extractor_flow .` +From this point, you should have a directory containing the following structure that you'll use to provide as a value to the `--templates_dir` argument in the following steps: +``` +extractor_templates/ +├── filtering_lists +│ ├── ... +└── templates_and_ROIs + └── ... +``` + +#### Complete arguments list +To get a complete list of the available arguments you can provide, always refer to the usage printed by the nextflow script as follows: +```bash +nextflow run scilus/extractor_flow --help +``` + +### Standard filtering using Docker (recommended). +The following example gives a general idea on what arguments you can provide. The key part is to select the `docker` profile as well as providing the mandatory arguments (i.e. 
`--input` & `--templates_dir`): +```bash +nextflow run scilus/extractor_flow \ + --input \ + --templates_dir \ + -profile docker \ + [--orig] + [--keep_intermediate_steps] + [-resume] +``` +### Standard filtering using Apptainer/Singularity. +To use apptainer containers, you should only have to swap the profile used to `apptainer` as shown in the following example: +```bash +nextflow run scilus/extractor_flow \ + --input \ + --templates_dir \ + -profile apptainer \ + [--orig] + [--keep_intermediate_steps] + [-resume] ``` -nextflow main.nf --root=/path_to_your_data/ -with-docker extractor_flow:latest -resume +### Filtering and bundle extraction. +Notice, in the following example, the addition of the `extract_bundles` profile. This will trigger a few additional processes that will separate and organize new files referring to the bundles identified by this flow. The example is using Docker (as the docker profile is enabled), but the same applies for Apptainer. +```bash +nextflow run scilus/extractor_flow \ + --input \ + --templates_dir \ + -profile docker,extract_bundles \ + [--orig] + [--keep_intermediate_steps] + [-resume] ``` diff --git a/USAGE b/USAGE index 4511b90..f87ed73 100644 --- a/USAGE +++ b/USAGE @@ -20,6 +20,16 @@ DESCRIPTION └── S2 └── *.trk └── *_t1.nii.gz (diff space, optional) + --templates_dir=/path/to/templates_dir + ABSOLUTE PATH to directory containing the templates + and the filtering lists (e.g. JHU atlas). Structure + should look like: + [templates_dir] + ├── templates_and_ROIs + │ ├── JHU*.nii.gz + └── filtering_lists + └── filtering_list_*.txt + + OPTIONAL ARGUMENTS (current value) @@ -33,13 +43,13 @@ OPTIONAL ARGUMENTS (current value) --quick_registration If set, will choose antsRegistrationSyNQuick.sh instead of antsRegistrationSyN.sh ($quick_registration). - --processes_bet_register_t1 Number of processes for T1 brain extraction task ($processes_bet_register_t1). 
--processes_major_filtering Number of processes for the major filtering task ($processes_major_filtering). --processes The number of parallel processes to launch ($cpu_count). Only affects the local scheduler. AVAILABLE PROFILES (using -profile option (e.g. -profile macos,extract_bundles)) -macos When this profile is used, ExtractorFlow will modify a parameter (scratch) for MacOS users. +docker When this profile is used, all processes will execute within Docker containers (recommended). +apptainer When this profile is used, all processes will execute within Apptainer containers. fully_reproducible When this profile is used, all the parameters will be set to have 100% reproducible results. -extract_bundles Extract bundles \ No newline at end of file +extract_bundles The flow will perform additional processing to extract all recognized bundles. \ No newline at end of file diff --git a/config/resources.config b/config/resources.config new file mode 100644 index 0000000..adcbfc0 --- /dev/null +++ b/config/resources.config @@ -0,0 +1,18 @@ + + +params.max_cpus = 4 +params.max_time = 24.h + + +profiles { + default_github_runner { + params.max_memory = 5.GB + } + big_mem_github_runner { + params.max_memory = 16.GB + } + devcontainer { + params.max_cpus = query_container_limits('cpus') + params.max_memory = query_container_limits('memory') + } +} diff --git a/containers/filtering_lists.tar.bz2 b/containers/filtering_lists.tar.bz2 deleted file mode 100644 index 76bb52b..0000000 Binary files a/containers/filtering_lists.tar.bz2 and /dev/null differ diff --git a/containers/singularity_extractorflow.def b/containers/singularity_extractorflow.def deleted file mode 100755 index 4786fd2..0000000 --- a/containers/singularity_extractorflow.def +++ /dev/null @@ -1,11 +0,0 @@ -BootStrap: docker -From: scilus/scilus:1.6.0 - -%setup - export ROIs=templates_and_ROIs.tar.bz2 - mkdir -p $SINGULARITY_ROOTFS/extractor_flow/templates_and_ROIs - tar -jxf $ROIs -C 
$SINGULARITY_ROOTFS/extractor_flow/ - - export filtering_lists=filtering_lists.tar.bz2 - mkdir -p $SINGULARITY_ROOTFS/extractor_flow/filtering_lists - tar -jxf $filtering_lists -C $SINGULARITY_ROOTFS/extractor_flow/ diff --git a/containers/templates_and_ROIs.tar.bz2 b/containers/templates_and_ROIs.tar.bz2 deleted file mode 100644 index f06866d..0000000 Binary files a/containers/templates_and_ROIs.tar.bz2 and /dev/null differ diff --git a/main.nf b/main.nf index 0fef658..9c55cb5 100644 --- a/main.nf +++ b/main.nf @@ -1,2327 +1,158 @@ #!/usr/bin/env nextflow +nextflow.enable.dsl = 2 params.input = false params.help = false params.debug = true - -if(params.help) { - usage = file("$baseDir/USAGE") - cpu_count = Runtime.runtime.availableProcessors() - bindings = ["rois_folder":"$params.rois_folder", - "FLF": "$params.FLF", - "run_bet":"$params.run_bet", - "distance": "$params.distance", - "orig":"$params.orig", - "extended":"$params.extended", - "keep_intermediate_steps":"$params.keep_intermediate_steps", - "quick_registration": "$params.quick_registration", - "cpu_count":"$cpu_count", - "processes_bet_register_t1":"$params.processes_bet_register_t1", - "processes_major_filtering":"$params.processes_major_filtering"] - - engine = new groovy.text.SimpleTemplateEngine() - template = engine.createTemplate(usage.text).make(bindings) - print template.toString() - return +// Subworkflows +include { TRANSFORM_TO_MNI; CLEAN_IF_FROM_MNI } from './subworkflows/local/transform.nf' +include { EXTRACT } from './subworkflows/local/extraction.nf' +include { EXTRACT_BUNDLES } from './subworkflows/local/extension.nf' + +// Local modules +include { TRACTOGRAM_MATH as RENAME_CORTICO_STRIATE } from './modules/local/merge/main.nf' +include { MAJOR_FILTERING } from './modules/local/filtering/major_filtering.nf' +include { COPY_FILE as COPY_T1_TO_ORIG } from './modules/local/utils/copy_file.nf' + +// NF-Neuro modules +include { REGISTRATION_TRACTOGRAM as REGISTER_TRACTOGRAM_ORIG } from 
'./modules/nf-neuro/registration/tractogram/main.nf' +include { REGISTRATION_TRACTOGRAM as REGISTER_BUNDLES_ORIG } from './modules/nf-neuro/registration/tractogram/main.nf' + +workflow { + // ** Now call your input workflow to fetch your files ** // + data = get_data() + + transformed = TRANSFORM_TO_MNI(data.tractograms, data.t1s) + cleaned_tractograms = CLEAN_IF_FROM_MNI(data.tractograms, data.t1s) + all_mni_tractograms = cleaned_tractograms.cleaned_mni_tractograms.mix(transformed.tractograms) + + // Major filtering + filtered_tractograms = MAJOR_FILTERING(all_mni_tractograms) + + // Extract plausible and unplausible streamlines + EXTRACT(filtered_tractograms.unplausible, filtered_tractograms.wb, data.sides, all_mni_tractograms) + + if (params.orig) { + // Register the tractograms to the original space + tractograms_to_transform = EXTRACT.out.plausible.concat(EXTRACT.out.unplausible) + + t1s_and_transformations = data.t1s.join(transformed.transformations_for_orig) + trks_for_register = tractograms_to_transform.combine(t1s_and_transformations, by: 0) + .map{ sid, trk, t1, transfo, deformation -> + [sid, t1, transfo, trk, [], deformation]} + REGISTER_TRACTOGRAM_ORIG(trks_for_register) + + // Copy the original T1w to the subject folder. + COPY_T1_TO_ORIG(data.t1s.map{ sid, t1 -> [sid, [], t1] }) + + if (params.extract_bundles) { + // Register the extracted bundles to the original space + t1s_and_transformations = data.t1s.join(transformed.transformations_for_orig) + bundles_to_register = EXTRACT.out.bundles.combine(t1s_and_transformations, by: 0) + .map{ sid, trk, t1, transfo, deformation -> + [sid, t1, transfo, trk, [], deformation]} + REGISTER_BUNDLES_ORIG(bundles_to_register) + } } - -log.info "Extractor_flow pipeline" -log.info "===================" -log.info "Start time: $workflow.start" -log.info "" - -workflow.onComplete { - log.info "Pipeline completed at: $workflow.complete" - log.info "Execution status: ${ workflow.success ? 
'OK' : 'failed' }" - log.info "Execution duration: $workflow.duration" -} - -if (!params.keep_intermediate_steps) { - log.info "Warning: You won't be able to resume your processing if you don't use the option --keep_intermediate_steps" - log.info "" } -if (params.input){ - log.info "Input: $params.input" - root = file(params.input) - in_tractogram = Channel - .fromFilePairs("$root/**/*.trk", - size:1, - maxDepth:1, - flat: true) {it.parent.name} - - in_tractogram.into{check_trks; - in_tractogram_for_unplausible; - in_tractogram_for_transformation; - in_tractogram_for_mix} - +workflow get_data { + main: + if(params.help) { + usage = file("$baseDir/USAGE") + cpu_count = Runtime.runtime.availableProcessors() + bindings = [ + "rois_folder": "$params.rois_folder", + "FLF": "$params.FLF", + "run_bet": "$params.run_bet", + "distance": "$params.distance", + "orig": "$params.orig", + "extract_bundles": "$params.extract_bundles", + "keep_intermediate_steps": "$params.keep_intermediate_steps", + "quick_registration": "$params.quick_registration", + "processes_major_filtering": "$params.processes_major_filtering", + "cpu_count": "$cpu_count" + ] + + engine = new groovy.text.SimpleTemplateEngine() + template = engine.createTemplate(usage.text).make(bindings) + print template.toString() + System.exit(0) + } - Channel - .fromPath("$root/**/*_t1.nii.gz", - maxDepth:1) - .map{[it.parent.name, it]} - .into{t1s_for_register; - t1s_for_register_back; - t1s_for_copy_to_orig; - check_t1s; - t1s_empty} -} -else { - error "Error ~ Please use --input for the input data." 
-} + log.info "Extractor_flow pipeline" + log.info "===================" + log.info "Start time: $workflow.start" + log.info "" -check_trks.count().into{check_subjects_number; number_subj_for_null_check} -check_t1s.count().into{number_t1s_for_compare; number_t1s_check_with_orig} + if (!params.keep_intermediate_steps) { + log.info "Warning: You won't be able to resume your processing if you don't use the option --keep_intermediate_steps" + log.info "" + } -number_subj_for_null_check -.subscribe{a -> if (a == 0) - error "Error ~ No subjects found. Please check the naming convention, your --input path."} + check_required_params(['input', 'templates_dir']) + log.info "Input: $params.input" + log.info "Templates directory: $params.templates_dir" + + root = file(params.input) + in_tractogram = Channel.fromFilePairs("$root/**/*.trk", + size:1, + maxDepth:1, + flat: true) {[id: it.parent.name]} + t1s = Channel.fromPath("$root/**/*_t1.nii.gz", maxDepth:1).map{[[id: it.parent.name], it]} + + number_subjects = in_tractogram.count() + number_t1s = t1s.count() + + number_subjects.subscribe { a -> if (a == 0) + error "Error ~ No subjects found. Please check the naming convention, your --input path." 
} + + number_subjects + .concat(number_t1s) + .toList() + .subscribe{a, b -> if (a != b && b > 0) + error "Error ~ Some subjects have a T1w and others don't.\n" + + "Please be sure to have the same acquisitions for all subjects."} + + if (params.orig){ + number_t1s + .subscribe{a -> if (a == 0) + error "Error ~ You cannot use --orig without having any T1w in the orig space."} + } -check_subjects_number - .concat(number_t1s_for_compare) - .toList() - .subscribe{a, b -> if (a != b && b > 0) - error "Error ~ Some subjects have a T1w and others don't.\n" + - "Please be sure to have the same acquisitions for all subjects."} + side_values = params.sides?.tokenize(',') + sides = Channel.from(side_values) -if (params.orig){ - number_t1s_check_with_orig - .subscribe{a -> if (a == 0) - error "Error ~ You cannot use --orig without having any T1w in the orig space."} + emit: + tractograms = in_tractogram + t1s = t1s + sides = sides } -sides = params.sides?.tokenize(',') -Channel.from(sides).into{sides_ipsi; - sides_split_CC_BG; - sides_split_BG_Thal; - sides_split_BG_Put; - sides_split_BG_Caud; - side_corticopontineF; - side_corticopontinePOT; - side_cst} - -/* BEGINNING TRANSFO */ - -process Register_T1 { - publishDir = params.final_output_mni_space - cpus params.processes_bet_register_t1 - - input: - set sid, file(t1) from t1s_for_register - - output: - set sid, "${sid}__output0GenericAffine.mat", "${sid}__output1InverseWarp.nii.gz", "${sid}__output1Warp.nii.gz" into transformation_for_trk - file "${sid}__t1_${params.template_space}.nii.gz" - file "${sid}__t1_bet_mask.nii.gz" optional true - file "${sid}__t1_bet.nii.gz" optional true - - script: - if (params.run_bet){ - """ - export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 - export OMP_NUM_THREADS=1 - export OPENBLAS_NUM_THREADS=1 - export ANTS_RANDOM_SEED=1234 - - antsBrainExtraction.sh -d 3 -a $t1 -e $params.template_t1/t1_template.nii.gz\ - -o bet/ -m $params.template_t1/t1_brain_probability_map.nii.gz -u 0 - 
scil_image_math.py convert bet/BrainExtractionMask.nii.gz ${sid}__t1_bet_mask.nii.gz --data_type uint8 - scil_image_math.py multiplication $t1 ${sid}__t1_bet_mask.nii.gz ${sid}__t1_bet.nii.gz - - ${params.registration_script} -d 3 -m ${sid}__t1_bet.nii.gz -f ${params.rois_folder}${params.atlas.template} -n ${task.cpus} -o "${sid}__output" -t s - mv ${sid}__outputWarped.nii.gz ${sid}__t1_${params.template_space}.nii.gz - """ +def check_required_params(param_names) { + // Loop through each parameter name and check if it exists in params + // We need to accumulate errors to report them all at once + def missing_params = [] + param_names.each { param -> + if (!params.containsKey(param) || params[param] == false || params[param] == '' || params[param] == null) { + missing_params << param + } } - else{ - """ - export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 - export OMP_NUM_THREADS=1 - export OPENBLAS_NUM_THREADS=1 - export ANTS_RANDOM_SEED=1234 - ${params.registration_script} -d 3 -m ${t1} -f ${params.rois_folder}${params.atlas.template} -n ${task.cpus} -o "${sid}__output" -t s - mv ${sid}__outputWarped.nii.gz ${sid}__t1_${params.template_space}.nii.gz - """ + if (missing_params) { + throw new Exception("Missing required parameters: ${missing_params.join(', ')}") } } -transformation_for_trk.into{transformation_for_trk_registration; - transformation_for_join_with_t1} -if (params.orig) { - t1s_for_register_back - .cross(transformation_for_join_with_t1) - .map { [ it[0][0], it[0][1], it[1][1], it[1][2], it[1][3]] } - .into{transformation_and_t1_for_transformation_to_orig; - transformation_and_t1_for_transformation_to_orig_bundles} -} - -transformation_for_trk_registration - .cross(in_tractogram_for_transformation) - .map { [ it[0][0], it[0][1], it[0][2], it[0][3], it[1][1] ] } - .set{trk_and_template_for_transformation_to_template} - - -process Transform_TRK { - publishDir = params.final_output_mni_space - cpus 1 - - input: - set sid, file(transfo), file(inv_deformation), 
file(deformation), file(trk) from trk_and_template_for_transformation_to_template - - output: - set sid, "${trk.getSimpleName()}_${params.template_space}.trk" into transformed_for_remove_out_not_JHU, transformed_for_unplausible - - script: - """ - scil_apply_transform_to_tractogram.py $trk ${params.rois_folder}${params.atlas.template} ${transfo} ${trk.getSimpleName()}_${params.template_space}.trk --remove_invalid --inverse --in_deformation ${inv_deformation} - """ -} - -/* END TRANSFO */ - -trk_for_remove_invalid_streamlines = Channel.empty() -t1_for_remove_invalid_streamlines = Channel.empty() -if (t1s_empty.count().get()==0){ - in_tractogram_for_unplausible.into{trk_for_extract_first_unplausible; trk_for_extract_unplausible} - in_tractogram_for_mix.into{trk_for_remove_invalid_streamlines; t1_for_remove_invalid_streamlines} -} -else{ - transformed_for_unplausible.into{trk_for_extract_first_unplausible; trk_for_extract_unplausible} -} - -process Remove_invalid_streamlines { - cpus 1 - - input: - set sid, file(tractogram) from trk_for_remove_invalid_streamlines - - output: - set sid, "${sid}__rm_invalid_streamlines.trk" into rm_invalid_for_remove_out_not_JHU - - script: - """ - scil_remove_invalid_streamlines.py ${tractogram} ${sid}__rm_invalid_streamlines.trk --cut_invalid --remove_single_point -f - """ -} - -process Copy_t1_atlas { - publishDir = params.final_output_mni_space - cpus 1 - - input: - set sid, file(tractogram) from t1_for_remove_invalid_streamlines - - output: - file "${sid}__t1_mni_space.nii.gz" - - script: - """ - cp ${params.rois_folder}${params.atlas.template} ${sid}__t1_mni_space.nii.gz - """ -} - -rm_invalid_for_remove_out_not_JHU.mix(transformed_for_remove_out_not_JHU).set{for_major_filtering} - - -process Major_filtering { - cpus params.processes_major_filtering - - input: - set sid, file(tractogram) from for_major_filtering - - output: - set sid, "${sid}__wb_clean01.trk" into wb_for_extract_end_in_cerebellum - set sid, 
"${sid}__unplausible_streamlines.trk" into unplausible_for_fornix - path "${sid}/*" optional true - - script: - keep_intermediate_trk_flag="" - if (params.keep_intermediate_steps) { - keep_intermediate_trk_flag="--save_intermediate_tractograms" +def check_nb_cpus() { + if(params.processes) { + if(params.processes > Runtime.runtime.availableProcessors()) { + throw new RuntimeException("Number of processes higher than available CPUs.") + } + else if(params.processes < 1) { + throw new RuntimeException("When set, number of processes must be >= 1 " + + "and smaller or equal to the number of CPUs.") } - """ - scil_filter_tractogram_anatomically.py ${tractogram} \ - ${params.rois_folder}${params.atlas.JHU_8} \ - ${sid} \ - --minL ${params.min_streaminline_lenght} \ - --maxL ${params.max_streaminline_lenght} \ - -a ${params.loop_angle_threshold} \ - --csf_bin ${params.rois_folder}${params.atlas.csf} \ - --processes ${params.processes_major_filtering}\ - --save_rejected\ - $keep_intermediate_trk_flag\ - -f - - mv ${sid}/${tractogram.getSimpleName()}_filtered.trk ${sid}__wb_clean01.trk - mv ${sid}/${tractogram.getSimpleName()}_rejected.trk ${sid}__unplausible_streamlines.trk - """ -} - - -process Extract_fornix{ - cpus 1 - - input: - set sid, file(tractogram) from unplausible_for_fornix - - output: - set sid, "${sid}__fornix_f.trk" into fornix_for_trk_plausible, fornix_for_rename - file "${sid}__fornix_f.txt" - file "${sid}__unplausible_streamlines_wo_fornix.trk" optional true - - script: - filtering_list=params.FLF+"fx.txt" - out_extension="fornix_f" - remaining_extension="unplausible_streamlines_wo_fornix" - basename="${sid}" - keep="$params.keep_intermediate_steps" - extract_masks="" - distance=1 - - template "filter_with_list.sh" -} - - -process Extract_ee_cerebellum { - cpus 1 - - input: - set sid, file(tractogram) from wb_for_extract_end_in_cerebellum - - output: - set sid, "${sid}__wb_clean01_nocereb.trk" into wb_for_extract_end_in_brainstem - set sid, 
"${sid}__all_cerebellum.trk" into ee_cerebellum_for_extract_plausible - file "${sid}__all_cerebellum.txt" - file "${sid}__wb_clean01_nocereb.txt" - - script: - filtering_list=params.FLF+"out_cerebellum.txt" - out_extension="wb_clean01_nocereb" - remaining_extension="all_cerebellum" - basename="${sid}" - keep=true - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - - -process Extract_plausible_cerebellum { - cpus 1 - - input: - set sid, file(tractogram) from ee_cerebellum_for_extract_plausible - - output: - set sid, "${sid}__all_cerebellum_plausibles.trk" into cerebellum_for_trk_plausible, cerebellum_for_rename - file "${sid}__all_in_cerebellum_nocx_nocerebwm.trk" - file "${sid}__all_in_cerebellum_in_Medulla.trk" - file "${sid}__all_in_cerebellum_in_Pons.trk" - file "${sid}__all_in_cerebellum_in_Midbrain.trk" - file "${sid}__all_in_cerebellum_in_redN_and_Thal.trk" - - script: - """ - scil_filter_tractogram.py ${tractogram} ${sid}__tmp_in_cerebellum.trk\ - --filtering_list ${params.FLF}in_cerebellum.txt -f - scil_filter_tractogram.py ${sid}__tmp_in_cerebellum.trk ${sid}__all_in_cerebellum_nocx_nocerebwm.trk\ - --filtering_list ${params.FLF}cerebellum_nocx_in_cereb.txt -f - scil_filter_tractogram.py ${sid}__tmp_in_cerebellum.trk ${sid}__all_in_cerebellum_in_Medulla.trk\ - --filtering_list ${params.FLF}cerebellum_in_medulla.txt -f - scil_filter_tractogram.py ${sid}__tmp_in_cerebellum.trk ${sid}__all_in_cerebellum_in_Pons.trk\ - --filtering_list ${params.FLF}cerebellum_in_pons.txt -f - scil_filter_tractogram.py ${sid}__tmp_in_cerebellum.trk ${sid}__all_in_cerebellum_in_Midbrain.trk\ - --filtering_list ${params.FLF}cerebellum_in_midbrain.txt -f - scil_filter_tractogram.py ${sid}__tmp_in_cerebellum.trk ${sid}__all_in_cerebellum_in_redN_and_Thal.trk\ - --filtering_list ${params.FLF}cerebellum_in_rednucleus_and_thalamus.txt -f - scil_tractogram_math.py union ${sid}__all_in_*.trk ${sid}__all_cerebellum_plausibles.trk -f - """ -} - -/* - 
END Cerebellum -*/ - -process Extract_ee_brainstem { - cpus 1 - - input: - set sid, file(tractogram) from wb_for_extract_end_in_brainstem - - output: - set sid, "${sid}__wb_clean02.trk" into wb_for_split_end_in_CGMSWI - set sid, "${sid}__all_brainstem.trk" into all_brainstem_for_extract_plausible - file "${sid}__wb_clean02.txt" - file "${sid}__all_brainstem.txt" - - script: - filtering_list=params.FLF+"out_brainstem.txt" - out_extension="wb_clean02" - remaining_extension="all_brainstem" - basename="${sid}" - keep=true - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -/* - Brainstem -*/ - -process Extract_plausible_brainstem { - cpus 1 - - input: - set sid, file(tractogram) from all_brainstem_for_extract_plausible - output: - set sid, "${sid}__all_brainstem_plausibles.trk" into brainstem_for_trk_plausible, brainstem_for_rename - file "${sid}__all_brainstem_unplausibles.trk" optional true - file "${sid}__be_midbrain.trk" - file "${sid}__be_medulla.trk" - file "${sid}__be_pons.trk" - file "${sid}__ee_thalamus.trk" - file "${sid}__ee_red_nucleus.trk" - set sid, "${sid}__ee_fronto_pontine.trk" into brainstem_corticopontine_frontal_for_rename - set sid, "${sid}__ee_parietotemporooccipital_pontine.trk" into brainstem_ee_corticopontine_parietotemporooccipital_for_rename - set sid, "${sid}__ee_pyramidal.trk" into brainstem_pyramidal_for_rename - file "${sid}__ee_cortico_tectal.trk" - - script: - """ - # Extract be midbrain - scil_filter_tractogram.py ${sid}__all_brainstem.trk ${sid}__be_midbrain.trk\ - --filtering_list ${params.FLF}brainstem_be_midbrain.txt -f - # Extract be medulla - scil_filter_tractogram.py ${sid}__all_brainstem.trk ${sid}__be_medulla.trk\ - --filtering_list ${params.FLF}brainstem_be_medulla.txt -f - # Extract be pons - scil_filter_tractogram.py ${sid}__all_brainstem.trk ${sid}__be_pons.trk\ - --filtering_list ${params.FLF}brainstem_be_pons.txt -f - - # Extract ee thalamus - scil_filter_tractogram.py 
${sid}__all_brainstem.trk ${sid}__ee_thalamus.trk\ - --filtering_list ${params.FLF}brainstem_ee_thalamus.txt -f - # Extract ee red_nucleus - scil_filter_tractogram.py ${sid}__all_brainstem.trk ${sid}__ee_red_nucleus.trk\ - --filtering_list ${params.FLF}brainstem_ee_red_nucleus.txt -f - - # Prepartion for fronto-pontine, parietotemporooccipito-pontine, pyramidal, cortico-tectal - scil_filter_tractogram.py ${sid}__all_brainstem.trk ${sid}__ee_tmp_01.trk\ - --filtering_list ${params.FLF}brainstem_ee_tmp_01.txt -f - scil_filter_tractogram.py ${sid}__all_brainstem.trk ${sid}__ee_tmp_02.trk\ - --filtering_list ${params.FLF}brainstem_ee_tmp_02.txt -f - - scil_tractogram_math.py union ${sid}__ee_tmp_01.trk ${sid}__ee_tmp_02.trk\ - ${sid}__ee_tmp_03.trk -f - - # Extract ee Fronto-pontine R and L - scil_filter_tractogram.py ${sid}__ee_tmp_03.trk ${sid}__ee_fronto_pontine_R.trk\ - --filtering_list ${params.FLF}brainstem_ee_F_pontine_R.txt -f - scil_filter_tractogram.py ${sid}__ee_tmp_03.trk ${sid}__ee_fronto_pontine_L.trk\ - --filtering_list ${params.FLF}brainstem_ee_F_pontine_L.txt -f - scil_tractogram_math.py union ${sid}__ee_fronto_pontine_L.trk ${sid}__ee_fronto_pontine_R.trk\ - ${sid}__ee_fronto_pontine.trk -f - - # Extract ee ParietoTemporooccipital pontine R and L - scil_filter_tractogram.py ${sid}__ee_tmp_03.trk ${sid}__ee_parietotemporooccipital_pontine_R.trk\ - --filtering_list ${params.FLF}brainstem_ee_PTO_pontine_R.txt -f - scil_filter_tractogram.py ${sid}__ee_tmp_03.trk ${sid}__ee_parietotemporooccipital_pontine_L.trk\ - --filtering_list ${params.FLF}brainstem_ee_PTO_pontine_L.txt -f - scil_tractogram_math.py union ${sid}__ee_parietotemporooccipital_pontine_L.trk ${sid}__ee_parietotemporooccipital_pontine_R.trk\ - ${sid}__ee_parietotemporooccipital_pontine.trk -f - - # Extract ee Pyramidal - scil_filter_tractogram.py ${sid}__ee_tmp_03.trk ${sid}__ee_pyramidal.trk\ - --filtering_list ${params.FLF}brainstem_ee_pyramidal.txt -f - - # Extract ee Tectal - 
scil_filter_tractogram.py ${sid}__ee_tmp_03.trk ${sid}__ee_cortico_tectal.trk\ - --filtering_list ${params.FLF}brainstem_ee_cortico_tectal.txt -f - scil_filter_streamlines_by_length.py ${sid}__ee_cortico_tectal.trk ${sid}__ee_cortico_tectal.trk --maxL 100 -f - - rm -f ${sid}__*tmp_*.trk - - scil_tractogram_math.py union ${sid}__be_*.trk ${sid}__ee_*.trk ${sid}__all_brainstem_plausibles.trk -f - - if ${params.keep_intermediate_steps} - then - scil_tractogram_math.py difference ${sid}__all_brainstem.trk ${sid}__all_brainstem_plausibles.trk ${sid}__all_brainstem_unplausibles.trk -f - fi - """ -} - -/* -Brain - Either end in CGM SWM -*/ - -process Remove_out_of_CGM_DWM { - cpus 1 - - input: - set sid, file(tractogram) from wb_for_split_end_in_CGMSWI - - output: - set sid, "${sid}__wb_either_CGM_SWM.trk" into wb_for_extract_all_commissural - set sid, "${sid}__no_CGM_SWM.trk" optional true - file "${sid}__wb_either_CGM_SWM.txt" - file "${sid}__no_CGM_SWM.txt" optional true - - script: - filtering_list=params.FLF+"ee_CGM_SWM.txt" - out_extension="wb_either_CGM_SWM" - remaining_extension="no_CGM_SWM" - basename="${sid}" - keep="$params.keep_intermediate_steps" - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -process Extract_all_commissural { - cpus 1 - - input: - set sid, file(tractogram) from wb_for_extract_all_commissural - - output: - set sid, "${sid}__tmp_CC.trk" into cc_for_extract_CC_Cx, cc_for_extract_AC_Cx, cc_for_extract_CC_BG, cc_tmp_for_commissural - set sid, "${sid}__wb_either_CGM_SWM_noCC.trk" into no_cc_for_split_asso_BG - file "${sid}__wb_either_CGM_SWM_noCC.txt" - file "${sid}__tmp_CC.txt" - - script: - filtering_list=params.FLF+"commissural.txt" - out_extension="wb_either_CGM_SWM_noCC" - remaining_extension="tmp_CC" - basename="${sid}" - keep=true - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - - -process Extract_plausible_CC_Cx { - cpus 1 - - input: - set sid, 
file(tractogram) from cc_for_extract_CC_Cx

    output:
    set sid, "${sid}__in_CC_Cx_f.trk" into cc_for_merge_plausible_01
    file "mask_atlas_roi_*.nii.gz" optional true
    file "${sid}__in_CC_Cx_f.txt"

    script:
    filtering_list=params.FLF+"CC_Cx.txt"
    out_extension="in_CC_Cx_f"
    remaining_extension="garbage"
    basename="${sid}"
    keep=false
    extract_masks=""
    distance="$params.distance"

    template "filter_with_list.sh"
}

// Anterior-commissure / cortex bundle, filtered with the AC_Cx list.
process Extract_plausible_AC_Cx {
    cpus 1

    input:
    set sid, file(tractogram) from cc_for_extract_AC_Cx

    output:
    set sid, "${sid}__in_AC_Cx_f.trk" into accx_for_trk_plausible, accx_for_rename, accx_for_commissural
    file "${sid}__in_AC_Cx_f.txt"

    script:
    filtering_list=params.FLF+"AC_Cx.txt"
    out_extension="in_AC_Cx_f"
    remaining_extension="garbage"
    basename="${sid}"
    keep=false
    extract_masks=""
    distance="$params.distance"

    template "filter_with_list.sh"
}

// CC / basal-ganglia bundle: list filtering with per-criterion distance
// overrides, then a 170 mm maximum-length cut.
process Extract_plausible_CC_BG {
    cpus 1

    input:
    set sid, file(tractogram) from cc_for_extract_CC_BG

    output:
    set sid, "${sid}__in_CC_BG_f.trk" into ccbg_for_trk_plausible, ccbg_for_commissural
    file "${sid}__in_CC_BG_f.txt"

    script:
    """
    scil_filter_tractogram.py ${tractogram} tmp.trk \
        --filtering_list ${params.FLF}CC_BG.txt -f\
        --overwrite_distance both_ends include 1\
        --overwrite_distance either_end include 1

    scil_filter_streamlines_by_length.py tmp.trk\
        ${sid}__in_CC_BG_f.trk\
        --maxL 170

    scil_count_streamlines.py ${sid}__in_CC_BG_f.trk > ${sid}__in_CC_BG_f.txt
    """
}

/*
Split not CC in asso BG and not BG
*/

process Split_no_CC_Asso_and_BG {
    cpus 1

    input:
    set sid, file(tractogram) from no_cc_for_split_asso_BG

    output:
    set sid, "${sid}__all_subcortical_from_CGM_SWM_noCC_f.trk" into asso_BG_for_split_Thal, asso_BG_for_split_Put, asso_BG_for_split_Caud
    file "${sid}__all_subcortical_from_CGM_SWM_noCC_f.txt"
    set sid, "${sid}__asso_noBG.trk" into asso_noBG_for_split_hemi
    file "${sid}__asso_noBG.txt"

    script:
    filtering_list=params.FLF+"all_BG.txt"
    out_extension="all_subcortical_from_CGM_SWM_noCC_f"
    remaining_extension="asso_noBG"
    basename="${sid}"
    keep=true
    extract_masks=""
    distance=1

    template "filter_with_list.sh"
}

bg_list=params.bg_lists?.tokenize(',')
Channel.from(bg_list).into{bg_thal_list;
    bg_put_list}
/*
BG THAL
*/
// One task per (subject, cortical list, hemisphere): ipsilateral
// basal-ganglia/thalamus sub-bundles.
process Split_BG_Thal {
    cpus 1

    input:
    set sid, file(tractogram) from asso_BG_for_split_Thal
    each list from bg_thal_list
    each side from sides_split_BG_Thal

    output:
    set sid, "${sid}__BG_ipsi_Thal_${list}_${side}.trk" into BG_ipsi_Thal_for_merge
    set sid, val(side), "${sid}__BG_ipsi_Thal_${list}_${side}.trk" into BG_ipsi_Thal_split_for_merge
    set sid, val(side), val(list), "${sid}__BG_ipsi_Thal_${list}_${side}.trk" into BG_ipsi_Thal_for_filter_CuGWM, BG_ipsi_Thal_for_filter_LGWM
    file "${sid}__BG_ipsi_Thal_${list}_${side}.txt"

    script:
    filtering_list=params.FLF+"BG_ipsi_Thal_${list}_${side}.txt"
    out_extension="BG_ipsi_Thal_${list}_${side}"
    remaining_extension="garbage_BG_ipsi_Thal_${list}_${side}"
    basename="${sid}"
    keep=false
    extract_masks=""
    distance=1

    template "filter_with_list.sh"
}

BG_ipsi_Thal_split_for_merge.groupTuple(by:[0,1]).set{BG_ipsi_Thal_for_rename}

// CuGWM + LGWM thalamic sub-bundles together form the optic-radiation group.
BG_ipsi_Thal_for_filter_CuGWM.filter{it[2]=='CuGWM'}.set{CuGWM_for_combine}
BG_ipsi_Thal_for_filter_LGWM.filter{it[2]=='LGWM'}.set{LGWM_for_combine}
CuGWM_for_combine.concat(LGWM_for_combine).groupTuple(by:[0,1]).set{optic_radiation_for_rename}

BG_ipsi_Thal_for_merge.groupTuple().map{it}.set{BG_ipsi_Thal_list_for_merge}

process Merge_BG_Thal{
    cpus 1

    input:
    set sid, file(tractogram) from BG_ipsi_Thal_list_for_merge

    output:
    set sid, "${sid}__BG_ipsi_Thal_all.trk" into BG_ipsi_Thal_for_trk_plausible

    script:
    """
    scil_tractogram_math.py union ${tractogram} ${sid}__BG_ipsi_Thal_all.trk -f
    """
}

/*
BG PUT
*/
process Split_BG_Put {
    cpus 1

    input:
    set sid, file(tractogram) from asso_BG_for_split_Put
    each list from bg_put_list
    each side from sides_split_BG_Put

    output:
    set sid, "${sid}__BG_ipsi_Put_${list}_${side}.trk" into BG_ipsi_Put_for_merge
    set sid, val(side), "${sid}__BG_ipsi_Put_${list}_${side}.trk" into BG_ipsi_Put_for_rename
    file "${sid}__BG_ipsi_Put_${list}_${side}.txt"

    script:
    filtering_list=params.FLF+"BG_ipsi_Put_${list}_${side}.txt"
    out_extension="BG_ipsi_Put_${list}_${side}"
    remaining_extension="garbage_BG_ipsi_Put_${list}_${side}"
    basename="${sid}"
    keep=false
    extract_masks=""
    distance=1

    template "filter_with_list.sh"
}

BG_ipsi_Put_for_merge.groupTuple().map{it}.set{BG_ipsi_Put_list_for_merge}

process Merge_BG_Put{
    cpus 1

    input:
    set sid, file(tractogram) from BG_ipsi_Put_list_for_merge

    output:
    set sid, "${sid}__BG_ipsi_Put_all.trk" into BG_ipsi_Put_for_trk_plausible

    script:
    """
    scil_tractogram_math.py union ${tractogram} ${sid}__BG_ipsi_Put_all.trk -f
    """
}

/*
BG CAUD
*/
bg_caud_list=params.bg_caud_lists?.tokenize(',')
process Split_BG_Caud {
    cpus 1

    input:
    set sid, file(tractogram) from asso_BG_for_split_Caud
    each list from bg_caud_list
    each side from sides_split_BG_Caud

    output:
    set sid, "${sid}__BG_ipsi_Caud_${list}_${side}.trk" into BG_ipsi_Caud_for_merge
    set sid, val(side), "${sid}__BG_ipsi_Caud_${list}_${side}.trk" into BG_ipsi_Caud_for_rename
    file "${sid}__BG_ipsi_Caud_${list}_${side}.txt"

    script:
    filtering_list=params.FLF+"BG_ipsi_Caud_${list}_${side}.txt"
    out_extension="BG_ipsi_Caud_${list}_${side}"
    remaining_extension="garbage_BG_ipsi_Caud_${list}_${side}"
    basename="${sid}"
    keep=false
    extract_masks=""
    distance=1

    template "filter_with_list.sh"
}

BG_ipsi_Caud_for_merge.groupTuple().map{it}.set{BG_ipsi_Caud_list_for_merge}

process Merge_BG_Caud{
    cpus 1

    input:
    set sid, file(tractogram) from BG_ipsi_Caud_list_for_merge

    output:
    set sid,
"${sid}__BG_ipsi_Caud_all.trk" into BG_ipsi_Caud_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__BG_ipsi_Caud_all.trk -f - """ -} - -process Split_asso_in_hemi { - cpus 1 - - input: - set sid, file(tractogram) from asso_noBG_for_split_hemi - each side from sides - - output: - set sid, val(side), "${sid}__asso_${side}.trk" into asso_for_extract_u_shape - file "${sid}__asso_${side}.txt" optional true - - script: - """ - scil_filter_tractogram.py ${tractogram} ${sid}__asso_L.trk\ - --filtering_list ${params.FLF}asso_L.txt -f - scil_filter_tractogram.py ${tractogram} ${sid}__asso_R.trk\ - --filtering_list ${params.FLF}asso_R.txt -f - """ -} - -/* -Extracting U-shaped and streamlines restricted to Cortical GM and removing them from asso -*/ - -process Split_ushape_CGM_asso { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_for_extract_u_shape - - output: - set sid, val(side), "${sid}__asso_only_in_CGM_${side}.trk" into assoCGM - set sid, val(side), "${sid}__asso_Ushape_${side}.trk" into assoUShape - set sid, "${sid}__asso_Ushape_${side}_u.trk" into asso_u_shape_for_trk_plausible - set sid, val(side), "${sid}__asso_Ushape_${side}_u.trk" into asso_u_shape_for_rename - - set sid, val(side), "${sid}__asso_f_${side}.trk" into asso_for_remove_long_range - file "${sid}__asso_only_in_CGM_${side}.txt" optional true - file "${sid}__asso_Ushape_${side}.txt" optional true - file "${sid}__asso_f_${side}.txt" optional true - - script: - """ - scil_filter_tractogram.py ${tractogram} ${sid}__tmp1_${side}.trk \ - --filtering_list ${params.FLF}all_in_CGM_${side}.txt -f - - scil_tractogram_math.py difference ${tractogram} ${sid}__tmp1_${side}.trk \ - ${sid}__asso_SWM_${side}.trk -f - - scil_filter_tractogram.py ${sid}__tmp1_${side}.trk ${sid}__asso_only_in_CGM_${side}.trk \ - --filtering_list ${params.FLF}not_in_SWM_${side}.txt -f - - scil_tractogram_math.py difference ${sid}__tmp1_${side}.trk 
${sid}__asso_only_in_CGM_${side}.trk \ - ${sid}__tmp2_${side}.trk -f - - scil_filter_tractogram.py ${sid}__tmp2_${side}.trk ${sid}__asso_Ushape_${side}.trk \ - --filtering_list ${params.FLF}not_in_DWM_${side}.txt -f - - scil_extract_ushape.py ${sid}__asso_Ushape_${side}.trk --minU 0.5 --maxU 1 ${sid}__asso_Ushape_${side}_u.trk -f - - scil_tractogram_math.py difference ${sid}__tmp2_${side}.trk ${sid}__asso_Ushape_${side}.trk \ - ${sid}__asso_DWM_${side}.trk -f - - scil_tractogram_math.py union ${sid}__asso_DWM_${side}.trk ${sid}__asso_SWM_${side}.trk ${sid}__asso_f_${side}.trk -f - - if ${params.keep_intermediate_steps} - then - scil_count_streamlines.py ${sid}__asso_only_in_CGM_${side}.trk > ${sid}__asso_only_in_CGM_${side}.txt - scil_count_streamlines.py ${sid}__asso_Ushape_${side}.trk > ${sid}__asso_Ushape_${side}.txt - scil_count_streamlines.py ${sid}__asso_f_${side}.trk > ${sid}__asso_f_${side}.txt - fi - """ -} - -/* -Extracting unplausible long-range association streamlines passing through subcortical structures (Cd, Put, GP, Thal, Amyg) -*/ - -process Remove_Unplausible_Long_Range_Asso { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_for_remove_long_range - - output: - set sid, val(side), "${sid}__asso_all_intra_inter_${side}.trk" into asso_all_intra_inter - set sid, "${sid}__asso_lost2_${side}.trk" optional true - file "${sid}__asso_all_intra_inter_${side}.txt" - file "${sid}__asso_lost2_${side}.txt" optional true - - script: - filtering_list=params.FLF+"not_in_BG.txt" - out_extension="asso_all_intra_inter_${side}" - remaining_extension="asso_lost2_${side}" - basename="${sid}" - keep="$params.keep_intermediate_steps" - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -/* -inCCBG.groupTuple().map{it.flatten().toList()}.set{inCCBG_List} -assoUShape.groupTuple().map{it.flatten().toList()}.set{assoUShape_list} -*/ - -asso_all_intra_inter.into{asso_all_intra_inter_for_ventral_filtering; - 
asso_all_intra_inter_for_dorsal_f_p_filtering; - asso_all_intra_inter_for_dorsal_f_o_f_t_filtering; - asso_all_intra_inter_for_p_o_filtering; - asso_all_intra_inter_for_p_t_filtering; - asso_all_intra_inter_for_o_t_filtering; - asso_all_intra_inter_for_ins_filtering; - asso_all_intra_inter_for_cing_filtering; - asso_all_intra_inter_for_be_frontal_filtering; - asso_all_intra_inter_for_ee_frontal_filtering; - asso_all_intra_inter_for_be_occipital_filtering; - asso_all_intra_inter_for_ee_occipital_filtering; - asso_all_intra_inter_for_be_parietal_filtering; - asso_all_intra_inter_for_ee_parietal_filtering; - asso_all_intra_inter_for_be_temporal_filtering; - asso_all_intra_inter_for_ee_temporal_filtering - asso_all_intra_inter_plausible} - -asso_all_intra_inter_plausible.groupTuple().map{it.flatten().toList()}.set{asso_all_intra_inter_list} - -cc_for_merge_plausible_01.into{ccCleanedPlausible; CC_for_homotopic} - -assoCGM.groupTuple().map{it.flatten().toList()}.set{assoCGM_list} - - -/* - CC Homotopic -*/ - -cc_homotopic_pairs=params.cc_homotopic_pairs?.tokenize(',') - -process CC_Homotopic { - cpus 1 - - input: - set sid, file(tractogram) from CC_for_homotopic - each pair from cc_homotopic_pairs - - output: - set sid, "${sid}__cc_homotopic_${pair}.trk" into CC_Homotopic_for_merge - set sid, val(pair), "${sid}__cc_homotopic_${pair}.trk" into CC_Homotopic_for_filter_AGWM, CC_Homotopic_for_filter_CingGWM, CC_Homotopic_for_filter_CuGWM, CC_Homotopic_for_filter_FuGWM, CC_Homotopic_for_filter_Hippo, CC_Homotopic_for_filter_IFGWM, CC_Homotopic_for_filter_Ins, CC_Homotopic_for_filter_IOGWM, CC_Homotopic_for_filter_ITGWM, CC_Homotopic_for_filter_LFOGWM, CC_Homotopic_for_filter_LGWM, CC_Homotopic_for_filter_MFGWM, CC_Homotopic_for_filter_MFOGWM, CC_Homotopic_for_filter_MOGWM, CC_Homotopic_for_filter_MTGWM, CC_Homotopic_for_filter_PHG, CC_Homotopic_for_filter_PoCGWM, CC_Homotopic_for_filter_PrCGWM, CC_Homotopic_for_filter_PrCuGWM, CC_Homotopic_for_filter_RGGWM, 
CC_Homotopic_for_filter_SFGWM, CC_Homotopic_for_filter_SMGWM, CC_Homotopic_for_filter_SOGWM, CC_Homotopic_for_filter_SPGWM, CC_Homotopic_for_filter_STGWM, CC_Homotopic_for_filter_T_pole_gwm - file "${sid}__cc_homotopic_${pair}.txt" - - script: - filtering_list=params.FLF+"CC_homo_${pair}.txt" - out_extension="cc_homotopic_${pair}" - remaining_extension="garbage_${pair}" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -/* -Filter + Concat frontal -*/ -CC_Homotopic_for_filter_IFGWM.filter{it[1]=='IFGWM'}.set{CC_IFGWM_for_combine_frontal} -CC_Homotopic_for_filter_SFGWM.filter{it[1]=='SFGWM'}.set{CC_SFGWM_for_combine_frontal} -CC_Homotopic_for_filter_MFGWM.filter{it[1]=='MFGWM'}.set{CC_MFGWM_for_combine_frontal} -CC_Homotopic_for_filter_MFOGWM.filter{it[1]=='MFOGWM'}.set{CC_MFOGWM_for_combine_frontal} -CC_Homotopic_for_filter_LFOGWM.filter{it[1]=='LFOGWM'}.set{CC_LFOGWM_for_combine_frontal} -CC_Homotopic_for_filter_PrCGWM.filter{it[1]=='PrCGWM'}.set{CC_PrCGWM_for_combine_frontal} -CC_Homotopic_for_filter_RGGWM.filter{it[1]=='RGGWM'}.set{CC_RGGWM_for_combine_frontal} - -CC_IFGWM_for_combine_frontal.concat(CC_SFGWM_for_combine_frontal).concat(CC_MFGWM_for_combine_frontal).concat(CC_MFOGWM_for_combine_frontal).concat(CC_LFOGWM_for_combine_frontal).concat(CC_PrCGWM_for_combine_frontal).concat(CC_RGGWM_for_combine_frontal).groupTuple(by:0).set{CC_Homotopic_frontal_for_rename} - -/* -Filter + Concat occipital -*/ -CC_Homotopic_for_filter_SOGWM.filter{it[1]=='SOGWM'}.set{CC_SOGWM_for_combine_occipital} -CC_Homotopic_for_filter_MOGWM.filter{it[1]=='MOGWM'}.set{CC_MOGWM_for_combine_occipital} -CC_Homotopic_for_filter_IOGWM.filter{it[1]=='IOGWM'}.set{CC_IOGWM_for_combine_occipital} -CC_Homotopic_for_filter_CuGWM.filter{it[1]=='CuGWM'}.set{CC_CuGWM_for_combine_occipital} -CC_Homotopic_for_filter_LGWM.filter{it[1]=='LGWM'}.set{CC_LGWM_for_combine_occipital} - 
-CC_SOGWM_for_combine_occipital.concat(CC_MOGWM_for_combine_occipital).concat(CC_IOGWM_for_combine_occipital).concat(CC_CuGWM_for_combine_occipital).concat(CC_LGWM_for_combine_occipital).groupTuple(by:0).set{CC_Homotopic_occipital_for_rename} - -/* -Filter + Concat temporal -*/ -CC_Homotopic_for_filter_STGWM.filter{it[1]=='STGWM'}.set{CC_STGWM_for_combine_temporal} -CC_Homotopic_for_filter_T_pole_gwm.filter{it[1]=='T_pole_gwm'}.set{CC_T_pole_gwm_for_combine_temporal} -CC_Homotopic_for_filter_MTGWM.filter{it[1]=='MTGWM'}.set{CC_MTGWM_for_combine_temporal} -CC_Homotopic_for_filter_ITGWM.filter{it[1]=='ITGWM'}.set{CC_ITGWM_for_combine_temporal} -CC_Homotopic_for_filter_PHG.filter{it[1]=='PHG'}.set{CC_PHG_for_combine_temporal} -CC_Homotopic_for_filter_Hippo.filter{it[1]=='Hippo'}.set{CC_Hippo_for_combine_temporal} -CC_Homotopic_for_filter_FuGWM.filter{it[1]=='FuGWM'}.set{CC_FuGWM_for_combine_temporal} - -CC_STGWM_for_combine_temporal.concat(CC_T_pole_gwm_for_combine_temporal).concat(CC_MTGWM_for_combine_temporal).concat(CC_ITGWM_for_combine_temporal).concat(CC_PHG_for_combine_temporal).concat(CC_Hippo_for_combine_temporal).concat(CC_FuGWM_for_combine_temporal).groupTuple(by:0).set{CC_Homotopic_temporal_for_rename} - -/* -Filter + Concat parietal -*/ -CC_Homotopic_for_filter_SPGWM.filter{it[1]=='SPGWM'}.set{CC_SPGWM_for_combine_parietal} -CC_Homotopic_for_filter_SMGWM.filter{it[1]=='SMGWM'}.set{CC_SMGWM_for_combine_parietal} -CC_Homotopic_for_filter_PrCuGWM.filter{it[1]=='PrCuGWM'}.set{CC_PrCuGWM_for_combine_parietal} -CC_Homotopic_for_filter_PoCGWM.filter{it[1]=='PoCGWM'}.set{CC_PoCGWM_for_combine_parietal} -CC_Homotopic_for_filter_AGWM.filter{it[1]=='AGWM'}.set{CC_AGWM_for_combine_parietal} - -CC_SPGWM_for_combine_parietal.concat(CC_SMGWM_for_combine_parietal).concat(CC_PrCuGWM_for_combine_parietal).concat(CC_PoCGWM_for_combine_parietal).concat(CC_AGWM_for_combine_parietal).groupTuple(by:0).set{CC_Homotopic_parietal_for_rename} - - -/* -Filter CC Cingulum -*/ 
-CC_Homotopic_for_filter_CingGWM.filter{it[1]=='CingGWM'}.set{CC_Homotopic_cingulum_for_rename} - -/* -Filter CC Ins -*/ -CC_Homotopic_for_filter_Ins.filter{it[1]=='Ins'}.set{CC_Homotopic_insular_for_rename} - - -/* -MERGE CC_Homotopic -*/ -CC_Homotopic_for_merge.groupTuple().map{it}.set{CC_Homotopic_list_for_merge} - -process CC_Homotopic_merge { - cpus 1 - -input: - set sid, file(tractogram) from CC_Homotopic_list_for_merge - -output: - set sid, "${sid}__CC_homo.trk" into CC_homo_for_trk_plausible, CC_homo_for_renaming, cc_homo_for_commissural - -script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__CC_homo.trk - """ -} - -/* - COMMISSURAL -*/ - -cc_tmp_for_commissural.join(accx_for_commissural).join(ccbg_for_commissural).join(cc_homo_for_commissural).set{all_cc_for_commissural} - -process CC_all_commissural { - cpus 1 - - input: - set sid, file(tmp_cc), file(accx), file(ccbg), file(cc_homo) from all_cc_for_commissural - - output: - set sid, "${sid}__plausible_commissural_${params.template_space}.trk" into plausible_commissural_for_register_to_orig - file "${sid}__unplausible_commissural.trk" optional true - - script: - """ - scil_tractogram_math.py union ${accx} ${ccbg} ${cc_homo} ${sid}__plausible_commissural_${params.template_space}.trk -f - - if ${params.keep_intermediate_steps} - then - scil_tractogram_math.py difference ${tmp_cc} ${sid}__plausible_commissural_${params.template_space}.trk ${sid}__unplausible_commissural.trk -f - fi - """ -} - -/* - ASSO VENTRAL -*/ - -asso_ventral_lists=params.asso_ventral_lists?.tokenize(',') - -process Asso_ventral { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_for_ventral_filtering - each asso_list from asso_ventral_lists - - output: - set sid, val(side), "${sid}__asso_F_${asso_list}_ventral_f_${side}.trk" into asso_all_intra_inter_ventral_for_merge - file "${sid}__asso_F_${asso_list}_ventral_f_${side}.txt" - - script: - 
filtering_list=params.FLF+"ASSO_F_${asso_list}_ventral_${side}.txt" - out_extension="asso_F_${asso_list}_ventral_f_${side}" - remaining_extension="asso_lost_${side}" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -asso_all_intra_inter_ventral_for_merge.groupTuple(by:[0,1]).map{it.flatten().toList()}.set{asso_all_intra_inter_ventral_all_for_merge} - -process Merge_asso_ventral { - cpus 1 - - input: - set sid, val(side), file(trk01), file(trk02), file(trk03) from asso_all_intra_inter_ventral_all_for_merge - - output: - set sid, "${sid}__asso_all_ventral_f_${side}.trk" into asso_all_ventral_for_trk_plausible - set sid, val(side), "${sid}__asso_all_ventral_f_${side}.trk" into asso_all_ventral_for_split_ifof_uf - - script: - """ - scil_tractogram_math.py union ${trk01} ${trk02} ${trk03} ${sid}__asso_all_ventral_f_${side}.trk -f - """ -} - -process Split_asso_ventral_ifof_uf { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_ventral_for_split_ifof_uf - - output: - set sid, val(side), "${sid}__asso_IFOF_f_${side}.trk" into asso_IFOF_for_rename - set sid, val(side), "${sid}__asso_UF_f_${side}.trk" into asso_UF_for_rename - file "${sid}__asso_IFOF_f_${side}.txt" - file "${sid}__asso_UF_f_${side}.txt" - - script: - filtering_list=params.FLF+"split_IFOF_UF_${side}.txt" - out_extension="asso_IFOF_f_${side}" - remaining_extension="asso_UF_f_${side}" - basename="${sid}" - keep=true - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -/* - ASSO DORSAL -*/ - -asso_dorsal_f_p_lists=params.asso_dorsal_f_p_lists?.tokenize(',') - -process Asso_dorsal_f_p { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_for_dorsal_f_p_filtering - each asso_list from asso_dorsal_f_p_lists - - output: - set sid, val(side), "${sid}__asso_${asso_list}_${side}.trk" into asso_all_intra_inter_dorsal_f_p_for_merge - set sid, val(side), 
val(asso_list), "${sid}__asso_${asso_list}_${side}.trk" into asso_all_intra_inter_dorsal_f_p_for_rename - file "${sid}__asso_${asso_list}_${side}.txt" - - script: - filtering_list=params.FLF+"ASSO_${asso_list}_${side}.txt" - out_extension="asso_${asso_list}_${side}" - remaining_extension="asso_lost_${asso_list}_${side}" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -asso_all_intra_inter_dorsal_f_p_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_all_intra_inter_dorsal_f_p_list_for_merge} - -process Merge_asso_dorsal_f_p { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_dorsal_f_p_list_for_merge - - output: - set sid, val(side), "${sid}__asso_F_P_dorsal_f_${side}.trk" into asso_all_intra_inter_dorsal_all_f_p_for_merge - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__asso_F_P_dorsal_f_${side}.trk -f - """ -} - -asso_dorsal_f_o_f_t_list=params.asso_dorsal_f_o_f_t_lists?.tokenize(',') - -process Asso_dorsal_f_o_f_t { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_for_dorsal_f_o_f_t_filtering - each asso_list from asso_dorsal_f_o_f_t_list - - output: - set sid, val(side), "${sid}__asso_${asso_list}_${side}.trk" into asso_all_intra_inter_dorsal_all_f_o_f_t_for_merge - set sid, val(side), val(asso_list), "${sid}__asso_${asso_list}_${side}.trk" into asso_all_intra_inter_dorsal_all_f_T_for_filter, asso_all_intra_inter_dorsal_all_f_O_for_filter - file "${sid}__asso_${asso_list}_${side}.txt" - - script: - filtering_list=params.FLF+"ASSO_${asso_list}_${side}.txt" - out_extension="asso_${asso_list}_${side}" - remaining_extension="asso_lost_${asso_list}_${side}" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -asso_all_intra_inter_dorsal_all_f_T_for_filter.filter{it[2]=='F_T_dorsal'}.set{asso_all_intra_inter_dorsal_all_f_T_for_rename} 
-asso_all_intra_inter_dorsal_all_f_O_for_filter.filter{it[2]=='F_O_dorsal'}.set{asso_all_intra_inter_dorsal_all_f_O_for_rename} - -asso_all_intra_inter_dorsal_all_f_p_for_merge.groupTuple(by:[0,1]).join(asso_all_intra_inter_dorsal_all_f_o_f_t_for_merge.groupTuple(by:[0,1]), by:[0,1]).map{it.flatten().toList()}.set{asso_all_intra_inter_dorsal_all_for_merge} - -process Merge_asso_dorsal { - cpus 1 - - input: - set sid, val(side), file(trk01), file(trk02), file(trk03) from asso_all_intra_inter_dorsal_all_for_merge - - output: - set sid, "${sid}__asso_all_dorsal_f_${side}.trk" into asso_all_dorsal_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${trk01} ${trk02} ${trk03} ${sid}__asso_all_dorsal_f_${side}.trk -f - """ -} - -/* - ASSO P_O -*/ - -asso_p_o_list=params.asso_p_o_lists?.tokenize(',') - -process Asso_p_o { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_for_p_o_filtering - each asso_list from asso_p_o_list - - output: - set sid, val(side), "${sid}__asso_${asso_list}_${side}.trk" into asso_intra_inter_p_o_for_merge - file "${sid}__asso_${asso_list}_${side}.txt" - - script: - filtering_list=params.FLF+"ASSO_${asso_list}_${side}.txt" - out_extension="asso_${asso_list}_${side}" - remaining_extension="asso_lost_${asso_list}_${side}" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -asso_intra_inter_p_o_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_intra_inter_p_o_list_for_merge} - -process Merge_p_o { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_intra_inter_p_o_list_for_merge - - output: - set sid, "${sid}__asso_all_P_O_f_${side}.trk" into all_P_O_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__asso_all_P_O_f_${side}.trk -f - """ -} - -/* - ASSO P_T -*/ - -asso_p_t_list=params.asso_p_t_lists?.tokenize(',') - -process Asso_p_t { - cpus 1 - - input: - set sid, val(side), 
file(tractogram) from asso_all_intra_inter_for_p_t_filtering - each asso_list from asso_p_t_list - - output: - set sid, val(side), "${sid}__asso_${asso_list}_${side}.trk" into asso_intra_inter_p_t_for_merge - file "${sid}__asso_${asso_list}_${side}.txt" - - script: - filtering_list=params.FLF+"ASSO_${asso_list}_${side}.txt" - out_extension="asso_${asso_list}_${side}" - remaining_extension="asso_lost_${asso_list}_${side}" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -asso_intra_inter_p_t_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_intra_inter_p_t_list_for_merge} - -process Merge_p_t { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_intra_inter_p_t_list_for_merge - - output: - set sid, "${sid}__asso_all_P_T_f_${side}.trk" into all_P_T_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__asso_all_P_T_f_${side}.trk -f - """ -} - -/* - ASSO O_T -*/ - -asso_o_t_list=params.asso_o_t_lists?.tokenize(',') - -process Asso_o_t { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_for_o_t_filtering - each asso_list from asso_o_t_list - - output: - set sid, val(side), "${sid}__asso_${asso_list}_${side}.trk" into asso_intra_inter_o_t_for_merge - file "${sid}__asso_${asso_list}_${side}.txt" - - script: - filtering_list=params.FLF+"ASSO_${asso_list}_${side}.txt" - out_extension="asso_${asso_list}_${side}" - remaining_extension="asso_lost_${asso_list}_${side}" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -asso_intra_inter_o_t_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_intra_inter_o_t_list_for_merge} - -process Merge_o_t { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_intra_inter_o_t_list_for_merge - - output: - set sid, "${sid}__asso_all_O_T_f_${side}.trk" into all_O_T_for_trk_plausible - set sid, val(side), 
"${sid}__asso_all_O_T_f_${side}.trk" into all_O_T_for_rename - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__asso_all_O_T_f_${side}.trk -f - """ -} - -/* - ASSO Ins -*/ - -asso_ins_list=params.asso_ins_lists?.tokenize(',') - -process Asso_ins { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_for_ins_filtering - each asso_list from asso_ins_list - - output: - set sid, val(side), "${sid}__asso_${asso_list}_${side}.trk" into asso_intra_inter_ins_for_merge - file "${sid}__asso_${asso_list}_${side}.txt" - - script: - filtering_list=params.FLF+"ASSO_${asso_list}_${side}.txt" - out_extension="asso_${asso_list}_${side}" - remaining_extension="asso_lost_${asso_list}_${side}" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -asso_intra_inter_ins_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_intra_inter_ins_list_for_merge} - -process Merge_ins { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_intra_inter_ins_list_for_merge - - output: - set sid, "${sid}__asso_all_Ins_f_${side}.trk" into Ins_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__asso_all_Ins_f_${side}.trk -f - """ -} - -/* - ASSO CING -*/ - -process Asso_Cing { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_for_cing_filtering - - output: - set sid, "${sid}__asso_all_Cing_${side}.trk" into Cing_for_trk_plausible - set sid, val(side), "${sid}__asso_all_Cing_${side}.trk" into Cing_for_rename - file "${sid}__asso_all_Cing_${side}.txt" - - script: - filtering_list=params.FLF+"ASSO_Cing_${side}.txt" - out_extension="asso_all_Cing_${side}" - remaining_extension="asso_lost_Cing_${side}" - basename="${sid}" - keep=false - extract_masks="" - distance=1 - - template "filter_with_list.sh" -} - -/* - BE ASSO FRONTAL: extracting all streamlines with both ends in a frontal gyrus (U-shape > 20 mm) 
*/

asso_frontal_be_list=params.asso_frontal_be_lists?.tokenize(',')
process Asso_be_frontal_gyrus {
    cpus 1

    input:
    set sid, val(side), file(tractogram) from asso_all_intra_inter_for_be_frontal_filtering
    each gyrus from asso_frontal_be_list

    output:
    set sid, val(side), val(gyrus), "${sid}_asso_intra_be_frontal_${gyrus}_${side}_u.trk" into asso_frontal_be_for_merge

    script:
    """
    scil_filter_tractogram.py ${tractogram} tmp.trk\
        --filtering_list ${params.FLF}ASSO_be_${gyrus}_${side}.txt -f
    scil_extract_ushape.py tmp.trk --minU 0.5 --maxU 1\
        ${sid}_asso_intra_be_frontal_${gyrus}_${side}_u.trk -f
    """
}

asso_frontal_be_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_frontal_be_list_for_merge}
process Merge_asso_be_frontal_gyrus{
    cpus 1

    input:
    set sid, val(side), val(gyrus), file(tractogram) from asso_frontal_be_list_for_merge

    output:
    set sid, "${sid}_asso_all_intraF_be_f_${side}_u.trk" into asso_all_intraF_be_for_trk_plausible

    script:
    """
    scil_tractogram_math.py union ${tractogram}\
        ${sid}_asso_all_intraF_be_f_${side}_u.trk -f
    """
}

/*
    EE ASSO FRONTAL: extracting all streamlines with either ends in a frontal gyrus (U-shape > 20 mm)
*/

// Gyrus-pair name plus the per-pair maximum streamline length (mm).
asso_frontal_ee_list = Channel.from(['SFG_MFG', 70],
    ['SFG_IFG', 70],
    ['SFG_PrCG', 90],
    ['SFG_FrOrbG', 70],
    ['MFG_IFG', 70],
    ['MFG_PrCG', 110],
    ['MFG_FrOrbG', 60],
    ['IFG_PrCG', 110],
    ['IFG_FrOrbG', 60])
asso_all_intra_inter_for_ee_frontal_filtering.combine(asso_frontal_ee_list).set{asso_frontal_ee_for_extract}
process Asso_ee_frontal_gyrus {
    cpus 1

    input:
    set sid, val(side), file(tractogram), val(gyrus), val(max_length) from asso_frontal_ee_for_extract

    output:
    set sid, val(side), val(gyrus), "${sid}_asso_intra_ee_frontal_${gyrus}_${side}.trk" into asso_frontal_ee_for_merge

    script:
    """
    scil_filter_tractogram.py ${tractogram} tmp_01.trk\
        --filtering_list ${params.FLF}ASSO_ee_${gyrus}_${side}.txt -f
    scil_filter_streamlines_by_length.py tmp_01.trk tmp_02.trk\
        --maxL ${max_length} -f
    scil_extract_ushape.py tmp_02.trk\
        --minU 0.5\
        --maxU 1\
        ${sid}_asso_intra_ee_frontal_${gyrus}_${side}.trk -f
    """
}

asso_frontal_ee_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_frontal_ee_list_for_merge}
process Merge_asso_ee_frontal_gyrus{
    cpus 1

    input:
    set sid, val(side), val(gyrus), file(tractogram) from asso_frontal_ee_list_for_merge

    output:
    set sid, "${sid}_asso_all_intraF_ee_f_${side}_u.trk" into asso_all_intraF_ee_for_trk_plausible

    script:
    """
    scil_tractogram_math.py union ${tractogram} ${sid}_asso_all_intraF_ee_f_${side}_u.trk -f
    """
}

/*
    BE ASSO OCCIPITAL: extracting all streamlines with both ends in a occipital gyrus (U-shape > 20 mm)
*/

asso_occipital_be_list=params.asso_occipital_be_lists?.tokenize(',')
process Asso_be_occipital_gyrus {
    cpus 1

    input:
    set sid, val(side), file(tractogram) from asso_all_intra_inter_for_be_occipital_filtering
    each gyrus from asso_occipital_be_list

    output:
    set sid, val(side), val(gyrus), "${sid}_asso_intra_be_occipital_${gyrus}_${side}_u.trk" into asso_occipital_be_for_merge

    script:
    """
    scil_filter_tractogram.py ${tractogram} tmp.trk \
        --filtering_list ${params.FLF}ASSO_be_${gyrus}_${side}.txt -f
    scil_extract_ushape.py tmp.trk\
        --minU 0.5\
        --maxU 1\
        ${sid}_asso_intra_be_occipital_${gyrus}_${side}_u.trk -f
    """
}

asso_occipital_be_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_occipital_be_list_for_merge}
process Merge_asso_be_occipital_gyrus{
    cpus 1

    input:
    set sid, val(side), val(gyrus), file(tractogram) from asso_occipital_be_list_for_merge

    output:
    set sid, "${sid}_asso_all_intraO_be_f_${side}_u.trk" into asso_all_intraO_be_for_trk_plausible

    script:
    """
    scil_tractogram_math.py union ${tractogram} ${sid}_asso_all_intraO_be_f_${side}_u.trk -f
    """
}

/*
    EE ASSO OCCIPITAL: extracting all streamlines with either ends in a occipital gyrus (U-shape > 20 mm)
*/

// Gyrus-pair name plus the per-pair maximum streamline length (mm).
asso_occipital_ee_list = Channel.from(['MOG_SOG', 60],['MOG_IOG', 50], ['MOG_CuG', 60], ['SOG_CuG', 30], ['CuG_LG', 60])
asso_all_intra_inter_for_ee_occipital_filtering.combine(asso_occipital_ee_list).set{asso_occipital_ee_for_extract}
process Asso_ee_occipital_gyrus {
    cpus 1

    input:
    set sid, val(side), file(tractogram), val(gyrus), val(max_length) from asso_occipital_ee_for_extract

    output:
    set sid, val(side), val(gyrus), "${sid}_asso_intra_ee_occipital_${gyrus}_${side}.trk" into asso_occipital_ee_for_merge

    script:
    """
    scil_filter_tractogram.py ${tractogram} tmp_01.trk\
        --filtering_list ${params.FLF}ASSO_ee_${gyrus}_${side}.txt -f
    scil_filter_streamlines_by_length.py tmp_01.trk tmp_02.trk\
        --maxL ${max_length} -f
    scil_extract_ushape.py tmp_02.trk\
        --minU 0.5\
        --maxU 1\
        ${sid}_asso_intra_ee_occipital_${gyrus}_${side}.trk -f
    """
}

asso_occipital_ee_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_occipital_ee_list_for_merge}
process Merge_asso_ee_occipital_gyrus{
    cpus 1

    input:
    set sid, val(side), val(gyrus), file(tractogram) from asso_occipital_ee_list_for_merge

    output:
    set sid, "${sid}_asso_all_intraO_ee_f_${side}_u.trk" into asso_all_intraO_ee_for_trk_plausible

    script:
    """
    scil_tractogram_math.py union ${tractogram} ${sid}_asso_all_intraO_ee_f_${side}_u.trk -f
    """
}

/*
    BE ASSO PARIETAL: extracting all streamlines with both ends in a parietal gyrus (U-shape > 20 mm)
*/

asso_parietal_be_list=params.asso_parietal_be_lists?.tokenize(',')
process Asso_be_parietal_gyrus {
    cpus 1

    input:
    set sid, val(side), file(tractogram) from asso_all_intra_inter_for_be_parietal_filtering
    each gyrus from asso_parietal_be_list

    output:
    set sid, val(side), val(gyrus), "${sid}_asso_intra_be_parietal_${gyrus}_${side}_u.trk" into asso_parietal_be_for_merge

    script:
    """
    scil_filter_tractogram.py ${tractogram} tmp.trk\
        --filtering_list
${params.FLF}ASSO_be_${gyrus}_${side}.txt -f - scil_extract_ushape.py tmp.trk\ - --minU 0.5\ - --maxU 1\ - ${sid}_asso_intra_be_parietal_${gyrus}_${side}_u.trk -f - """ -} - -asso_parietal_be_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_parietal_be_list_for_merge} -process Merge_asso_be_parietal_gyrus{ - cpus 1 - - input: - set sid, val(side), val(gyrus), file(tractogram) from asso_parietal_be_list_for_merge - - output: - set sid, "${sid}_asso_all_intraP_be_f_${side}_u.trk" into asso_all_intraP_be_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}_asso_all_intraP_be_f_${side}_u.trk -f - """ -} - -/* - EE ASSO PARIETAL: extracting all streamlines with either ends in a parietal gyrus (U-shape > 20 mm) -*/ - -asso_parietal_ee_list = Channel.from(['SPG_PoCG', 50], ['SPG_AG', 80], ['SPG_SMG', 70], ['SPG_PrCuG', 50], ['AG_PoCG', 10000], ['AG_SMG', 90], ['AG_PrCuG', 90] , ['SMG_PoCG', 60], ['SMG_PrCuG',100], ['PoCG_PrCuG', 80]) -asso_all_intra_inter_for_ee_parietal_filtering.combine(asso_parietal_ee_list).set{asso_parietal_ee_for_extract} -process Asso_ee_parietal_gyrus { - cpus 1 - - input: - set sid, val(side), file(tractogram), val(gyrus), val(max_length) from asso_parietal_ee_for_extract - - output: - set sid, val(side), val(gyrus), "${sid}_asso_intra_ee_parietal_${gyrus}_${side}.trk" into asso_parietal_ee_for_merge - - script: - """ - scil_filter_tractogram.py ${tractogram} tmp_01.trk\ - --filtering_list ${params.FLF}ASSO_ee_${gyrus}_${side}.txt -f - scil_filter_streamlines_by_length.py tmp_01.trk tmp_02.trk\ - --maxL ${max_length} -f - scil_extract_ushape.py tmp_02.trk\ - --minU 0.5\ - --maxU 1\ - ${sid}_asso_intra_ee_parietal_${gyrus}_${side}.trk -f - """ -} - -asso_parietal_ee_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_parietal_ee_list_for_merge} -process Merge_asso_ee_parietal_gyrus{ - cpus 1 - - input: - set sid, val(side), val(gyrus), file(tractogram) from asso_parietal_ee_list_for_merge - - output: - set sid, 
"${sid}_asso_all_intraP_ee_f_${side}.trk" into asso_all_intraP_ee_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}_asso_all_intraP_ee_f_${side}.trk -f - """ -} - -/* - BE ASSO TEMPORAL: extracting all streamlines with both ends in a temporal gyrus and merge (U-shape > 20 mm) -*/ - -asso_temporal_be_list=params.asso_temporal_be_lists?.tokenize(',') -process Asso_be_temporal_gyrus { - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_all_intra_inter_for_be_temporal_filtering - each gyrus from asso_temporal_be_list - - output: - set sid, val(side), val(gyrus), "${sid}_asso_intra_be_temporal_${gyrus}_${side}_u.trk" into asso_temporal_be_for_merge - - script: - """ - scil_filter_tractogram.py ${tractogram} tmp.trk\ - --filtering_list ${params.FLF}ASSO_be_${gyrus}_${side}.txt -f - scil_extract_ushape.py tmp.trk\ - --minU 0.5\ - --maxU 1\ - ${sid}_asso_intra_be_temporal_${gyrus}_${side}_u.trk -f - """ -} - -asso_temporal_be_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_temporal_be_list_for_merge} -process Merge_asso_be_temporal_gyrus{ - cpus 1 - - input: - set sid, val(side), val(gyrus), file(tractogram) from asso_temporal_be_list_for_merge - - output: - set sid, "${sid}_asso_all_intraT_be_f_${side}_u.trk" into asso_all_intraT_be_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}_asso_all_intraT_be_f_${side}_u.trk -f - """ -} - -/* - EE ASSO TEMPORAL: extracting all streamlines with either ends in a temporal gyrus and merge (U-shape > 20 mm) -*/ - -asso_temporal_ee_list = Channel.from(['STG_MTG', 60], ['STG_ITG',80], ['STG_Tpole',110], ['MTG_ITG',60], ['MTG_Tpole', 100000], ['ITG_Tpole', 60]) -asso_all_intra_inter_for_ee_temporal_filtering.combine(asso_temporal_ee_list).set{asso_temporal_ee_for_extract} -process Asso_ee_temporal_gyrus { - cpus 1 - - input: - set sid, val(side), file(tractogram), val(gyrus), val(max_length) from asso_temporal_ee_for_extract - - output: - set 
sid, val(side), val(gyrus), "${sid}_asso_intra_ee_temporal_${gyrus}_${side}.trk" into asso_temporal_ee_for_merge - - script: - """ - scil_filter_tractogram.py ${tractogram} tmp_01.trk\ - --filtering_list ${params.FLF}ASSO_ee_${gyrus}_${side}.txt -f - scil_filter_streamlines_by_length.py tmp_01.trk tmp_02.trk\ - --maxL ${max_length} -f - scil_extract_ushape.py tmp_02.trk\ - --minU 0.5\ - --maxU 1\ - ${sid}_asso_intra_ee_temporal_${gyrus}_${side}.trk -f - """ -} - -asso_temporal_ee_for_merge.groupTuple(by:[0,1]).map{it}.set{asso_temporal_ee_list_for_merge} -process Merge_asso_ee_temporal_gyrus{ - cpus 1 - - input: - set sid, val(side), val(gyrus), file(tractogram) from asso_temporal_ee_list_for_merge - - output: - set sid, "${sid}_asso_all_intraT_ee_f_${side}.trk" into asso_all_intraT_ee_for_trk_plausible - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}_asso_all_intraT_ee_f_${side}.trk -f - """ -} - -fornix_for_trk_plausible.concat(cerebellum_for_trk_plausible,brainstem_for_trk_plausible,BG_ipsi_Thal_for_trk_plausible,BG_ipsi_Put_for_trk_plausible,BG_ipsi_Caud_for_trk_plausible,asso_u_shape_for_trk_plausible,CC_homo_for_trk_plausible,asso_all_dorsal_for_trk_plausible,asso_all_ventral_for_trk_plausible,all_P_O_for_trk_plausible,all_P_T_for_trk_plausible,all_O_T_for_trk_plausible,Ins_for_trk_plausible,Cing_for_trk_plausible,asso_all_intraF_be_for_trk_plausible,asso_all_intraF_ee_for_trk_plausible,asso_all_intraP_be_for_trk_plausible,asso_all_intraP_ee_for_trk_plausible,asso_all_intraO_be_for_trk_plausible,asso_all_intraO_ee_for_trk_plausible,asso_all_intraT_be_for_trk_plausible,asso_all_intraT_ee_for_trk_plausible, accx_for_trk_plausible, ccbg_for_trk_plausible).groupTuple(by: 0).set{merge_trk_plausible} - -process Merge_trk_plausible{ - publishDir = params.final_output_mni_space - cpus 1 - - input: - set sid, file(tractogram) from merge_trk_plausible - - output: - set sid, "${sid}__plausible_${params.template_space}.trk" into 
plausible_for_extract_unplausible, trk_plausible_for_register_plausible_to_orig - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__plausible_${params.template_space}_tmp.trk -f --no_metadata - scil_shuffle_streamlines.py ${sid}__plausible_${params.template_space}_tmp.trk ${sid}__plausible_${params.template_space}.trk -f - """ -} - -trk_for_extract_unplausible.join(plausible_for_extract_unplausible).set{for_trk_unplausible} - -process Extract_trk_unplausible{ - publishDir = params.final_output_mni_space - cpus 1 - - input: - set sid, file(trk01), file(trk02) from for_trk_unplausible - output: - set sid, "${sid}__unplausible_${params.template_space}.trk" into trk_unplausible_for_register_to_orig - - script: - """ - scil_tractogram_math.py difference ${trk01} ${trk02} ${sid}__unplausible_${params.template_space}.trk -f - """ -} - -/* -RENAME CC CC_Homotopic -*/ -process Rename_cc_homotopic { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(list), file(trk01) from CC_Homotopic_frontal_for_rename - set sid, val(list), file(trk02) from CC_Homotopic_occipital_for_rename - set sid, val(list), file(trk03) from CC_Homotopic_temporal_for_rename - set sid, val(list), file(trk04) from CC_Homotopic_parietal_for_rename - set sid, val(list), file(trk05) from CC_Homotopic_insular_for_rename - set sid, val(list), file(trk06) from CC_Homotopic_cingulum_for_rename - output: - set sid, "${sid}__cc_homotopic_frontal_${params.template_space}.trk" into cc_homotopic_frontal_for_register_to_orig - set sid, "${sid}__cc_homotopic_occipital_${params.template_space}.trk" into cc_homotopic_occipital_for_register_to_orig - set sid, "${sid}__cc_homotopic_temporal_${params.template_space}.trk" into cc_homotopic_temporal_for_register_to_orig - set sid, "${sid}__cc_homotopic_parietal_${params.template_space}.trk" into cc_homotopic_parietal_for_register_to_orig - set sid, "${sid}__cc_homotopic_insular_${params.template_space}.trk" into 
cc_homotopic_insular_for_register_to_orig - set sid, "${sid}__cc_homotopic_cingulum_${params.template_space}.trk" into cc_homotopic_cingulum_for_register_to_orig - - when: - params.extended - - script: - """ - scil_tractogram_math.py union ${trk01} "${sid}__cc_homotopic_frontal_${params.template_space}.trk" -f - scil_tractogram_math.py union ${trk02} "${sid}__cc_homotopic_occipital_${params.template_space}.trk" -f - scil_tractogram_math.py union ${trk03} "${sid}__cc_homotopic_temporal_${params.template_space}.trk" -f - scil_tractogram_math.py union ${trk04} "${sid}__cc_homotopic_parietal_${params.template_space}.trk" -f - cp ${trk05} ${sid}__cc_homotopic_insular_${params.template_space}.trk -f - cp ${trk06} ${sid}__cc_homotopic_cingulum_${params.template_space}.trk -f - """ -} - -/* -RENAME CORTICO_STRIATE -*/ -BG_ipsi_Caud_for_rename.concat(BG_ipsi_Put_for_rename).groupTuple(by:[0,1]).set{corticostriate_for_rename} -process Rename_cortico_striate { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), file(tractogram) from corticostriate_for_rename - - output: - set sid, "${sid}__corticostriatal_${side}_${params.template_space}.trk" into corticostriatal_for_register_to_orig - - when: - params.extended - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__corticostriatal_${side}_${params.template_space}.trk -f - """ -} - -/* -RENAME Corona radiata -*/ -process Rename_coronaradiata { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), file(tractogram) from BG_ipsi_Thal_for_rename - - output: - set sid, "${sid}__coronaradiata_${side}_${params.template_space}.trk" into coronaradiata_for_register_to_orig - - when: - params.extended - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__coronaradiata_${side}_${params.template_space}.trk -f - """ -} - -/* -RENAME OPTICAL RADIATION -*/ -process Rename_optical_radiation { - publishDir = 
params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), val(list), file(tractogram) from optic_radiation_for_rename - - output: - set sid, "${sid}__optical_radiation_${side}_${params.template_space}.trk" into optical_radiation_for_register_to_orig - - when: - params.extended - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__optical_radiation_${side}_${params.template_space}.trk -f - """ -} - -/* -RENAME U SHAPE -*/ -process Rename_ushape { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_u_shape_for_rename - - output: - set sid, "${sid}__ushape_${side}_${params.template_space}.trk" into ushape_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__ushape_${side}_${params.template_space}.trk - """ -} - -/* -RENAME CING -*/ -process Rename_cing { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), file(tractogram) from Cing_for_rename - - output: - set sid, "${sid}__cing_${side}_${params.template_space}.trk" into cing_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__cing_${side}_${params.template_space}.trk - """ -} - -/* -RENAME SLF -*/ -asso_all_intra_inter_dorsal_all_f_O_for_rename.concat(asso_all_intra_inter_dorsal_f_p_for_rename).groupTuple(by:[0,1]).set{slf_for_rename} -process Rename_slf { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), val(list), file(tractogram) from slf_for_rename - - output: - set sid, "${sid}__slf_${side}_${params.template_space}.trk" into slf_for_register_to_orig - - when: - params.extended - - script: - """ - scil_tractogram_math.py union ${tractogram} ${sid}__slf_${side}_${params.template_space}.trk -f - """ -} - -/* -RENAME AF -*/ -process Rename_af { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), 
asso_list, file(tractogram) from asso_all_intra_inter_dorsal_all_f_T_for_rename - - output: - set sid, "${sid}__af_${side}_${params.template_space}.trk" into af_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__af_${side}_${params.template_space}.trk -f - """ -} - -/* -RENAME Cortico-pontine_F -*/ -process Rename_corticopontine_F { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, file(tractogram) from brainstem_corticopontine_frontal_for_rename - each side from side_corticopontineF - - output: - set sid, "${sid}__corticopontine_frontal_${side}_${params.template_space}.trk" into corticopontine_frontal_for_register_to_orig - file "${sid}__corticopontine_frontal_${side}_${params.template_space}.txt" - - when: - params.extended - - script: - filtering_list=params.FLF+"frontal_${side}.txt" - out_extension="corticopontine_frontal_${side}_${params.template_space}" - remaining_extension="lost" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -/* -RENAME cortico-pontine_POT -*/ -process Rename_corticopontine_POT { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, file(tractogram) from brainstem_ee_corticopontine_parietotemporooccipital_for_rename - each side from side_corticopontinePOT - - output: - set sid, "${sid}__corticopontine_POT_${side}_${params.template_space}.trk" into corticopontine_POT_for_register_to_orig - file "${sid}__corticopontine_POT_${side}_${params.template_space}.txt" - - when: - params.extended - - script: - filtering_list=params.FLF+"parieto_temporo_occipital_${side}.txt" - out_extension="corticopontine_POT_${side}_${params.template_space}" - remaining_extension="lost" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -/* -RENAME Pyramidal tract (CST) -*/ -process Rename_cst { - publishDir = 
params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, file(tractogram) from brainstem_pyramidal_for_rename - each side from side_cst - - output: - set sid, "${sid}__cst_${side}_${params.template_space}.trk" into cst_for_register_to_orig - file "${sid}__cst_${side}_${params.template_space}.txt" - - when: - params.extended - - script: - filtering_list=params.FLF+"fronto_parietal_${side}.txt" - out_extension="cst_${side}_${params.template_space}" - remaining_extension="lost" - basename="${sid}" - keep=false - extract_masks="" - distance="$params.distance" - - template "filter_with_list.sh" -} - -/* -RENAME fornix -*/ -process Rename_fornix { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, file(tractogram) from fornix_for_rename - - output: - set sid, "${sid}__fornix_${params.template_space}.trk" into fornix_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__fornix_${params.template_space}.trk -f - """ -} - -/* -RENAME IFOF -*/ -process Rename_ifof { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_IFOF_for_rename - - output: - set sid, "${sid}__ifof_${side}_${params.template_space}.trk" into ifof_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__ifof_${side}_${params.template_space}.trk -f - """ -} - -/* -RENAME UF -*/ -process Rename_uf { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), file(tractogram) from asso_UF_for_rename - - output: - set sid, "${sid}__uf_${side}_${params.template_space}.trk" into uf_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__uf_${side}_${params.template_space}.trk -f - """ -} - -/* -RENAME ILF -*/ -process Rename_ilf { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, val(side), file(tractogram) from 
all_O_T_for_rename - - output: - set sid, "${sid}__ilf_${side}_${params.template_space}.trk" into ilf_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__ilf_${side}_${params.template_space}.trk -f - """ -} - -/* -RENAME BRAINSTEM -*/ -process Rename_brainstem { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, file(tractogram) from brainstem_for_rename - - output: - set sid, "${sid}__brainstem_${params.template_space}.trk" into brainstem_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__brainstem_${params.template_space}.trk -f - """ -} - -/* -RENAME CEREBELLUM -*/ -process Rename_cerebellum { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, file(tractogram) from cerebellum_for_rename - - output: - set sid, "${sid}__cerebellum_${params.template_space}.trk" into cerebellum_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__cerebellum_${params.template_space}.trk -f - """ -} - -/* -RENAME AC_CX -*/ -process Rename_accx { - publishDir = params.final_output_bundles_mni_space - cpus 1 - - input: - set sid, file(tractogram) from accx_for_rename - - output: - set sid, "${sid}__accx_${params.template_space}.trk" into accx_for_register_to_orig - - when: - params.extended - - script: - """ - cp ${tractogram} ${sid}__accx_${params.template_space}.trk -f - """ -} - - -trks_for_register = Channel.empty() -bundles_for_register = Channel.empty() - -if (params.orig){ - if (params.extended){ - cc_homotopic_frontal_for_register_to_orig - .concat(cc_homotopic_occipital_for_register_to_orig) - .concat(cc_homotopic_temporal_for_register_to_orig) - .concat(cc_homotopic_parietal_for_register_to_orig) - .concat(cc_homotopic_insular_for_register_to_orig) - .concat(cc_homotopic_cingulum_for_register_to_orig) - .concat(corticostriatal_for_register_to_orig) - 
.concat(coronaradiata_for_register_to_orig) - .concat(optical_radiation_for_register_to_orig) - .concat(ushape_for_register_to_orig) - .concat(cing_for_register_to_orig) - .concat(slf_for_register_to_orig) - .concat(af_for_register_to_orig) - .concat(corticopontine_frontal_for_register_to_orig) - .concat(corticopontine_POT_for_register_to_orig) - .concat(cst_for_register_to_orig) - .concat(fornix_for_register_to_orig) - .concat(ifof_for_register_to_orig) - .concat(uf_for_register_to_orig) - .concat(ilf_for_register_to_orig) - .concat(brainstem_for_register_to_orig) - .concat(cerebellum_for_register_to_orig) - .concat(accx_for_register_to_orig) - .concat(plausible_commissural_for_register_to_orig) - .combine(transformation_and_t1_for_transformation_to_orig_bundles, by: 0) - .set{bundles_for_register} - - trk_plausible_for_register_plausible_to_orig - .concat(trk_unplausible_for_register_to_orig) - .combine(transformation_and_t1_for_transformation_to_orig, by: 0) - .set{trks_for_register} - } - else{ - trk_plausible_for_register_plausible_to_orig - .concat(trk_unplausible_for_register_to_orig) - .combine(transformation_and_t1_for_transformation_to_orig, by: 0) - .set{trks_for_register} } } -else{ - trks_for_register = Channel.create() - trks_for_register.close() -} - -process Register_to_orig{ - publishDir = params.final_output_orig_space - cpus 1 - - input: - set sid, file(trk), file(t1), file(transfo), file(inv_deformation), file(deformation) from trks_for_register - - output: - set sid, "${sid}__*_${params.orig_space}.trk" - - when: - params.orig - - script: - """ - scil_apply_transform_to_tractogram.py ${trk} ${t1} ${transfo} ${trk.getSimpleName().replaceAll("mni_space", "orig_space")}.trk --in_deformation ${deformation} --reverse_operation --keep_invalid - """ -} - -process Register_bundles_to_orig{ - publishDir = params.final_output_bundles_orig_space - cpus 1 - - input: - set sid, file(trk), file(t1), file(transfo), file(inv_deformation), file(deformation) 
from bundles_for_register - - output: - set sid, "${sid}__*_${params.orig_space}.trk" - - when: - params.orig - - script: - """ - scil_apply_transform_to_tractogram.py ${trk} ${t1} ${transfo} ${trk.getSimpleName().replaceAll("mni_space", "orig_space")}.trk --in_deformation ${deformation} --reverse_operation --keep_invalid - """ -} - -process Copy_t1_to_orig{ - publishDir = params.final_output_orig_space - cpus 1 - - input: - tuple sid, file(t1) from t1s_for_copy_to_orig - - output: - file("${sid}__t1_orig_space.nii.gz") - - when: - params.orig - - script: - """ - cp ${t1} ${sid}__t1_orig_space.nii.gz - """ -} diff --git a/modules.json b/modules.json new file mode 100644 index 0000000..f840dd7 --- /dev/null +++ b/modules.json @@ -0,0 +1,41 @@ +{ + "name": "", + "homePage": "", + "repos": { + "https://github.com/scilus/nf-neuro.git": { + "modules": { + "nf-neuro": { + "betcrop/antsbet": { + "branch": "main", + "git_sha": "0135a3ba712e89ad44419476494fca6fa3dd67ff", + "installed_by": ["modules"] + }, + "registration/ants": { + "branch": "main", + "git_sha": "2b45c6a74dc9b5b793d7e40c6a715f4c07064864", + "installed_by": ["modules"] + }, + "registration/tractogram": { + "branch": "main", + "git_sha": "0135a3ba712e89ad44419476494fca6fa3dd67ff", + "installed_by": ["modules"] + }, + "tractogram/removeinvalid": { + "branch": "main", + "git_sha": "0135a3ba712e89ad44419476494fca6fa3dd67ff", + "installed_by": ["modules"] + } + } + }, + "subworkflows": { + "nf-neuro": { + "load_test_data": { + "branch": "main", + "git_sha": "a79cb5c9645269db389c563f674b17c5e900a50b", + "installed_by": ["subworkflows"] + } + } + } + } + } +} diff --git a/modules/local/filtering/filter_with_list.nf b/modules/local/filtering/filter_with_list.nf new file mode 100644 index 0000000..2a700be --- /dev/null +++ b/modules/local/filtering/filter_with_list.nf @@ -0,0 +1,152 @@ +process FILTER_LIST { + tag "$meta.id" + cpus 1 + + container 'scilus/scilpy:dev' + + input: + tuple val(meta), path(tractogram) + + 
output: + tuple val(meta), path("${meta.id}__${task.ext.out_extension}.trk"), emit: extracted + tuple val(meta), path("${meta.id}__${task.ext.remaining_extension}.trk"), optional: true, emit: remaining + path "${meta.id}__${task.ext.out_extension}.txt" + + script: + basename = "${meta.id}" + filtering_list = task.ext.filtering_list + out_extension = task.ext.out_extension + remaining_extension = task.ext.remaining_extension + keep = task.ext.keep + extract_masks = "" + distance = task.ext.distance + + template "filter_with_list.sh" +} + +// This is the same as above, except that it supports input repeater for lists +// and sides. +process FILTER_LIST_EACH { + tag "${meta.id}" + cpus 1 + + container 'scilus/scilpy:dev' + + input: + tuple val(meta), path(tractogram) + each list + each side + + output: + tuple val(meta), path("${meta.id}__${task.ext.out_extension}${out_suffix}.trk"), emit: extracted + tuple val(meta), val(side), path("${meta.id}__${task.ext.out_extension}${out_suffix}.trk"), emit: extracted_with_side + tuple val(meta), val(list), path("${meta.id}__${task.ext.out_extension}${out_suffix}.trk"), emit: extracted_with_list + tuple val(meta), val(side), val(list), path("${meta.id}__${task.ext.out_extension}${out_suffix}.trk"), emit: extracted_with_side_list + tuple val(meta), path("${meta.id}__${task.ext.remaining_extension}${out_suffix}.trk"), optional: true, emit: remaining + tuple val(meta), val(side), path("${meta.id}__${task.ext.remaining_extension}${out_suffix}.trk"), optional: true, emit: remaining_with_side + tuple val(meta), val(list), path("${meta.id}__${task.ext.remaining_extension}${out_suffix}.trk"), optional: true, emit: remaining_with_list + tuple val(meta), val(side), val(list), path("${meta.id}__${task.ext.remaining_extension}${out_suffix}.trk"), optional: true, emit: remaining_with_side_list + path "${meta.id}__${task.ext.out_extension}${out_suffix}.txt" + + script: + mid_suffix = task.ext.mid_suffix != null ? 
task.ext.mid_suffix : "" + reverse_suffix = task.ext.reverse_suffix != null ? task.ext.reverse_suffix : false + out_suffix = task.ext.out_suffix != null ? task.ext.out_suffix : buildSuffix(side, list, mid_suffix, reverse_suffix) + list_suffix = task.ext.list_suffix != null ? task.ext.list_suffix : buildSuffix(side, list, mid_suffix, reverse_suffix) + basename = "${meta.id}" + filtering_list = addSuffixToFile(task.ext.filtering_list, list_suffix) + out_extension = task.ext.out_extension + out_suffix + remaining_extension = task.ext.remaining_extension + out_suffix + keep = task.ext.keep + extract_masks = "" + distance = task.ext.distance + + template "filter_with_list.sh" +} + +// This is the same as above, except that it takes a side as an input. +process FILTER_LIST_SIDE { + tag "${meta.id}" + cpus 1 + + container 'scilus/scilpy:dev' + + input: + tuple val(meta), val(side), path(tractogram) + each list + + output: + tuple val(meta), path("${meta.id}__${task.ext.out_extension}${out_suffix}.trk"), emit: extracted + tuple val(meta), val(side), path("${meta.id}__${task.ext.out_extension}${out_suffix}.trk"), emit: extracted_with_side + tuple val(meta), val(list), path("${meta.id}__${task.ext.out_extension}${out_suffix}.trk"), emit: extracted_with_list + tuple val(meta), val(side), val(list), path("${meta.id}__${task.ext.out_extension}${out_suffix}.trk"), emit: extracted_with_side_list + tuple val(meta), path("${meta.id}__${task.ext.remaining_extension}${out_suffix}.trk"), optional: true, emit: remaining + tuple val(meta), val(side), path("${meta.id}__${task.ext.remaining_extension}${out_suffix}.trk"), optional: true, emit: remaining_with_side + tuple val(meta), val(list), path("${meta.id}__${task.ext.remaining_extension}${out_suffix}.trk"), optional: true, emit: remaining_with_list + tuple val(meta), val(side), val(list), path("${meta.id}__${task.ext.remaining_extension}${out_suffix}.trk"), optional: true, emit: remaining_with_side_list + path 
"${meta.id}__${task.ext.out_extension}${out_suffix}.txt" + + script: + mid_suffix = task.ext.mid_suffix != null ? task.ext.mid_suffix : "" + reverse_suffix = task.ext.reverse_suffix != null ? task.ext.reverse_suffix : false + out_suffix = task.ext.out_suffix != null ? task.ext.out_suffix : buildSuffix(side, list, mid_suffix, reverse_suffix) + list_suffix = task.ext.list_suffix != null ? task.ext.list_suffix : buildSuffix(side, list, mid_suffix, reverse_suffix) + basename = "${meta.id}" + filtering_list = addSuffixToFile(task.ext.filtering_list, list_suffix) + out_extension = task.ext.out_extension + out_suffix + remaining_extension = task.ext.remaining_extension + out_suffix + keep = task.ext.keep + extract_masks = "" + distance = task.ext.distance + + template "filter_with_list.sh" +} + +def addSuffixToFile(str, suffix) { + def file = new File(str) + + def name = file.name // 'file.txt' + def baseName = name.lastIndexOf('.') >= 0 ? name[0..name.lastIndexOf('.') - 1] : name + def extension = name.lastIndexOf('.') >= 0 ? name[name.lastIndexOf('.')..-1] : '' + + def newFileName = "${baseName}${suffix}${extension}" + def newPath = new File(file.parent, newFileName).path + + return newPath +} + +def buildSuffix(side, list, mid_suffix, reversed) { + def suffix = "" + // The suffix has the following format: + // _${side}_${mid_suffix}_${list} + // or + // _${list}_${mid_suffix}_${side} (if reversed) + if (!reversed) { + if (side != null && side.trim() != "") { + suffix += "_${side}" + } + } + else { + if (list != null && list.trim() != "") { + suffix += "_${list}" + } + } + + // Middle part of the suffix if there is one. 
+ if (mid_suffix != null && mid_suffix.trim() != "") { + suffix += "_${mid_suffix}" + } + + if (!reversed) { + if (list != null && list.trim() != "") { + suffix += "_${list}" + } + } + else { + if (side != null && side.trim() != "") { + suffix += "_${side}" + } + } + + return suffix +} \ No newline at end of file diff --git a/modules/local/filtering/major_filtering.nf b/modules/local/filtering/major_filtering.nf new file mode 100644 index 0000000..b288da4 --- /dev/null +++ b/modules/local/filtering/major_filtering.nf @@ -0,0 +1,36 @@ +process MAJOR_FILTERING { + tag "$meta.id" + cpus params.processes_major_filtering + + container 'scilus/scilpy:dev' + + input: + tuple val(meta), path(tractogram) + + output: + tuple val(meta), path("${meta.id}__wb_clean01.trk"), emit: wb + tuple val(meta), path("${meta.id}__unplausible_streamlines.trk"), emit: unplausible + path("${meta.id}/*"), optional: true + + script: + keep_intermediate_trk_flag="" + if (params.keep_intermediate_steps) { + keep_intermediate_trk_flag="--save_intermediate_tractograms" + } + """ + scil_tractogram_filter_by_anatomy ${tractogram} \ + ${params.rois_folder}/${params.atlas.JHU_8} \ + ${meta.id} \ + --minL ${params.min_streamline_length} \ + --maxL ${params.max_streamline_length} \ + --angle ${params.loop_angle_threshold} \ + --csf_bin ${params.rois_folder}/${params.atlas.csf} \ + --processes ${params.processes_major_filtering} \ + --save_rejected \ + $keep_intermediate_trk_flag \ + -f + + mv ${meta.id}/${tractogram.getSimpleName()}_filtered.trk ${meta.id}__wb_clean01.trk + mv ${meta.id}/${tractogram.getSimpleName()}_rejected.trk ${meta.id}__unplausible_streamlines.trk + """ +} \ No newline at end of file diff --git a/modules/local/merge/main.nf b/modules/local/merge/main.nf new file mode 100644 index 0000000..9ed4cbf --- /dev/null +++ b/modules/local/merge/main.nf @@ -0,0 +1,40 @@ +process TRACTOGRAM_MATH { + tag "$meta.id" + cpus 1 + + container 'scilus/scilpy:dev' + + input: + tuple val(meta), 
val(side), path(in_tractograms)
+
+    output:
+    tuple val(meta), path("${out_path}"), emit: tractogram
+    tuple val(meta), val(side), path("${out_path}"), emit: tractogram_with_side
+
+    script:
+    operation = task.ext.op
+    out_name = task.ext.out_name ? task.ext.out_name : ""
+    out_suffix = task.ext.out_suffix ? task.ext.out_suffix : ""
+    save_empty = task.ext.save_empty ? task.ext.save_empty : false
+    force = task.ext.force ? task.ext.force : false
+
+    if (!operation) {
+        error 'Error ~ No operation specified for TRACTOGRAM_MATH process. Please set "op" in the task.ext configuration.'
+    }
+
+    save_empty_str = save_empty ? "--save_empty" : ""
+    force_str = force ? "-f" : ""
+
+    tractograms = in_tractograms.join(' ')
+    side_suffix = side ? "_${side}" : ""
+    out_path = "${meta.id}__${out_name}${side_suffix}${out_suffix}.trk"
+    """
+    scil_tractogram_math \
+        ${operation} \
+        ${tractograms} \
+        ${out_path} \
+        ${save_empty_str} \
+        ${force_str}
+    ls -lh
+    """
+}
\ No newline at end of file
diff --git a/modules/local/utils/copy_file.nf b/modules/local/utils/copy_file.nf
new file mode 100644
index 0000000..b656258
--- /dev/null
+++ b/modules/local/utils/copy_file.nf
@@ -0,0 +1,22 @@
+process COPY_FILE {
+    tag "${meta.id}"
+    cpus 1
+
+    input:
+    tuple val(meta), val(side), path(file)
+    output:
+    tuple val(meta), path("${filename}"), emit: output_file
+
+    script:
+    out_name = task.ext.out_name ? task.ext.out_name : ""
+    out_suffix = task.ext.out_suffix ? task.ext.out_suffix : ""
+    force = task.ext.force ? task.ext.force : false
+    ext = task.ext.ext ? task.ext.ext : ".trk"
+
+    side_suffix = side ? "_${side}" : ""
+    force_str = force ? 
"-f" : ""
+    filename = "${meta.id}__${out_name}${side_suffix}${out_suffix}${ext}"
+    """
+    cp ${file} ${filename} ${force_str}
+    """
+}
\ No newline at end of file
diff --git a/modules/nf-neuro/betcrop/antsbet/environment.yml b/modules/nf-neuro/betcrop/antsbet/environment.yml
new file mode 100644
index 0000000..893edb3
--- /dev/null
+++ b/modules/nf-neuro/betcrop/antsbet/environment.yml
@@ -0,0 +1,3 @@
+channels: []
+dependencies: []
+name: betcrop_antsbet
diff --git a/modules/nf-neuro/betcrop/antsbet/main.nf b/modules/nf-neuro/betcrop/antsbet/main.nf
new file mode 100644
index 0000000..bc42872
--- /dev/null
+++ b/modules/nf-neuro/betcrop/antsbet/main.nf
@@ -0,0 +1,71 @@
+
+process BETCROP_ANTSBET {
+    tag "$meta.id"
+    label 'process_high'
+
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://scil.usherbrooke.ca/containers/scilus_2.0.2.sif':
+        'scilus/scilus:2.0.2' }"
+
+    input:
+    tuple val(meta), path(t1), path(template), path(tissues_probabilities), path(mask), path(initial_affine)
+
+    output:
+    tuple val(meta), path("*t1_bet.nii.gz")     , emit: t1
+    tuple val(meta), path("*t1_bet_mask.nii.gz"), emit: mask
+    path "versions.yml"                         , emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    def args = []
+    if (mask) args += ["-f $mask"]
+    if (initial_affine) args += ["-r $initial_affine"]
+
+    """
+    export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus
+    export OMP_NUM_THREADS=1
+    export OPENBLAS_NUM_THREADS=1
+    export ANTS_RANDOM_SEED=1234
+
+    antsBrainExtraction.sh -d 3 -a $t1 -o bet/ -u 0 \
+        -e $template -m $tissues_probabilities ${args.join(' ')}
+    scil_volume_math.py convert bet/BrainExtractionMask.nii.gz \
+        ${prefix}__t1_bet_mask.nii.gz --data_type uint8
+    scil_volume_math.py multiplication $t1 ${prefix}__t1_bet_mask.nii.gz \
+        ${prefix}__t1_bet.nii.gz --data_type float32
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        scilpy: \$(pip list | grep scilpy | tr -s ' ' | cut -d' ' -f2)
+        ants: \$(antsRegistration --version | grep "Version" | sed -E 's/.*v([0-9]+\\.[0-9]+\\.[0-9]+).*/\\1/')
+    END_VERSIONS
+    """
+
+    stub:
+    def prefix = task.ext.prefix ?: "${meta.id}"
+
+    """
+    set +e
+    function handle_code () {
+    local code=\$?
+    ignore=( 1 )
+    [[ " \${ignore[@]} " =~ " \$code " ]] || exit \$code
+    }
+    trap 'handle_code' ERR
+
+    antsBrainExtraction.sh
+    scil_volume_math.py -h
+
+    touch ${prefix}__t1_bet.nii.gz
+    touch ${prefix}__t1_bet_mask.nii.gz
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        scilpy: \$(pip list | grep scilpy | tr -s ' ' | cut -d' ' -f2)
+        ants: \$(antsRegistration --version | grep "Version" | sed -E 's/.*v([0-9]+\\.[0-9]+\\.[0-9]+).*/\\1/')
+    END_VERSIONS
+    """
+}
diff --git a/modules/nf-neuro/betcrop/antsbet/meta.yml b/modules/nf-neuro/betcrop/antsbet/meta.yml
new file mode 100644
index 0000000..abb938c
--- /dev/null
+++ b/modules/nf-neuro/betcrop/antsbet/meta.yml
@@ -0,0 +1,83 @@
+---
+name: "betcrop_antsbet"
+description: Perform Brain extraction using antsBrainExtraction.sh on T1 image.
+keywords:
+  - T1
+  - BET
+  - ants
+  - scilpy
+tools:
+  - "scilpy":
+      description: "The Sherbrooke Connectivity Imaging Lab (SCIL) Python dMRI processing toolbox."
+      homepage: "https://github.com/scilus/scilpy.git"
+  - "ants":
+      description: "Advanced Normalization Tools."
+      homepage: "https://github.com/ANTsX/ANTs"
+
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. `[ id:'test', single_end:false ]`
+
+  - t1:
+      type: file
+      description: Nifti t1 volume to perform BET.
+      pattern: "*.{nii,nii.gz}"
+      mandatory: true
+
+  - template:
+      type: file
+      description: Nifti Anatomical template to perform BET.
+      pattern: "*.{nii,nii.gz}"
+      mandatory: true
+
+  - tissues_probabilities:
+      type: file
+      description: |
+        Brain probability mask (in template space), with intensity
+        range 1 (definitely brain) to 0 (definitely background).
+ pattern: "*.{nii,nii.gz}" + mandatory: true + + - mask: + type: file + description: | + Brain mask (in template space) used to restrict metric + computation when performing registration. + pattern: "*.{nii,nii.gz}" + mandatory: false + + - initial_affine: + type: file + description: | + Affine transform from T1w space to DWI space, used as + initialization for registration algorithms. + pattern: "*.{mat/txt}" + mandatory: false + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'test', single_end:false ]` + + - t1: + type: file + description: Nifti T1 volume brain-extracted. + pattern: "*t1_bet.{nii,nii.gz}" + + - mask: + type: file + description: T1 mask brain-extracted and cropped. + pattern: "*t1_bet_mask.{nii,nii.gz}" + + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@ThoumyreStanislas" diff --git a/modules/nf-neuro/betcrop/antsbet/tests/main.nf.test b/modules/nf-neuro/betcrop/antsbet/tests/main.nf.test new file mode 100644 index 0000000..c3c8075 --- /dev/null +++ b/modules/nf-neuro/betcrop/antsbet/tests/main.nf.test @@ -0,0 +1,142 @@ +nextflow_process { + + name "Test Process BETCROP_ANTSBET" + script "../main.nf" + process "BETCROP_ANTSBET" + config "./nextflow.config" + + tag "modules" + tag "modules_nfcore" + tag "betcrop" + tag "betcrop/antsbet" + + tag "subworkflows" + tag "subworkflows/load_test_data" + + test("betcrop - antsbet") { + setup { + run("LOAD_TEST_DATA", alias: "LOAD_DATA") { + script "../../../../../subworkflows/nf-neuro/load_test_data/main.nf" + process { + """ + input[0] = Channel.from( [ "T1w.zip", "transform.zip", "antsbet.zip" ] ) + input[1] = "test.load-test-data" + """ + } + } + } + when { + process { + """ + ch_split_test_data = LOAD_DATA.out.test_data_directory + .branch{ + t1: it.simpleName == "T1w" + transform: it.simpleName == "transform" + template: it.simpleName == "antsbet" + } + ch_t1 = 
ch_split_test_data.t1.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/T1w.nii.gz") + ] + } + ch_template = ch_split_test_data.template.map{ + test_data_directory -> [ + [ id: 'test' ], + file("\${test_data_directory}/t1_template.nii.gz"), + file("\${test_data_directory}/t1_brain_probability_map.nii.gz") + ] + } + ch_mask = ch_split_test_data.transform.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/t1_to_bet_template_mask.nii.gz") + ] + } + ch_transform = ch_split_test_data.transform.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/t1_to_bet_template.mat") + ] + } + input[0] = ch_t1 + .join(ch_template) + .join(ch_mask) + .join(ch_transform) + """ + } + } + then { + assertAll( + { assert process.success }, + { assert snapshot( + file(process.out.t1.get(0).get(1)).name, + file(process.out.mask.get(0).get(1)).name, + process.out.versions + ).match() } + ) + } + } + test("betcrop - antsbet - stub-run") { + tag "stub" + options "-stub-run" + setup { + run("LOAD_TEST_DATA", alias: "LOAD_DATA") { + script "../../../../../subworkflows/nf-neuro/load_test_data/main.nf" + process { + """ + input[0] = Channel.from( [ "T1w.zip", "transform.zip", "antsbet.zip" ] ) + input[1] = "test.load-test-data" + """ + } + } + } + when { + process { + """ + ch_split_test_data = LOAD_DATA.out.test_data_directory + .branch{ + t1: it.simpleName == "T1w" + transform: it.simpleName == "transform" + template: it.simpleName == "antsbet" + } + ch_t1 = ch_split_test_data.t1.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/T1w.nii.gz") + ] + } + ch_template = ch_split_test_data.template.map{ + test_data_directory -> [ + [ id: 'test' ], + file("\${test_data_directory}/t1_template.nii.gz"), + file("\${test_data_directory}/t1_brain_probability_map.nii.gz") + ] + } + ch_mask = ch_split_test_data.transform.map{ + test_data_directory -> [ + [ id:'test' ], + 
file("\${test_data_directory}/t1_to_bet_template_mask.nii.gz") + ] + } + ch_transform = ch_split_test_data.transform.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/t1_to_bet_template.mat") + ] + } + input[0] = ch_t1 + .join(ch_template) + .join(ch_mask) + .join(ch_transform) + """ + } + } + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.versions).match() } + ) + } + } +} diff --git a/modules/nf-neuro/betcrop/antsbet/tests/main.nf.test.snap b/modules/nf-neuro/betcrop/antsbet/tests/main.nf.test.snap new file mode 100644 index 0000000..aed6d4b --- /dev/null +++ b/modules/nf-neuro/betcrop/antsbet/tests/main.nf.test.snap @@ -0,0 +1,28 @@ +{ + "betcrop - antsbet": { + "content": [ + "test__t1_bet.nii.gz", + "test__t1_bet_mask.nii.gz", + [ + "versions.yml:md5,bb378e913d4002d0b644d3277892ae20" + ] + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.5" + }, + "timestamp": "2025-03-24T17:47:54.836534884" + }, + "betcrop - antsbet - stub-run": { + "content": [ + [ + "versions.yml:md5,bb378e913d4002d0b644d3277892ae20" + ] + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.3" + }, + "timestamp": "2025-01-14T20:42:47.191136661" + } +} \ No newline at end of file diff --git a/modules/nf-neuro/betcrop/antsbet/tests/nextflow.config b/modules/nf-neuro/betcrop/antsbet/tests/nextflow.config new file mode 100644 index 0000000..d8b1ce8 --- /dev/null +++ b/modules/nf-neuro/betcrop/antsbet/tests/nextflow.config @@ -0,0 +1,6 @@ +process { + withName: "BETCROP_ANTSBET" { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + } + cpus = 1 +} diff --git a/modules/nf-neuro/betcrop/antsbet/tests/tags.yml b/modules/nf-neuro/betcrop/antsbet/tests/tags.yml new file mode 100644 index 0000000..e4387ae --- /dev/null +++ b/modules/nf-neuro/betcrop/antsbet/tests/tags.yml @@ -0,0 +1,2 @@ +betcrop/antsbet: + - "modules/nf-neuro/betcrop/antsbet/**" diff --git 
a/modules/nf-neuro/registration/ants/environment.yml b/modules/nf-neuro/registration/ants/environment.yml new file mode 100644 index 0000000..2c00919 --- /dev/null +++ b/modules/nf-neuro/registration/ants/environment.yml @@ -0,0 +1,3 @@ +channels: [] +dependencies: [] +name: registration_ants diff --git a/modules/nf-neuro/registration/ants/main.nf b/modules/nf-neuro/registration/ants/main.nf new file mode 100644 index 0000000..289b837 --- /dev/null +++ b/modules/nf-neuro/registration/ants/main.nf @@ -0,0 +1,150 @@ + +process REGISTRATION_ANTS { + tag "$meta.id" + label 'process_medium' + + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://scil.usherbrooke.ca/containers/scilus_latest.sif': + 'scilus/scilus:19c87b72bcbc683fb827097dda7f917940fda123'}" + + input: + tuple val(meta), path(fixedimage), path(movingimage), path(mask) //** optional, input = [] **// + + output: + tuple val(meta), path("*_warped.nii.gz") , emit: image + tuple val(meta), path("*__output0Warp.nii.gz") , emit: warp, optional:true + tuple val(meta), path("*__output1GenericAffine.mat") , emit: affine + tuple val(meta), path("*__output1InverseWarp.nii.gz") , emit: inverse_warp, optional: true + tuple val(meta), path("*__output0InverseAffine.mat") , emit: inverse_affine + tuple val(meta), path("*_registration_ants_mqc.gif") , emit: mqc, optional: true + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def suffix_qc = task.ext.suffix_qc ? "${task.ext.suffix_qc}" : "" + def ants = task.ext.quick ? "antsRegistrationSyNQuick.sh " : "antsRegistrationSyN.sh " + def dimension = task.ext.dimension ? "-d " + task.ext.dimension : "-d 3" + def transform = task.ext.transform ? task.ext.transform : "s" + def seed = task.ext.random_seed ? " -e " + task.ext.random_seed : "-e 1234" + def run_qc = task.ext.run_qc ? 
task.ext.run_qc : false + + if ( task.ext.threads ) args += "-n " + task.ext.threads + if ( task.ext.initial_transform ) args += " -i " + task.ext.initial_transform + if ( task.ext.histogram_bins ) args += " -r " + task.ext.histogram_bins + if ( task.ext.spline_distance ) args += " -s " + task.ext.spline_distance + if ( task.ext.gradient_step ) args += " -g " + task.ext.gradient_step + if ( task.ext.mask ) args += " -x $mask" + if ( task.ext.type ) args += " -p " + task.ext.type + if ( task.ext.histogram_matching ) args += " -j " + task.ext.histogram_matching + if ( task.ext.repro_mode ) args += " -y " + task.ext.repro_mode + if ( task.ext.collapse_output ) args += " -z " + task.ext.collapse_output + + """ + export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=$task.cpus + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + + $ants $dimension -f $fixedimage -m $movingimage -o output -t $transform $args $seed + + mv outputWarped.nii.gz ${prefix}__warped.nii.gz + mv output0GenericAffine.mat ${prefix}__output1GenericAffine.mat + + if [ $transform != "t" ] && [ $transform != "r" ] && [ $transform != "a" ]; + then + mv output1InverseWarp.nii.gz ${prefix}__output1InverseWarp.nii.gz + mv output1Warp.nii.gz ${prefix}__output0Warp.nii.gz + fi + + antsApplyTransforms -d 3 -i $fixedimage -r $movingimage -o Linear[output.mat]\ + -t [${prefix}__output1GenericAffine.mat,1] + + mv output.mat ${prefix}__output0InverseAffine.mat + + ls + + ### ** QC ** ### + if $run_qc; + then + mv $fixedimage fixedimage.nii.gz + extract_dim=\$(mrinfo fixedimage.nii.gz -size) + read sagittal_dim coronal_dim axial_dim <<< "\${extract_dim}" + + # Get the middle slice + coronal_dim=\$((\$coronal_dim / 2)) + axial_dim=\$((\$axial_dim / 2)) + sagittal_dim=\$((\$sagittal_dim / 2)) + + # Set viz params. + viz_params="--display_slice_number --display_lr --size 256 256" + # Iterate over images. 
+ for image in fixedimage warped; + do + scil_viz_volume_screenshot.py *\${image}.nii.gz \${image}_coronal.png \ + --slices \$coronal_dim --axis coronal \$viz_params + scil_viz_volume_screenshot.py *\${image}.nii.gz \${image}_sagittal.png \ + --slices \$sagittal_dim --axis sagittal \$viz_params + scil_viz_volume_screenshot.py *\${image}.nii.gz \${image}_axial.png \ + --slices \$axial_dim --axis axial \$viz_params + if [ \$image != fixedimage ]; + then + title="T1 Warped" + else + title="fixedimage" + fi + convert +append \${image}_coronal*.png \${image}_axial*.png \ + \${image}_sagittal*.png \${image}_mosaic.png + convert -annotate +20+230 "\${title}" -fill white -pointsize 30 \ + \${image}_mosaic.png \${image}_mosaic.png + # Clean up. + rm \${image}_coronal*.png \${image}_sagittal*.png \${image}_axial*.png + done + # Create GIF. + convert -delay 10 -loop 0 -morph 10 \ + warped_mosaic.png fixedimage_mosaic.png warped_mosaic.png \ + ${prefix}_${suffix_qc}_registration_ants_mqc.gif + # Clean up. + rm *_mosaic.png + fi + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ants: \$(antsRegistration --version | grep "Version" | sed -E 's/.*v([0-9]+\\.[0-9]+\\.[0-9]+).*/\\1/') + mrtrix: \$(mrinfo -version 2>&1 | sed -n 's/== mrinfo \\([0-9.]\\+\\).*/\\1/p') + imagemagick: \$(magick -version | sed -n 's/.*ImageMagick \\([0-9]\\{1,\\}\\.[0-9]\\{1,\\}\\.[0-9]\\{1,\\}\\).*/\\1/p') + END_VERSIONS + """ + + stub: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + + """ + set +e + function handle_code () { + local code=\$? 
+ ignore=( 1 ) + [[ " \${ignore[@]} " =~ " \$code " ]] || exit \$code + } + trap 'handle_code' ERR + + antsRegistrationSyNQuick.sh -h + antsApplyTransforms -h + + touch ${prefix}__t1_warped.nii.gz + touch ${prefix}__output1GenericAffine.mat + touch ${prefix}__output0InverseAffine.mat + touch ${prefix}__output1InverseWarp.nii.gz + touch ${prefix}__output0Warp.nii.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ants: \$(antsRegistration --version | grep "Version" | sed -E 's/.*v([0-9]+\\.[0-9]+\\.[0-9]+).*/\\1/') + mrtrix: \$(mrinfo -version 2>&1 | sed -n 's/== mrinfo \\([0-9.]\\+\\).*/\\1/p') + imagemagick: \$(magick -version | sed -n 's/.*ImageMagick \\([0-9]\\{1,\\}\\.[0-9]\\{1,\\}\\.[0-9]\\{1,\\}\\).*/\\1/p') + END_VERSIONS + """ +} diff --git a/modules/nf-neuro/registration/ants/meta.yml b/modules/nf-neuro/registration/ants/meta.yml new file mode 100644 index 0000000..aeb5502 --- /dev/null +++ b/modules/nf-neuro/registration/ants/meta.yml @@ -0,0 +1,73 @@ +--- +# yaml-language-server: $schema=https://raw.githubusercontent.com/scilus/nf-neuro/main/modules/meta-schema.json +name: "registration_ants" +description: Image registration with antsRegistrationSyN or antsRegistrationSyNQuick +keywords: + - nifti + - registration + - antsRegistrationSyN + - antsRegistrationSyNQuick + - trk +tools: + - "ants": + description: "Advanced Normalization Tools." + homepage: "https://github.com/ANTsX/ANTs" + +input: + # Only when we have meta + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
`[ id:'test', single_end:false ]` + + - fixedimage: + type: file + description: Fixed image(s) or source image(s) or reference image(s) + pattern: "*.{nii,nii.gz}" + + - movingimage: + type: file + description: Moving image(s) or target image(s) + pattern: "*.{nii,nii.gz}" + + - mask: + type: file + description: Mask(s) for the fixed image space + pattern: "*.{nii,nii.gz}" + +output: + #Only when we have meta + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'test', single_end:false ]` + + - image: + type: file + description: Nifti volume after registration. + pattern: "*_warped.nii.gz" + + - transfo_image: + type: list + description: Tuple, Transformation files to warp images (nii Warp, mat file) + pattern: "*.{nii,nii.gz,mat}" + + - transfo_trk: + type: list + description: Tuple, Transformation files to warp trk (nii InverseWarp, mat file) + pattern: "*.{nii,nii.gz,mat}" + + - mqc: + type: file + description: .gif file containing quality control image for the registration process. Made for use in MultiQC report. 
+ pattern: "*.gif" + + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@ThoumyreStanislas" diff --git a/modules/nf-neuro/registration/ants/tests/main.nf.test b/modules/nf-neuro/registration/ants/tests/main.nf.test new file mode 100644 index 0000000..2158dbe --- /dev/null +++ b/modules/nf-neuro/registration/ants/tests/main.nf.test @@ -0,0 +1,171 @@ +nextflow_process { + + name "Test Process REGISTRATION_ANTS" + script "../main.nf" + process "REGISTRATION_ANTS" + + tag "modules" + tag "modules_nfcore" + tag "registration" + tag "registration/ants" + + tag "subworkflows" + tag "subworkflows/load_test_data" + + setup { + run("LOAD_TEST_DATA", alias: "LOAD_DATA") { + script "../../../../../subworkflows/nf-neuro/load_test_data/main.nf" + process { + """ + input[0] = Channel.from( [ "T1w.zip", "b0.zip" ] ) + input[1] = "test.load-test-data" + """ + } + } + } + + test("registration - ants") { + config "./nextflow.config" + when { + process { + """ + ch_split_test_data = LOAD_DATA.out.test_data_directory + .branch{ + T1w: it.simpleName == "T1w" + b0: it.simpleName == "b0" + } + ch_T1w = ch_split_test_data.T1w.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/T1w.nii.gz") + ] + } + ch_b0 = ch_split_test_data.b0.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/b0.nii.gz") + ] + } + input[0] = ch_b0 + .join(ch_T1w) + .map{ it + [[]] } + """ + } + } + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("registration - ants - quick") { + config "./nextflow_quick.config" + when { + process { + """ + ch_split_test_data = LOAD_DATA.out.test_data_directory + .branch{ + T1w: it.simpleName == "T1w" + b0: it.simpleName == "b0" + } + ch_T1w = ch_split_test_data.T1w.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/T1w.nii.gz") + ] + } + ch_b0 = 
ch_split_test_data.b0.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/b0.nii.gz") + ] + } + input[0] = ch_b0 + .join(ch_T1w) + .map{ it + [[]] } + """ + } + } + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("registration - ants - options") { + config "./nextflow_options.config" + when { + process { + """ + ch_split_test_data = LOAD_DATA.out.test_data_directory + .branch{ + T1w: it.simpleName == "T1w" + b0: it.simpleName == "b0" + } + ch_T1w = ch_split_test_data.T1w.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/T1w.nii.gz"), + file("\${test_data_directory}/T1w_mask.nii.gz") + ] + } + ch_b0 = ch_split_test_data.b0.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/b0.nii.gz") + ] + } + input[0] = ch_b0 + .join(ch_T1w) + """ + } + } + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("registration - ants - stub") { + tag "stub" + options "-stub-run" + when { + process { + """ + ch_split_test_data = LOAD_DATA.out.test_data_directory + .branch{ + T1w: it.simpleName == "T1w" + b0: it.simpleName == "b0" + } + ch_T1w = ch_split_test_data.T1w.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/T1w.nii.gz"), + file("\${test_data_directory}/T1w_mask.nii.gz") + ] + } + ch_b0 = ch_split_test_data.b0.map{ + test_data_directory -> [ + [ id:'test' ], + file("\${test_data_directory}/b0.nii.gz") + ] + } + input[0] = ch_b0 + .join(ch_T1w) + """ + } + } + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.versions).match() } + ) + } + } +} diff --git a/modules/nf-neuro/registration/ants/tests/main.nf.test.snap b/modules/nf-neuro/registration/ants/tests/main.nf.test.snap new file mode 100644 index 0000000..f1ba276 --- /dev/null +++ b/modules/nf-neuro/registration/ants/tests/main.nf.test.snap @@ -0,0 
+1,323 @@ +{ + "registration - ants": { + "content": [ + { + "0": [ + [ + { + "id": "test" + }, + "test__warped.nii.gz:md5,4382217c04988eb01ed8f99916d7631c" + ] + ], + "1": [ + [ + { + "id": "test" + }, + "test__output0Warp.nii.gz:md5,6fee747df2c30bb48cf2b6897b5d3e17" + ] + ], + "2": [ + [ + { + "id": "test" + }, + "test__output1GenericAffine.mat:md5,8e987eb08cc568478c37ae79a8824f8b" + ] + ], + "3": [ + [ + { + "id": "test" + }, + "test__output1InverseWarp.nii.gz:md5,1608837de13478dff81fe51ac1720d28" + ] + ], + "4": [ + [ + { + "id": "test" + }, + "test__output0InverseAffine.mat:md5,d342d5f1c35a53cadcdf2d49a25f5ce8" + ] + ], + "5": [ + + ], + "6": [ + "versions.yml:md5,2c26609651ebb851765327617e7aec96" + ], + "affine": [ + [ + { + "id": "test" + }, + "test__output1GenericAffine.mat:md5,8e987eb08cc568478c37ae79a8824f8b" + ] + ], + "image": [ + [ + { + "id": "test" + }, + "test__warped.nii.gz:md5,4382217c04988eb01ed8f99916d7631c" + ] + ], + "inverse_affine": [ + [ + { + "id": "test" + }, + "test__output0InverseAffine.mat:md5,d342d5f1c35a53cadcdf2d49a25f5ce8" + ] + ], + "inverse_warp": [ + [ + { + "id": "test" + }, + "test__output1InverseWarp.nii.gz:md5,1608837de13478dff81fe51ac1720d28" + ] + ], + "mqc": [ + + ], + "versions": [ + "versions.yml:md5,2c26609651ebb851765327617e7aec96" + ], + "warp": [ + [ + { + "id": "test" + }, + "test__output0Warp.nii.gz:md5,6fee747df2c30bb48cf2b6897b5d3e17" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.3" + }, + "timestamp": "2025-02-24T07:52:30.173552689" + }, + "registration - ants - quick": { + "content": [ + { + "0": [ + [ + { + "id": "test" + }, + "test__warped.nii.gz:md5,81358dd2f3223f567f4101e61490ae0f" + ] + ], + "1": [ + [ + { + "id": "test" + }, + "test__output0Warp.nii.gz:md5,24e6f53eaa3e4476bb627c374bc809ca" + ] + ], + "2": [ + [ + { + "id": "test" + }, + "test__output1GenericAffine.mat:md5,48c8479a403e0c0ec29c69d37bb4c91a" + ] + ], + "3": [ + [ + { + "id": "test" + }, + 
"test__output1InverseWarp.nii.gz:md5,24e6f53eaa3e4476bb627c374bc809ca" + ] + ], + "4": [ + [ + { + "id": "test" + }, + "test__output0InverseAffine.mat:md5,25d0b8a83a922f2e4e89c6a86258af15" + ] + ], + "5": [ + [ + { + "id": "test" + }, + "test_T1_to_DWI_registration_ants_mqc.gif:md5,ebf58acb9c3f3b9bc8c5a08105d4ceff" + ] + ], + "6": [ + "versions.yml:md5,2c26609651ebb851765327617e7aec96" + ], + "affine": [ + [ + { + "id": "test" + }, + "test__output1GenericAffine.mat:md5,48c8479a403e0c0ec29c69d37bb4c91a" + ] + ], + "image": [ + [ + { + "id": "test" + }, + "test__warped.nii.gz:md5,81358dd2f3223f567f4101e61490ae0f" + ] + ], + "inverse_affine": [ + [ + { + "id": "test" + }, + "test__output0InverseAffine.mat:md5,25d0b8a83a922f2e4e89c6a86258af15" + ] + ], + "inverse_warp": [ + [ + { + "id": "test" + }, + "test__output1InverseWarp.nii.gz:md5,24e6f53eaa3e4476bb627c374bc809ca" + ] + ], + "mqc": [ + [ + { + "id": "test" + }, + "test_T1_to_DWI_registration_ants_mqc.gif:md5,ebf58acb9c3f3b9bc8c5a08105d4ceff" + ] + ], + "versions": [ + "versions.yml:md5,2c26609651ebb851765327617e7aec96" + ], + "warp": [ + [ + { + "id": "test" + }, + "test__output0Warp.nii.gz:md5,24e6f53eaa3e4476bb627c374bc809ca" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.3" + }, + "timestamp": "2025-03-06T16:44:46.474693596" + }, + "registration - ants - options": { + "content": [ + { + "0": [ + [ + { + "id": "test" + }, + "test__warped.nii.gz:md5,7cb6839d4af18d9d37a1bd9d07c8c57b" + ] + ], + "1": [ + + ], + "2": [ + [ + { + "id": "test" + }, + "test__output1GenericAffine.mat:md5,c3a48d19c815206de2b140f505969317" + ] + ], + "3": [ + + ], + "4": [ + [ + { + "id": "test" + }, + "test__output0InverseAffine.mat:md5,f473c1e548be1540cbf757202ffd5984" + ] + ], + "5": [ + [ + { + "id": "test" + }, + "test__registration_ants_mqc.gif:md5,23a92899290210c2fed8dbd9ed6b946b" + ] + ], + "6": [ + "versions.yml:md5,2c26609651ebb851765327617e7aec96" + ], + "affine": [ + [ + { + "id": "test" + }, + 
"test__output1GenericAffine.mat:md5,c3a48d19c815206de2b140f505969317" + ] + ], + "image": [ + [ + { + "id": "test" + }, + "test__warped.nii.gz:md5,7cb6839d4af18d9d37a1bd9d07c8c57b" + ] + ], + "inverse_affine": [ + [ + { + "id": "test" + }, + "test__output0InverseAffine.mat:md5,f473c1e548be1540cbf757202ffd5984" + ] + ], + "inverse_warp": [ + + ], + "mqc": [ + [ + { + "id": "test" + }, + "test__registration_ants_mqc.gif:md5,23a92899290210c2fed8dbd9ed6b946b" + ] + ], + "versions": [ + "versions.yml:md5,2c26609651ebb851765327617e7aec96" + ], + "warp": [ + + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.3" + }, + "timestamp": "2025-03-06T16:44:58.966002773" + }, + "registration - ants - stub": { + "content": [ + [ + "versions.yml:md5,2c26609651ebb851765327617e7aec96" + ] + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.3" + }, + "timestamp": "2025-02-24T07:52:56.165561569" + } +} \ No newline at end of file diff --git a/modules/nf-neuro/registration/ants/tests/nextflow.config b/modules/nf-neuro/registration/ants/tests/nextflow.config new file mode 100644 index 0000000..3750dac --- /dev/null +++ b/modules/nf-neuro/registration/ants/tests/nextflow.config @@ -0,0 +1,6 @@ +process { + withName: "REGISTRATION_ANTS" { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.repro_mode = 1 + } +} diff --git a/modules/nf-neuro/registration/ants/tests/nextflow_options.config b/modules/nf-neuro/registration/ants/tests/nextflow_options.config new file mode 100644 index 0000000..88c9a90 --- /dev/null +++ b/modules/nf-neuro/registration/ants/tests/nextflow_options.config @@ -0,0 +1,16 @@ +process { + withName: "REGISTRATION_ANTS" { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.quick = true + ext.run_qc = true + ext.threads = 1 + ext.transform = "r" + ext.histogram_bins = 4 + ext.spline_distance = 26 + ext.gradient_step = 0.1 + 
ext.histogram_matching = 0 + ext.repro_mode = 0 + ext.collapse_output = 0 + ext.random_seed = 1234 + } +} diff --git a/modules/nf-neuro/registration/ants/tests/nextflow_quick.config b/modules/nf-neuro/registration/ants/tests/nextflow_quick.config new file mode 100644 index 0000000..9fd98e5 --- /dev/null +++ b/modules/nf-neuro/registration/ants/tests/nextflow_quick.config @@ -0,0 +1,9 @@ +process { + withName: "REGISTRATION_ANTS" { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.quick = true + ext.repro_mode = 1 + ext.run_qc = true + ext.suffix_qc = "T1_to_DWI" + } +} diff --git a/modules/nf-neuro/registration/ants/tests/tags.yml b/modules/nf-neuro/registration/ants/tests/tags.yml new file mode 100644 index 0000000..8aa857e --- /dev/null +++ b/modules/nf-neuro/registration/ants/tests/tags.yml @@ -0,0 +1,2 @@ +registration/ants: + - "modules/nf-neuro/registration/ants/**" diff --git a/modules/nf-neuro/registration/tractogram/environment.yml b/modules/nf-neuro/registration/tractogram/environment.yml new file mode 100644 index 0000000..a3dd489 --- /dev/null +++ b/modules/nf-neuro/registration/tractogram/environment.yml @@ -0,0 +1,3 @@ +channels: [] +dependencies: [] +name: registration_tractogram diff --git a/modules/nf-neuro/registration/tractogram/main.nf b/modules/nf-neuro/registration/tractogram/main.nf new file mode 100644 index 0000000..2b5a6be --- /dev/null +++ b/modules/nf-neuro/registration/tractogram/main.nf @@ -0,0 +1,101 @@ +process REGISTRATION_TRACTOGRAM { + tag "$meta.id" + label 'process_single' + + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://scil.usherbrooke.ca/containers/scilus_2.1.0.sif': + 'scilus/scilus:2.1.0' }" + + input: + tuple val(meta), path(anat), path(affine), path(tractogram), path(ref), path(deformation) + + output: + tuple val(meta), path("*__*.{trk,tck}"), emit: tractogram + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def prefix = task.ext.prefix ?: "${meta.id}" + def suffix = task.ext.suffix ? "_${task.ext.suffix}" : "" + def reference = "$ref" ? "--reference $ref" : "" + def in_deformation = "$deformation" ? "--in_deformation $deformation" : "" + + def inverse = task.ext.inverse ? "--inverse" : "" + def reverse_operation = task.ext.reverse_operation ? "--reverse_operation" : "" + + def invalid_management = task.ext.invalid_streamlines ?: "cut" + def cut_invalid = invalid_management == "cut" ? "--cut_invalid" : "" + def remove_single_point = task.ext.remove_single_point ? "--remove_single_point" : "" + def remove_overlapping_points = task.ext.remove_overlapping_points ? "--remove_overlapping_points" : "" + def threshold = task.ext.threshold ? "--threshold " + task.ext.threshold : "" + def no_empty = task.ext.no_empty ? "--no_empty" : "" + + """ + affine=$affine + if [[ "$affine" == *.txt ]]; then + ConvertTransformFile 3 $affine affine.mat --convertToAffineType \ + && affine="affine.mat" \ + || echo "TXT affine transform file conversion failed, using original file." 
+ fi + + for tractogram in ${tractogram}; + do + + ext=\${tractogram#*.} + bname=\$(basename \${tractogram} .\${ext}) + + scil_tractogram_apply_transform.py \$tractogram $anat \$affine \ + ${prefix}__\${bname}${suffix}.\${ext} \ + $in_deformation \ + $inverse \ + $reverse_operation \ + $reference \ + --keep_invalid -f + + if [[ "$invalid_management" == "keep" ]]; then + echo "Skip invalid streamline detection: ${prefix}__\${bname}${suffix}.\${ext}" + continue + fi + + scil_tractogram_remove_invalid.py ${prefix}__\${bname}${suffix}.\${ext} \ + ${prefix}__\${bname}${suffix}.\${ext} \ + $cut_invalid \ + $remove_single_point \ + $remove_overlapping_points \ + $threshold \ + $no_empty \ + -f + + done + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + scilpy: \$(pip list --disable-pip-version-check --no-python-version-warning | grep scilpy | tr -s ' ' | cut -d' ' -f2) + END_VERSIONS + """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + def suffix = task.ext.suffix ? "_${task.ext.suffix}" : "" + """ + scil_tractogram_apply_transform.py -h + scil_tractogram_remove_invalid.py -h + + for tractogram in ${tractogram}; + do + + ext=\${tractogram#*.} + bname=\$(basename \${tractogram} .\${ext}) + + touch ${prefix}__\${bname}${suffix}.\${ext} + + done + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + scilpy: \$(pip list --disable-pip-version-check --no-python-version-warning | grep scilpy | tr -s ' ' | cut -d' ' -f2) + END_VERSIONS + """ +} \ No newline at end of file diff --git a/modules/nf-neuro/registration/tractogram/meta.yml b/modules/nf-neuro/registration/tractogram/meta.yml new file mode 100644 index 0000000..6be0cf7 --- /dev/null +++ b/modules/nf-neuro/registration/tractogram/meta.yml @@ -0,0 +1,64 @@ +--- +name: "registration_tractogram" +description: Apply a transform to a tractogram or multiple tractograms +keywords: + - Diffusion MRI + - Registration + - Bundles + - Tractogram +tools: + - "scilpy": + description: "The Sherbrooke 
Connectivity Imaging Lab (SCIL) Python dMRI processing toolbox." + homepage: "https://github.com/scilus/scilpy.git" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'test', single_end:false ]` + + - anat: + type: file + description: FA nifti format as anatomical image + pattern: "*.{nii,nii.gz}" + + - affine: + type: file + description: ANTs affine transform + pattern: "*.mat" + + - tractogram: + type: file + description: Tractogram or list of tractograms to register + pattern: "*.{trk,tck}" + + - ref: + type: file + description: Reference anatomy for tck/vtk/fib/dpy file support (.nii or .nii.gz) (optional) + pattern: "*.{nii,nii.gz}" + + - deformation: + type: file + description: Path to the file containing a deformation field (optional) + pattern: "*.{nii,nii.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'test', single_end:false ]` + + - tractogram: + type: file + description: Warped tractogram(s) + pattern: "*.{trk,tck}" + + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@scilus" diff --git a/modules/nf-neuro/registration/tractogram/tests/main.nf.test b/modules/nf-neuro/registration/tractogram/tests/main.nf.test new file mode 100644 index 0000000..c93df55 --- /dev/null +++ b/modules/nf-neuro/registration/tractogram/tests/main.nf.test @@ -0,0 +1,118 @@ + +nextflow_process { + + name "Test Process REGISTRATION_TRACTOGRAM" + script "../main.nf" + process "REGISTRATION_TRACTOGRAM" + config "./nextflow.config" + + tag "modules" + tag "modules_nfcore" + tag "registration" + tag "registration/tractogram" + + tag "subworkflows" + tag "subworkflows/load_test_data" + + setup { + run("LOAD_TEST_DATA", alias: "LOAD_DATA") { + script "../../../../../subworkflows/nf-neuro/load_test_data/main.nf" + process { + """ + input[0] = Channel.from( [ "bundles.zip" ] ) + input[1] = 
"test.load-test-data" + """ + } + } + } + + + test("registration - tractogram_bundles") { + + + when { + process { + """ + input[0] = LOAD_DATA.out.test_data_directory.map{ + test_data_directory -> [ + [ id:'test', single_end:false ], // meta map + file("\${test_data_directory}/bundle_all_1mm.nii.gz", checkIfExists: true), + file("\${test_data_directory}/affine.txt", checkIfExists: true), + file("\${test_data_directory}/fibercup_atlas/subj_1/*.trk", checkIfExists: true), + [], + [] + ] + } + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + + } + + test("registration - tractogram") { + + config "./nextflow_suffix.config" + + when { + process { + """ + input[0] = LOAD_DATA.out.test_data_directory.map{ + test_data_directory -> [ + [ id:'test', single_end:false ], // meta map + file("\${test_data_directory}/bundle_all_1mm.nii.gz", checkIfExists: true), + file("\${test_data_directory}/affine.txt", checkIfExists: true), + file("\${test_data_directory}/fibercup_atlas/subj_1/bundle_2.trk", checkIfExists: true), + [], + [] + ] + } + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + + } + + test("registration - tractogram - stub-run") { + tag "stub" + options "-stub-run" + + when { + process { + """ + input[0] = LOAD_DATA.out.test_data_directory.map{ + test_data_directory -> [ + [ id:'test', single_end:false ], // meta map + file("\${test_data_directory}/bundle_all_1mm.nii.gz", checkIfExists: true), + file("\${test_data_directory}/affine.txt", checkIfExists: true), + file("\${test_data_directory}/fibercup_atlas/subj_1/bundle_2.trk", checkIfExists: true), + [], + [] + ] + } + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.versions).match() } + ) + } + + } +} diff --git a/modules/nf-neuro/registration/tractogram/tests/main.nf.test.snap 
b/modules/nf-neuro/registration/tractogram/tests/main.nf.test.snap new file mode 100644 index 0000000..fb2a8cb --- /dev/null +++ b/modules/nf-neuro/registration/tractogram/tests/main.nf.test.snap @@ -0,0 +1,100 @@ +{ + "registration - tractogram_bundles": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + [ + "test__bundle_0.trk:md5,6e14cc02b66d12d5dde0a0701918385a", + "test__bundle_1.trk:md5,ef5a759144ab7e8d2f5abf30a463a152", + "test__bundle_2.trk:md5,824bcbf796cbce3e1c93e39d1c98dcf9", + "test__bundle_3.trk:md5,8877161b9aebaa6b12a4d7a8f8410407", + "test__bundle_4.trk:md5,6c8658b946154896952ca89e88fcf29f", + "test__bundle_5.trk:md5,fa058f93d9bfe4bed2ff027e9acd5682", + "test__bundle_6.trk:md5,125e50561b4b0c5fd8ee23867c28bc51" + ] + ] + ], + "1": [ + "versions.yml:md5,609469c08e60062eed7528276deddf5e" + ], + "versions": [ + "versions.yml:md5,609469c08e60062eed7528276deddf5e" + ], + "warped_tractogram": [ + [ + { + "id": "test", + "single_end": false + }, + [ + "test__bundle_0.trk:md5,6e14cc02b66d12d5dde0a0701918385a", + "test__bundle_1.trk:md5,ef5a759144ab7e8d2f5abf30a463a152", + "test__bundle_2.trk:md5,824bcbf796cbce3e1c93e39d1c98dcf9", + "test__bundle_3.trk:md5,8877161b9aebaa6b12a4d7a8f8410407", + "test__bundle_4.trk:md5,6c8658b946154896952ca89e88fcf29f", + "test__bundle_5.trk:md5,fa058f93d9bfe4bed2ff027e9acd5682", + "test__bundle_6.trk:md5,125e50561b4b0c5fd8ee23867c28bc51" + ] + ] + ] + } + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-05-15T19:11:42.198039" + }, + "registration - tractogram - stub-run": { + "content": [ + [ + "versions.yml:md5,609469c08e60062eed7528276deddf5e" + ] + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "25.04.2" + }, + "timestamp": "2025-05-30T18:56:28.652442885" + }, + "registration - tractogram": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test__bundle_2_mni.trk:md5,824bcbf796cbce3e1c93e39d1c98dcf9" + ] + ], + "1": [ + 
"versions.yml:md5,609469c08e60062eed7528276deddf5e" + ], + "versions": [ + "versions.yml:md5,609469c08e60062eed7528276deddf5e" + ], + "warped_tractogram": [ + [ + { + "id": "test", + "single_end": false + }, + "test__bundle_2_mni.trk:md5,824bcbf796cbce3e1c93e39d1c98dcf9" + ] + ] + } + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-05-15T19:11:52.03634" + } +} \ No newline at end of file diff --git a/modules/nf-neuro/registration/tractogram/tests/nextflow.config b/modules/nf-neuro/registration/tractogram/tests/nextflow.config new file mode 100644 index 0000000..a32e45e --- /dev/null +++ b/modules/nf-neuro/registration/tractogram/tests/nextflow.config @@ -0,0 +1,13 @@ +process { + withName: "REGISTRATION_TRACTOGRAM" { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.inverse = true + ext.force = true + ext.reverse_operation = true + ext.cut_invalid = true + ext.remove_single_point = true + ext.remove_overlapping_points = true + ext.threshold = 0.001 + ext.no_empty = true + } +} diff --git a/modules/nf-neuro/registration/tractogram/tests/nextflow_suffix.config b/modules/nf-neuro/registration/tractogram/tests/nextflow_suffix.config new file mode 100644 index 0000000..c9553be --- /dev/null +++ b/modules/nf-neuro/registration/tractogram/tests/nextflow_suffix.config @@ -0,0 +1,14 @@ +process { + withName: "REGISTRATION_TRACTOGRAM" { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.inverse = true + ext.force = true + ext.reverse_operation = true + ext.cut_invalid = true + ext.remove_single_point = true + ext.remove_overlapping_points = true + ext.threshold = 0.001 + ext.no_empty = true + ext.suffix = 'mni' + } +} diff --git a/modules/nf-neuro/registration/tractogram/tests/tags.yml b/modules/nf-neuro/registration/tractogram/tests/tags.yml new file mode 100644 index 0000000..39a6e68 --- /dev/null +++ 
b/modules/nf-neuro/registration/tractogram/tests/tags.yml @@ -0,0 +1,2 @@ +registration/tractogram: + - "modules/nf-neuro/registration/tractogram/**" diff --git a/modules/nf-neuro/tractogram/removeinvalid/environment.yml b/modules/nf-neuro/tractogram/removeinvalid/environment.yml new file mode 100644 index 0000000..ff5b9fb --- /dev/null +++ b/modules/nf-neuro/tractogram/removeinvalid/environment.yml @@ -0,0 +1,3 @@ +channels: [] +dependencies: [] +name: tractogram_removeinvalid diff --git a/modules/nf-neuro/tractogram/removeinvalid/main.nf b/modules/nf-neuro/tractogram/removeinvalid/main.nf new file mode 100644 index 0000000..dad674a --- /dev/null +++ b/modules/nf-neuro/tractogram/removeinvalid/main.nf @@ -0,0 +1,72 @@ +process TRACTOGRAM_REMOVEINVALID { + tag "$meta.id" + label 'process_single' + + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://scil.usherbrooke.ca/containers/scilus_2.0.2.sif': + 'scilus/scilus:2.0.2' }" + + input: + tuple val(meta), path(tractogram) + + output: + tuple val(meta), path("*.{trk,tck}"), emit: tractograms + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def prefix = task.ext.prefix ?: "${meta.id}" + def suffix = task.ext.suffix ? "_${task.ext.suffix}" : "" + + def force = task.ext.force ? "-f" : "" + def cut_invalid = task.ext.cut_invalid ? "--cut_invalid" : "" + def remove_single_point = task.ext.remove_single_point ? "--remove_single_point" : "" + def remove_overlapping_points = task.ext.remove_overlapping_points ? "--remove_overlapping_points" : "" + def threshold = task.ext.threshold ? "--threshold " + task.ext.threshold : "" + def no_empty = task.ext.no_empty ? 
"--no_empty" : "" + + """ + for tractogram in ${tractogram}; + do \ + ext=\${tractogram#*.} + bname=\$(basename \${tractogram} .\${ext}) + + scil_tractogram_remove_invalid.py \$tractogram ${prefix}__\${bname}${suffix}.\${ext}\ + $cut_invalid\ + $remove_single_point\ + $remove_overlapping_points\ + $threshold\ + $no_empty\ + $force + + done + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + scilpy: 2.0.2 + END_VERSIONS + """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + def suffix = task.ext.suffix ? "_${task.ext.suffix}" : "" + + """ + scil_tractogram_remove_invalid.py -h + + for tractogram in ${tractogram}; + do \ + ext=\${tractogram#*.} + bname=\$(basename \${tractogram} .\${ext}) + + touch ${prefix}__\${bname}${suffix}.\${ext} + done + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + scilpy: 2.0.2 + END_VERSIONS + """ +} diff --git a/modules/nf-neuro/tractogram/removeinvalid/meta.yml b/modules/nf-neuro/tractogram/removeinvalid/meta.yml new file mode 100644 index 0000000..7006cda --- /dev/null +++ b/modules/nf-neuro/tractogram/removeinvalid/meta.yml @@ -0,0 +1,46 @@ +--- +name: "tractogram_removeinvalid" +description: | + Remove invalid streamlines from a single or multiple tractograms/bundles. +keywords: + - Invalid + - Tractograms + - Streamlines +tools: + - "scilpy": + description: "The Sherbrooke Connectivity Imaging Lab (SCIL) Python dMRI processing toolbox." + homepage: "https://github.com/scilus/scilpy.git" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'sample1', single_end:false ]` + + - tractograms: + type: file + description: Tractogram or list of tractograms to register + pattern: "*.{trk,tck}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
`[ id:'sample1', single_end:false ]` + + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + + - tractogram: + type: file + description: Filtered tractogram(s). + pattern: "*.{trk,tck}" + +authors: + - "@gagnonanthony" +maintainers: + - "@gagnonanthony" diff --git a/modules/nf-neuro/tractogram/removeinvalid/tests/main.nf.test b/modules/nf-neuro/tractogram/removeinvalid/tests/main.nf.test new file mode 100644 index 0000000..c10372b --- /dev/null +++ b/modules/nf-neuro/tractogram/removeinvalid/tests/main.nf.test @@ -0,0 +1,77 @@ +nextflow_process { + + name "Test Process TRACTOGRAM_REMOVEINVALID" + script "../main.nf" + process "TRACTOGRAM_REMOVEINVALID" + config "./nextflow.config" + + tag "modules" + tag "modules_nfcore" + tag "tractogram" + tag "tractogram/removeinvalid" + + tag "subworkflows/load_test_data" + + setup { + run("LOAD_TEST_DATA", alias: "LOAD_DATA") { + script "../../../../../subworkflows/nf-neuro/load_test_data/main.nf" + process { + """ + input[0] = Channel.from( [ "bundles.zip" ] ) + input[1] = "test.load-test-data" + """ + } + } + } + + test("tractogram - remove invalid") { + + when { + process { + """ + input[0] = LOAD_DATA.out.test_data_directory.map{ + test_data_directory -> [ + [ id:'test', single_end:false ], // meta map + file("\${test_data_directory}/fibercup_atlas/subj_1/*.trk", checkIfExists: true), + ] + } + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + + } + + test("tractogram - remove invalid - stub-run") { + tag "stub" + options "-stub-run" + + when { + process { + """ + input[0] = LOAD_DATA.out.test_data_directory.map{ + test_data_directory -> [ + [ id:'test', single_end:false ], // meta map + file("\${test_data_directory}/fibercup_atlas/subj_1/*.trk", checkIfExists: true), + ] + } + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.versions).match() } + ) + } + + 
} + +} diff --git a/modules/nf-neuro/tractogram/removeinvalid/tests/main.nf.test.snap b/modules/nf-neuro/tractogram/removeinvalid/tests/main.nf.test.snap new file mode 100644 index 0000000..cf28f11 --- /dev/null +++ b/modules/nf-neuro/tractogram/removeinvalid/tests/main.nf.test.snap @@ -0,0 +1,65 @@ +{ + "tractogram - remove invalid - stub-run": { + "content": [ + [ + "versions.yml:md5,9c98a9977d211116b5e0585b635769c1" + ] + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "25.04.2" + }, + "timestamp": "2025-05-30T18:58:36.79962212" + }, + "tractogram - remove invalid": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + [ + "test__bundle_0.trk:md5,6fb44e5444b09eaf6e612e1bde04b0ed", + "test__bundle_1.trk:md5,04fbc7404a45e197d203d05e1bde351a", + "test__bundle_2.trk:md5,91cf2d5b1e8ca0e219012dbcaf4a8edb", + "test__bundle_3.trk:md5,a2b56e397fc062691ed4c99f8fe67228", + "test__bundle_4.trk:md5,3fe0cd679a23e53079bcfdfb3a777e89", + "test__bundle_5.trk:md5,9a7d5da64e557eb6622a051593886e4a", + "test__bundle_6.trk:md5,29434c0aa3bc72c721e8879edd4f0dca" + ] + ] + ], + "1": [ + "versions.yml:md5,9c98a9977d211116b5e0585b635769c1" + ], + "tractograms": [ + [ + { + "id": "test", + "single_end": false + }, + [ + "test__bundle_0.trk:md5,6fb44e5444b09eaf6e612e1bde04b0ed", + "test__bundle_1.trk:md5,04fbc7404a45e197d203d05e1bde351a", + "test__bundle_2.trk:md5,91cf2d5b1e8ca0e219012dbcaf4a8edb", + "test__bundle_3.trk:md5,a2b56e397fc062691ed4c99f8fe67228", + "test__bundle_4.trk:md5,3fe0cd679a23e53079bcfdfb3a777e89", + "test__bundle_5.trk:md5,9a7d5da64e557eb6622a051593886e4a", + "test__bundle_6.trk:md5,29434c0aa3bc72c721e8879edd4f0dca" + ] + ] + ], + "versions": [ + "versions.yml:md5,9c98a9977d211116b5e0585b635769c1" + ] + } + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "24.04.1" + }, + "timestamp": "2024-05-21T14:39:05.238848" + } +} \ No newline at end of file diff --git a/modules/nf-neuro/tractogram/removeinvalid/tests/nextflow.config 
b/modules/nf-neuro/tractogram/removeinvalid/tests/nextflow.config new file mode 100644 index 0000000..21d1d7d --- /dev/null +++ b/modules/nf-neuro/tractogram/removeinvalid/tests/nextflow.config @@ -0,0 +1,11 @@ +process { + withName: "TRACTOGRAM_REMOVEINVALID" { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.force = true + ext.cut_invalid = true + ext.remove_single_point = true + ext.remove_overlapping_points = true + ext.threshold = 0.5 + ext.no_empty = true + } +} diff --git a/modules/nf-neuro/tractogram/removeinvalid/tests/tags.yml b/modules/nf-neuro/tractogram/removeinvalid/tests/tags.yml new file mode 100644 index 0000000..15a8f31 --- /dev/null +++ b/modules/nf-neuro/tractogram/removeinvalid/tests/tags.yml @@ -0,0 +1,2 @@ +tractogram/removeinvalid: + - "modules/nf-neuro/tractogram/removeinvalid/**" diff --git a/nextflow.config b/nextflow.config index b233b83..6a8d503 100644 --- a/nextflow.config +++ b/nextflow.config @@ -1,155 +1,929 @@ +profiles { + docker { + docker.enabled = true + conda.enabled = false + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + apptainer.enabled = false + docker.runOptions = "-v ${params.templates_dir}:/extractor_flow -u \$(id -u):\$(id -g)" + docker.registry = 'docker.io' + } + + apptainer { + docker.enabled = false + conda.enabled = false + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + apptainer.enabled = true + apptainer.runOptions = "--bind ${params.templates_dir}:/extractor_flow" + } + + fully_reproducible { + params.processes_bet_register_t1 = 1 + } + + extract_bundles { + params.extract_bundles = true + } +} + +manifest { + name = 'scilus/extractor_flow' + description = """extractor_flow is a Nextflow pipeline to filter and extract brain tractograms from a set of subjects.""" + version = '2.0dev' +} + +params { + // Supplied at runtime 
by the user. + input = null + templates_dir = null + templates_dir = null + + // Optional parameters supplied at runtime by the user. + keep_intermediate_steps=false + orig=false + run_bet=false + quick_registration=true + + // Output paths + mni_space = "mni_space" + orig_space = "orig_space" + output_dir = "./results_extractorflow" + final_outputs_dir = "${params.output_dir}/final_outputs" + intermediate_dir = "${params.output_dir}/intermediates" + + // Template T1 path + template_t1="/human-data/mni_152_sym_09c/t1" + extract_bundles = false + + distance = 0 + + // Number of processes per tasks + processes_major_filtering = 4 + + // Process control + processes = false + + // Templates and ROIs folder + rois_folder = "/extractor_flow/templates_and_ROIs/" + rois_folder_host = "${params.templates_dir}/templates_and_ROIs/" + + // Filtering List Folder + FLF = "/extractor_flow/filtering_lists/filtering_list_" + + // Loop + loop_angle_threshold = 360 + min_streamline_length = 20 + max_streamline_length = 100000 + + // Sides + sides = "L,R" + + cc_homotopic_pairs = "AGWM,CingGWM,CuGWM,FuGWM,Hippo,IFGWM,Ins,IOGWM,ITGWM,LFOGWM,LGWM,MFGWM,MFOGWM,MOGWM,MTGWM,PHG,PoCGWM,PrCGWM,PrCuGWM,RGGWM,SFGWM,SMGWM,SOGWM,SPGWM,STGWM,T_pole_gwm" + + bg_lists = "AGWM,CingGWM,CuGWM,ENT,FuGWM,Hippo,IFGWM,Ins,IOGWM,ITGWM,LFOGWM,LGWM,MFGWM,MFOGWM,MOGWM,MTGWM,PHG,PoCGWM,PrCGWM,PrCuGWM,RGGWM,SFGWM,SMGWM,SOGWM,SPGWM,STGWM,T_pole_gwm" + bg_caud_lists = "AGWM,CingGWM,IFGWM,Ins,ITGWM,LFOGWM,MFGWM,MFOGWM,MTGWM,PoCGWM,PrCGWM,PrCuGWM,RGGWM,SFGWM,SMGWM,SPGWM,STGWM,T_pole_gwm" + + + asso_ventral_lists = "O,P,T" + asso_dorsal_f_o_f_t_lists = "F_O_dorsal,F_T_dorsal" + asso_dorsal_f_p_lists = "F_P_dorsal_IFG,F_P_dorsal_MFG,F_P_dorsal_MiscG,F_P_dorsal_PrCG,F_P_dorsal_SFG" + + asso_p_o_lists = "P_O_PoCG,P_O_SPG,P_O_SMG,P_O_AG,P_O_PrCuG" + asso_p_t_lists = "P_T_PoCG,P_T_SPG,P_T_SMG,P_T_AG,P_T_PrCuG" + asso_o_t_lists = "O_T_STG,O_T_MTG,O_T_ITG,O_T_Tpole,O_T_FuG,O_T_MTL" + asso_ins_lists = 
"Ins_F,Ins_P,Ins_O,Ins_T," + + asso_frontal_be_lists = "SFG,MFG,IFG,PrCG,FrOrbG" + asso_occipital_be_lists = "MOG,SOG,IOG,CuG,LG" + asso_parietal_be_lists = "PoCG,SPG,AG,SMG,PrCuG" + asso_temporal_be_lists ="STG,MTG,ITG,FuG" + seeds = "JHU_MNI_AGWM_,JHU_MNI_CingGWM_,JHU_MNI_CuGWM_,JHU_MNI_FuGWM_,JHU_MNI_IFGWM_,JHU_MNI_Ins_,JHU_MNI_IOGWM_,JHU_MNI_ITGWM_,JHU_MNI_LFOGWM_,JHU_MNI_LGWM_,JHU_MNI_MFGWM_,JHU_MNI_MFOGWM_,JHU_MNI_MOGWM_,JHU_MNI_MTGWM_,JHU_MNI_PoCGWM_,JHU_MNI_PrCGWM_,JHU_MNI_PrCuGWM_,JHU_MNI_RGGWM_,JHU_MNI_SFGWM_,JHU_MNI_SMGWM_,JHU_MNI_SOGWM_,JHU_MNI_SPGWM_,JHU_MNI_STGWM_,JHU_MNI_T_pole_gwm_,JHU_MNI_Hippo_,JHU_MNI_PHG_" + + // Atlas + atlas.template = "JHU_MNI_SS_T1_brain_182x218x182_conv_f.nii.gz" + atlas.JHU_8 = "JHU_MNI_all_noDWM_all8.nii.gz" + atlas.csf = 'JHU_MNI_CSF.nii.gz' +} + +// cleanup = false +singularity.autoMounts = true +docker.runOptions = "-v ${params.templates_dir}:/extractor_flow" + process { - publishDir = [path: {"./results_extractorflow/intermediate_processes/$sid/$task.process"}, enabled: false] scratch = true errorStrategy = { task.attempt <= 3 ? 
'retry' : 'ignore' } - maxRetries = 3 + maxRetries = 0 maxErrors = -1 stageInMode = 'copy' stageOutMode = 'rsync' tag = { "$sid" } afterScript = 'sleep 1' - containerOptions = "--user \$(id -u):\$(id -g)" -} + // containerOptions = "--user \$(id -u):\$(id -g)" -cleanup = true -singularity.autoMounts = true + ////////////////////// + // Preprocessing modules + ////////////////////// -params { - template_space="mni_space" - orig_space="orig_space" - final_output_mni_space = [path: {"./results_extractorflow/final_outputs/$sid/$params.template_space/"}, mode: 'copy'] - final_output_bundles_mni_space = [path: {"./results_extractorflow/final_outputs/$sid/$params.template_space/bundles/"}, mode: 'copy'] - final_output_orig_space = [path: {"./results_extractorflow/final_outputs/$sid/$params.orig_space/"}, mode: 'copy'] - final_output_bundles_orig_space = [path: {"./results_extractorflow/final_outputs/$sid/$params.orig_space/bundles/"}, mode: 'copy'] + withName: "REGISTRATION_ANTS" { + ext.quick = "${params.quick_registration}" - //**Template T1 path**// - template_t1="/human-data/mni_152_sym_09c/t1" - run_bet=false - extended=false - orig=false + publishDir = [ + // path: { "${final_outputs_dir}/${meta.id}/$params.mni_space/" }, + path: { "${params.final_outputs_dir}/${meta.id}/REGISTRATION_ANTS/" }, + mode: 'copy' + ] + } - keep_intermediate_steps=false + withName: "REGISTRATION_TRACTOGRAM" { + ext.inverse = true + ext.reverse_operation = false + ext.suffix = "mni_space" + ext.invalid_streamlines = "remove" + publishDir = [ + // path: {"${params.final_outputs_dir}/${meta.id}/$params.mni_space/"}, + path: {"${params.final_outputs_dir}/${meta.id}/REGISTRATION_TRACTOGRAM/"}, + mode: 'copy' + ] + } - quick_registration=false - registration_script="antsRegistrationSyN.sh" + withName: "MAJOR_FILTERING" { + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/MAJOR_FILTERING/"}, + ] + } - distance=0 + withName: "REGISTER_TRACTOGRAM_ORIG" { + ext.reverse_operation = 
true + ext.inverse = false + ext.when = params.orig + ext.invalid_streamlines = "keep" + ext.suffix = "orig_space" + ext.prefix = "" + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/orig_space/"}, + mode: 'copy' + ] + } - //**Number of processes per tasks**// - processes_bet_register_t1=4 - processes_major_filtering=4 + withName: "TRACTOGRAM_REMOVEINVALID" { + ext.force = true + ext.remove_single_point = true + ext.cut_invalid = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/TRACTOGRAM_REMOVEINVALID/"}, + mode: 'copy' + ] + } - //**Output directory**// - output_dir=false + ////////////////////// + // Filtering list processes + ////////////////////// + withName: "EXTRACT_FORNIX" { + ext.filtering_list = params.FLF+"fx.txt" + ext.out_extension = "fornix_f" + ext.remaining_extension = "unplausible_streamlines_wo_fornix" + ext.keep = params.keep_intermediate_steps + ext.extract_masks = "" + ext.distance = 1 + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/FORNIX"}, + enabled: params.keep_intermediate_steps + ] + } - rois_folder="/extractor_flow/templates_and_ROIs/" + withName: "EXTRACT_EE_CEREBELLUM" { + ext.filtering_list = params.FLF+"out_cerebellum.txt" + ext.out_extension = "wb_clean01_nocereb" + ext.remaining_extension = "all_cerebellum" + ext.keep = true + ext.extract_masks = "" + ext.distance = params.distance - //**Filtering List Folder**/ - FLF="/extractor_flow/filtering_lists/filtering_list_" + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/EE_CEREBELLUM"}, + enabled: params.keep_intermediate_steps + ] + } - //**Loop**// - loop_angle_threshold=360 + withName: "EXTRACT_EE_BRAINSTEM" { + ext.filtering_list = params.FLF+"out_brainstem.txt" + ext.out_extension = "wb_clean02" + ext.remaining_extension = "all_brainstem" + ext.keep = true + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: 
{"${params.intermediate_dir}/${meta.id}/EXTRACT/EE_BRAINSTEM"}, + enabled: params.keep_intermediate_steps + ] + } - min_streaminline_lenght=20 - max_streaminline_lenght=100000 + withName: "REMOVE_OUT_OF_CGM_DWM" { + ext.filtering_list = params.FLF+"ee_CGM_SWM.txt" + ext.out_extension = "wb_either_CGM_SWM" + ext.remaining_extension = "no_CGM_SWM" + ext.keep = params.keep_intermediate_steps + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/REMOVE_OUT_OF_CGM_DWM"}, + enabled: params.keep_intermediate_steps + ] + } - //**Process control**// - processes = false + withName: "EXTRACT_ALL_COMMISSURAL" { + ext.filtering_list = params.FLF+"commissural.txt" + ext.out_extension = "wb_either_CGM_SWM_noCC" + ext.remaining_extension = "tmp_CC" + ext.keep = true + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ALL_COMMISSURAL"}, + enabled: params.keep_intermediate_steps + ] + } - //**Sides**// - sides="L,R" + withName: "EXTRACT_PLAUSIBLE_CC_CX" { + ext.filtering_list = params.FLF+"CC_Cx.txt" + ext.out_extension = "in_CC_Cx_f" + ext.remaining_extension = "garbage" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/PLAUSIBLE_CC_CX"}, + enabled: params.keep_intermediate_steps + ] + } - cc_homotopic_pairs="AGWM,CingGWM,CuGWM,FuGWM,Hippo,IFGWM,Ins,IOGWM,ITGWM,LFOGWM,LGWM,MFGWM,MFOGWM,MOGWM,MTGWM,PHG,PoCGWM,PrCGWM,PrCuGWM,RGGWM,SFGWM,SMGWM,SOGWM,SPGWM,STGWM,T_pole_gwm" + withName: "EXTRACT_PLAUSIBLE_AC_CX" { + ext.filtering_list = params.FLF+"AC_Cx.txt" + ext.out_extension = "in_AC_Cx_f" + ext.remaining_extension = "garbage" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/PLAUSIBLE_AC_CX"}, + enabled: 
params.keep_intermediate_steps + ] + } - bg_lists="AGWM,CingGWM,CuGWM,ENT,FuGWM,Hippo,IFGWM,Ins,IOGWM,ITGWM,LFOGWM,LGWM,MFGWM,MFOGWM,MOGWM,MTGWM,PHG,PoCGWM,PrCGWM,PrCuGWM,RGGWM,SFGWM,SMGWM,SOGWM,SPGWM,STGWM,T_pole_gwm" - bg_caud_lists="AGWM,CingGWM,IFGWM,Ins,ITGWM,LFOGWM,MFGWM,MFOGWM,MTGWM,PoCGWM,PrCGWM,PrCuGWM,RGGWM,SFGWM,SMGWM,SPGWM,STGWM,T_pole_gwm" + withName: "SPLIT_NO_CC_ASSO_AND_BG" { + ext.filtering_list = params.FLF+"all_BG.txt" + ext.out_extension = "all_subcortical_from_CGM_SWM_noCC_f" + ext.remaining_extension = "asso_noBG" + ext.keep = true + ext.extract_masks = "" + ext.distance = 1 + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/SPLIT_NO_CC_ASSO_AND_BG"}, + enabled: params.keep_intermediate_steps + ] + } + withName: "SPLIT_BG_THAL" { + ext.filtering_list = params.FLF+"BG_ipsi_Thal.txt" + ext.out_extension = "BG_ipsi_Thal" + ext.remaining_extension = "garbage_BG_ipsi_Thal" + ext.keep = false + ext.reverse_suffix = true + ext.extract_masks = "" + ext.distance = 1 + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/SPLIT_BG_THAL"}, + enabled: params.keep_intermediate_steps + ] + } - asso_ventral_lists="O,P,T" - asso_dorsal_f_o_f_t_lists="F_O_dorsal,F_T_dorsal" + withName: "SPLIT_BG_PUT" { + ext.filtering_list = params.FLF+"BG_ipsi_Put.txt" + ext.out_extension = "BG_ipsi_Put" + ext.remaining_extension = "garbage_BG_ipsi_Put" + ext.reverse_suffix = true + ext.keep = false + ext.extract_masks = "" + ext.distance = 1 + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/SPLIT_BG_PUT"}, + enabled: params.keep_intermediate_steps + ] + } + withName: "SPLIT_BG_CAUD" { + ext.filtering_list = params.FLF+"BG_ipsi_Caud.txt" + ext.out_extension = "BG_ipsi_Caud" + ext.remaining_extension = "garbage_BG_ipsi_Caud" + ext.reverse_suffix = true + ext.keep = false + ext.extract_masks = "" + ext.distance = 1 + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/SPLIT_BG_CAUD"}, + enabled: 
params.keep_intermediate_steps + ] + } - asso_dorsal_f_p_lists="F_P_dorsal_IFG,F_P_dorsal_MFG,F_P_dorsal_MiscG,F_P_dorsal_PrCG,F_P_dorsal_SFG" + withName: "REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO" { + ext.filtering_list = params.FLF+"not_in_BG.txt" + ext.out_extension = "asso_all_intra_inter" + ext.remaining_extension = "asso_lost2" + ext.keep = params.keep_intermediate_steps + ext.extract_masks = "" + ext.distance = params.distance + ext.list_suffix = "" + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO"}, + enabled: params.keep_intermediate_steps + ] + } - asso_p_o_lists="P_O_PoCG,P_O_SPG,P_O_SMG,P_O_AG,P_O_PrCuG" - asso_p_t_lists="P_T_PoCG,P_T_SPG,P_T_SMG,P_T_AG,P_T_PrCuG" - asso_o_t_lists="O_T_STG,O_T_MTG,O_T_ITG,O_T_Tpole,O_T_FuG,O_T_MTL" - asso_ins_lists="Ins_F,Ins_P,Ins_O,Ins_T," + withName: "CC_HOMOTOPIC" { + ext.filtering_list = params.FLF+"CC_homo.txt" + ext.out_extension = "cc_homotopic" + ext.remaining_extension = "garbage" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/CC_HOMOTOPIC"}, + enabled: params.keep_intermediate_steps + ] + } - asso_frontal_be_lists="SFG,MFG,IFG,PrCG,FrOrbG" - asso_occipital_be_lists="MOG,SOG,IOG,CuG,LG" - asso_parietal_be_lists="PoCG,SPG,AG,SMG,PrCuG" - asso_temporal_be_lists="STG,MTG,ITG,FuG" - seeds="JHU_MNI_AGWM_,JHU_MNI_CingGWM_,JHU_MNI_CuGWM_,JHU_MNI_FuGWM_,JHU_MNI_IFGWM_,JHU_MNI_Ins_,JHU_MNI_IOGWM_,JHU_MNI_ITGWM_,JHU_MNI_LFOGWM_,JHU_MNI_LGWM_,JHU_MNI_MFGWM_,JHU_MNI_MFOGWM_,JHU_MNI_MOGWM_,JHU_MNI_MTGWM_,JHU_MNI_PoCGWM_,JHU_MNI_PrCGWM_,JHU_MNI_PrCuGWM_,JHU_MNI_RGGWM_,JHU_MNI_SFGWM_,JHU_MNI_SMGWM_,JHU_MNI_SOGWM_,JHU_MNI_SPGWM_,JHU_MNI_STGWM_,JHU_MNI_T_pole_gwm_,JHU_MNI_Hippo_,JHU_MNI_PHG_" + withName: "ASSO_VENTRAL" { + ext.filtering_list = params.FLF+"ASSO_F.txt" + ext.out_extension = "asso_F" + ext.remaining_extension = "asso_lost" + ext.reverse_suffix = true + ext.mid_suffix = "ventral" + 
ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ASSO_VENTRAL"}, + enabled: params.keep_intermediate_steps + ] + } - //**ATLAS**// - atlas.template="JHU_MNI_SS_T1_brain_182x218x182_conv_f.nii.gz" - atlas.JHU_8="JHU_MNI_all_noDWM_all8.nii.gz" - atlas.csf='JHU_MNI_CSF.nii.gz' -} + withName: "SPLIT_ASSO_VENTRAL_IFOF_UF" { + ext.filtering_list = params.FLF+"split_IFOF_UF.txt" + ext.out_extension = "asso_IFOF_f" + ext.remaining_extension = "asso_UF_f" + ext.keep = true + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/SPLIT_ASSO_VENTRAL_IFOF_UF"}, + enabled: params.keep_intermediate_steps + ] + } -if(params.processes) { - if(params.processes > Runtime.runtime.availableProcessors()) { - throw new RuntimeException("Number of processes higher than available CPUs.") + withName: "ASSO_DORSAL_F_P" { + ext.filtering_list = params.FLF+"ASSO.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "asso" + ext.remaining_extension = "asso_lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ASSO_DORSAL_F_P"}, + enabled: params.keep_intermediate_steps + ] } - else if(params.processes < 1) { - throw new RuntimeException("When set, number of processes must be >= 1 " + - "and smaller or equal to the number of CPUs.") + + withName: "ASSO_DORSAL_F_O_F_T" { + ext.filtering_list = params.FLF+"ASSO.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "asso" + ext.remaining_extension = "asso_lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ASSO_DORSAL_F_O_F_T"}, + enabled: params.keep_intermediate_steps + ] } - else { - 
executor.$local.cpus = params.processes + + withName: "ASSO_P_O" { + ext.filtering_list = params.FLF+"ASSO.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "asso" + ext.remaining_extension = "asso_lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ASSO_P_O"}, + enabled: params.keep_intermediate_steps + ] + } + + withName: "ASSO_P_T" { + ext.filtering_list = params.FLF+"ASSO.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "asso" + ext.remaining_extension = "asso_lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ASSO_P_T"}, + enabled: params.keep_intermediate_steps + ] } -} -if(params.output_dir) { - process.publishDir = [path: {"$params.output_dir/intermediate_processes/$sid/$task.process"}, mode: 'copy', enabled: false] - params.final_output_mni_space = [path: {"$params.output_dir/final_outputs/$sid/$params.template_space/"}, mode: 'copy'] - params.final_output_bundle_mni_space = [path: {"$params.output_dir/final_outputs/$sid/$params.template_space/bundles/"}, mode: 'copy'] - params.final_output_orig_space = [path: {"$params.output_dir/final_outputs/$sid/$params.orig_space/"}, mode: 'copy'] - params.final_output_bundles_orig_space = [path: {"$params.output_dir/final_outputs/$sid/$params.orig_space/bundles/"}, mode: 'copy'] -} + withName: "ASSO_O_T" { + ext.filtering_list = params.FLF+"ASSO.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "asso" + ext.remaining_extension = "asso_lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ASSO_O_T"}, + enabled: params.keep_intermediate_steps + ] + } -if 
(params.keep_intermediate_steps) { - cleanup = false - if (params.output_dir) { - process.publishDir = {"$params.output_dir/intermediate_processes/$sid/$task.process"} - params.final_output_mni_space = {"$params.output_dir/final_outputs/$sid/$params.template_space/"} - params.final_output_bundles_mni_space = {"$params.output_dir/final_outputs/$sid/$params.template_space/bundles/"} - params.final_output_orig_space = {"$params.output_dir/final_outputs/$sid/$params.orig_space/"} - params.final_output_bundles_orig_space = {"$params.output_dir/final_outputs/$sid/$params.orig_space/bundles/"} - } - else{ - process.publishDir = {"./results_extractorflow/intermediate_processes/$sid/$task.process"} - params.final_output_mni_space = {"./results_extractorflow/final_outputs/$sid/$params.template_space/"} - params.final_output_bundles_mni_space = {"./results_extractorflow/final_outputs/$sid/$params.template_space/bundles/"} - params.final_output_orig_space = {"./results_extractorflow/final_outputs/$sid/$params.orig_space/"} - params.final_output_bundles_orig_space = {"./results_extractorflow/final_outputs/$sid/$params.orig_space/bundles/"} + withName: "ASSO_INS" { + ext.filtering_list = params.FLF+"ASSO.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "asso" + ext.remaining_extension = "asso_lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ASSO_INS"}, + enabled: params.keep_intermediate_steps + ] } -} -else{ - if(params.output_dir) { - process.publishDir = [path: {"$params.output_dir/intermediate_processes/$sid/$task.process"}, enabled: false] - params.final_output_mni_space = [path: {"$params.output_dir/final_outputs/$sid/$params.template_space/"}, mode: 'copy'] - params.final_output_bundles_mni_space = [path: {"$params.output_dir/final_outputs/$sid/$params.template_space/bundles"}, mode: 'copy'] - 
params.final_output_orig_space = [path: {"$params.output_dir/final_outputs/$sid/$params.orig_space/"}, mode: 'copy'] + + withName: "ASSO_CING" { + ext.filtering_list = params.FLF+"ASSO_Cing.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "asso_all_Cing" + ext.remaining_extension = "asso_lost_Cing" + ext.keep = false + ext.extract_masks = "" + ext.distance = 1 + publishDir = [ + path: {"${params.intermediate_dir}/${meta.id}/EXTRACT/ASSO_CING"}, + enabled: params.keep_intermediate_steps + ] } -} -if(params.quick_registration) -{ - params.registration_script="antsRegistrationSyNQuick.sh" -} + ////////////////////// + // Merge processes + ////////////////////// + withName: "MERGE_BG_THAL" { + ext.op = "union" + ext.out_name = "BG_ipsi_Thal_all" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } -profiles { - fully_reproducible { - params.processes_bet_register_t1=1 + withName: "MERGE_BG_PUT" { + ext.op = "union" + ext.out_name = "BG_ipsi_Put_all" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_BG_CAUD" { + ext.op = "union" + ext.out_name = "BG_ipsi_Caud_all" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true } - macos { - process.scratch="/tmp" + withName: "MERGE_CC_HOMOTOPIC" { + ext.op = "union" + ext.out_name = "CC_homo" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true } - extract_bundles { - params.extended=true + withName: "MERGE_ASSO_VENTRAL" { + ext.op = "union" + ext.out_name = "asso_all_ventral_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_DORSAL_F_P" { + ext.op = "union" + ext.out_name = "asso_F_P_dorsal_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_DORSAL" { + ext.op = "union" + ext.out_name = "asso_all_dorsal_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_P_O" { + ext.op = 
"union" + ext.out_name = "asso_all_P_O_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_P_T" { + ext.op = "union" + ext.out_name = "asso_all_P_T_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_O_T" { + ext.op = "union" + ext.out_name = "asso_all_O_T_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_INS" { + ext.op = "union" + ext.out_name = "asso_all_Ins_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_BE_FRONTAL_GYRUS" { + ext.op = "union" + ext.out_name = "asso_all_intraF_be_f" + ext.out_suffix = "_u" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_EE_FRONTAL_GYRUS" { + ext.op = "union" + ext.out_name = "asso_all_intraF_ee_f" + ext.out_suffix = "_u" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_BE_OCCIPITAL_GYRUS" { + ext.op = "union" + ext.out_name = "asso_all_intraO_be_f" + ext.out_suffix = "_u" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_EE_OCCIPITAL_GYRUS" { + ext.op = "union" + ext.out_name = "asso_all_intraO_ee_f" + ext.out_suffix = "_u" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_BE_PARIETAL_GYRUS" { + ext.op = "union" + ext.out_name = "asso_all_intraP_be_f" + ext.out_suffix = "_u" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_EE_PARIETAL_GYRUS" { + ext.op = "union" + ext.out_name = "asso_all_intraP_ee_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_BE_TEMPORAL_GYRUS" { + ext.op = "union" + ext.out_name = "asso_all_intraT_be_f" + ext.out_suffix = "_u" + ext.save_empty = true + ext.force = true + } + + withName: "MERGE_ASSO_EE_TEMPORAL_GYRUS" { + ext.op = "union" + ext.out_name = "asso_all_intraT_ee_f" + ext.out_suffix = "" + ext.save_empty = true + ext.force = true + } + + ////////////////////// 
+ // Final outputs + ////////////////////// + + withName: "TRK_PLAUSIBLE" { + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/mni_space/"}, + mode: 'copy' + ] + } + + withName: "TRK_UNPLAUSIBLE" { + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/mni_space/"}, + mode: 'copy' + ] + } + + withName: "COPY_T1_TO_ORIG" { + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/orig_space/"}, + mode: 'copy' + ] + } + + + ////////////////////// + // Bundle extraction + ////////////////////// + + withName: "RENAME_CC_HOMO_FRONTAL" { + ext.op = "union" + ext.out_name = "cc_homotopic_frontal" + ext.out_suffix = "_" + params.mni_space + ext.save_empty = true + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CC_HOMO_OCCIPITAL" { + ext.op = "union" + ext.out_name = "cc_homotopic_occipital" + ext.out_suffix = "_" + params.mni_space + ext.save_empty = true + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CC_HOMO_TEMPORAL" { + ext.op = "union" + ext.out_name = "cc_homotopic_temporal" + ext.out_suffix = "_" + params.mni_space + ext.save_empty = true + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CC_HOMO_PARIETAL" { + ext.op = "union" + ext.out_name = "cc_homotopic_parietal" + ext.out_suffix = "_" + params.mni_space + ext.save_empty = true + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CC_HOMO_INSULAR" { + ext.out_name = "cc_homotopic_insular" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CC_HOMO_CINGULUM" { + 
ext.out_name = "cc_homotopic_cingulum" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CORTICO_STRIATE" { + ext.op = "union" + ext.out_name = "corticostriatal" + ext.out_suffix = "_" + params.mni_space + ext.save_empty = true + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CORONARADIATA" { + ext.op = "union" + ext.out_name = "coronaradiata" + ext.out_suffix = "_" + params.mni_space + ext.save_empty = true + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_OPTICAL_RADIATION" { + ext.op = "union" + ext.out_name = "optical_radiation" + ext.out_suffix = "_" + params.mni_space + ext.save_empty = true + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_USHAPE" { + ext.out_name = "ushape" + ext.out_suffix = "_" + params.mni_space + ext.force = false + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CING" { + ext.out_name = "cing" + ext.out_suffix = "_" + params.mni_space + ext.force = false + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_SLF" { + ext.op = "union" + ext.out_name = "slf" + ext.out_suffix = "_" + params.mni_space + ext.save_empty = true + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_AF" { + ext.out_name = "af" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: 
"RENAME_CORTICOPONTINE_F" { + ext.filtering_list = params.FLF+"frontal.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "corticopontine_frontal_${params.mni_space}" + ext.remaining_extension = "lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CORTICOPONTINE_POT" { + ext.filtering_list = params.FLF+"parieto_temporo_occipital.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "corticopontine_POT_${params.mni_space}" + ext.remaining_extension = "lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CST" { + ext.filtering_list = params.FLF+"fronto_parietal.txt" + ext.reverse_suffix = true // Files are named _${list}_${side}.txt + ext.out_extension = "cst_${params.mni_space}" + ext.remaining_extension = "lost" + ext.keep = false + ext.extract_masks = "" + ext.distance = params.distance + + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_FORNIX" { + ext.out_name = "fornix" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_IFOF" { + ext.out_name = "ifof" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_UF" { + ext.out_name = "uf" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_ILF" { + 
ext.out_name = "ilf" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_BRAINSTEM" { + ext.out_name = "brainstem" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_CEREBELLUM" { + ext.out_name = "cerebellum" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] + } + + withName: "RENAME_ACCX" { + ext.out_name = "accx" + ext.out_suffix = "_" + params.mni_space + ext.force = true + publishDir = [ + path: {"${params.final_outputs_dir}/${meta.id}/bundles_mni/"}, + mode: 'copy' + ] } } diff --git a/nf-test.config b/nf-test.config new file mode 100644 index 0000000..12db2c1 --- /dev/null +++ b/nf-test.config @@ -0,0 +1,18 @@ +config { + // location for all nf-tests + testsDir "." 
+ + // nf-test directory including temporary files for each test + workDir System.getenv("NFT_WORKDIR") ?: "tests/.runs" + + // run all test with the defined docker profile from the main nextflow.config + profile "docker,devcontainer" + + ignore ".venv/**/*.nf*" + + plugins { + repository "https://raw.githubusercontent.com/scilus/nf-neuro/main/tests/plugins.json" + + load "nft-nifti@0.0.1" + } +} \ No newline at end of file diff --git a/subworkflows/local/extension.nf b/subworkflows/local/extension.nf new file mode 100644 index 0000000..8bf7f44 --- /dev/null +++ b/subworkflows/local/extension.nf @@ -0,0 +1,131 @@ +include { FILTER_LIST_EACH as RENAME_CORTICOPONTINE_F } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_EACH as RENAME_CORTICOPONTINE_POT } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_EACH as RENAME_CST } from '../../modules/local/filtering/filter_with_list.nf' + +include { TRACTOGRAM_MATH as RENAME_CC_HOMO_FRONTAL } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as RENAME_CC_HOMO_OCCIPITAL } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as RENAME_CC_HOMO_TEMPORAL } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as RENAME_CC_HOMO_PARIETAL } from '../../modules/local/merge/main.nf' + +include { TRACTOGRAM_MATH as RENAME_CORTICO_STRIATE } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as RENAME_CORONARADIATA } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as RENAME_OPTICAL_RADIATION } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as RENAME_SLF } from '../../modules/local/merge/main.nf' + +include { COPY_FILE as RENAME_CC_HOMO_INSULAR } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_CC_HOMO_CINGULUM } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_USHAPE } from '../../modules/local/utils/copy_file.nf' 
+include { COPY_FILE as RENAME_CING } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_AF } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_FORNIX } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_IFOF } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_UF } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_ILF } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_BRAINSTEM } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_CEREBELLUM } from '../../modules/local/utils/copy_file.nf' +include { COPY_FILE as RENAME_ACCX } from '../../modules/local/utils/copy_file.nf' + +workflow EXTRACT_BUNDLES { + take: + trks + sides + + main: + empty_lists = Channel.from([""]) + + /* RENAME CC CC_Homotopic */ + RENAME_CC_HOMO_FRONTAL(trks.key_CC_Homotopic_frontal_for_rename.map { sid, _list, tractograms -> [sid, [], tractograms]}) + RENAME_CC_HOMO_OCCIPITAL(trks.key_CC_Homotopic_occipital_for_rename.map { sid, _list, tractograms -> [sid, [], tractograms]}) + RENAME_CC_HOMO_TEMPORAL(trks.key_CC_Homotopic_temporal_for_rename.map { sid, _list, tractograms -> [sid, [], tractograms]}) + RENAME_CC_HOMO_PARIETAL(trks.key_CC_Homotopic_parietal_for_rename.map { sid, _list, tractograms -> [sid, [], tractograms]}) + RENAME_CC_HOMO_INSULAR(trks.key_CC_Homotopic_insular_for_rename.map { sid, _list, tractograms -> [sid, [], tractograms]}) + RENAME_CC_HOMO_CINGULUM(trks.key_CC_Homotopic_cingulum_for_rename.map { sid, _list, tractograms -> [sid, [], tractograms]}) + + /* RENAME CORTICO_STRIATE */ + corticostriate_for_rename = trks.key_BG_ipsi_Caud_for_rename.concat(trks.key_BG_ipsi_Put_for_rename).groupTuple(by:[0,1]) + RENAME_CORTICO_STRIATE(corticostriate_for_rename) + + /* RENAME Corona radiata */ + RENAME_CORONARADIATA(trks.key_BG_ipsi_Thal_for_rename) + + /* RENAME OPTICAL RADIATION */ + 
RENAME_OPTICAL_RADIATION(trks.key_optic_radiation_for_rename.map { sid, side, _list, tractograms -> [sid, side, tractograms] }) + + /* RENAME U-SHAPE */ + RENAME_USHAPE(trks.key_asso_u_shape_for_rename) + + /* RENAME CINGULUM */ + RENAME_CING(trks.key_Cing_for_rename) + + /* RENAME SLF */ + slf_for_rename = trks.key_asso_all_intra_inter_dorsal_all_f_O_for_rename.concat(trks.key_asso_all_intra_inter_dorsal_f_p_for_rename).groupTuple(by:[0,1]) + .map { sid, side, _list, tractogram -> [sid, side, tractogram] } + RENAME_SLF(slf_for_rename) + + /* RENAME AF */ + af_for_rename = trks.key_asso_all_intra_inter_dorsal_all_f_T_for_rename + .map { sid, side, _asso_list, tractogram -> [sid, side, tractogram] } + RENAME_AF(af_for_rename) + + /* RENAME Cortico-pontine_F */ + RENAME_CORTICOPONTINE_F(trks.key_brainstem_corticopontine_frontal_for_rename, empty_lists, sides) + + /* RENAME cortico-pontine_POT */ + RENAME_CORTICOPONTINE_POT(trks.key_brainstem_ee_corticopontine_parietotemporooccipital_for_rename, empty_lists, sides) + + /* RENAME Pyramidal tract (CST) */ + RENAME_CST(trks.key_brainstem_pyramidal_for_rename, empty_lists, sides) + + /* RENAME fornix */ + fornix_with_list = trks.key_fornix_for_rename.map { sid, tractogram -> [sid, "", tractogram] } + RENAME_FORNIX(fornix_with_list) + + /* RENAME IFOF */ + RENAME_IFOF(trks.key_asso_IFOF_for_rename) + + /* RENAME UF */ + RENAME_UF(trks.key_asso_UF_for_rename) + + /* RENAME ILF */ + RENAME_ILF(trks.key_all_O_T_for_rename) + + /* RENAME BRAINSTEM */ + brainstem_with_list = trks.key_brainstem_for_rename.map { sid, tractogram -> [sid, "", tractogram] } + RENAME_BRAINSTEM(brainstem_with_list) + + /* RENAME CEREBELLUM */ + cerebellum_with_list = trks.key_cerebellum_for_rename.map { sid, tractogram -> [sid, "", tractogram] } + RENAME_CEREBELLUM(cerebellum_with_list) + + /* RENAME AC_CX */ + accx_with_list = trks.key_accx_for_rename.map { sid, tractogram -> [sid, "", tractogram] } + RENAME_ACCX(accx_with_list) + + 
bundles_to_register = RENAME_CC_HOMO_FRONTAL.out.tractogram + .concat(RENAME_CC_HOMO_OCCIPITAL.out.tractogram) + .concat(RENAME_CC_HOMO_TEMPORAL.out.tractogram) + .concat(RENAME_CC_HOMO_PARIETAL.out.tractogram) + .concat(RENAME_CC_HOMO_INSULAR.out.output_file) + .concat(RENAME_CC_HOMO_CINGULUM.out.output_file) + .concat(RENAME_CORTICO_STRIATE.out.tractogram) + .concat(RENAME_CORONARADIATA.out.tractogram) + .concat(RENAME_OPTICAL_RADIATION.out.tractogram) + .concat(RENAME_USHAPE.out.output_file) + .concat(RENAME_CING.out.output_file) + .concat(RENAME_SLF.out.tractogram) + .concat(RENAME_AF.out.output_file) + .concat(RENAME_CORTICOPONTINE_F.out.extracted) + .concat(RENAME_CORTICOPONTINE_POT.out.extracted) + .concat(RENAME_CST.out.extracted) + .concat(RENAME_FORNIX.out.output_file) + .concat(RENAME_IFOF.out.output_file) + .concat(RENAME_UF.out.output_file) + .concat(RENAME_ILF.out.output_file) + .concat(RENAME_BRAINSTEM.out.output_file) + .concat(RENAME_CEREBELLUM.out.output_file) + .concat(RENAME_ACCX.out.output_file) + .concat(trks.key_plausible_commissural) + + emit: + bundles = bundles_to_register +} diff --git a/subworkflows/local/extraction.nf b/subworkflows/local/extraction.nf new file mode 100644 index 0000000..51ab3e1 --- /dev/null +++ b/subworkflows/local/extraction.nf @@ -0,0 +1,955 @@ +include { FILTER_LIST as EXTRACT_FORNIX } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST as EXTRACT_EE_CEREBELLUM } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST as EXTRACT_EE_BRAINSTEM } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST as REMOVE_OUT_OF_CGM_DWM } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST as EXTRACT_ALL_COMMISSURAL } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST as EXTRACT_PLAUSIBLE_CC_CX } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST as 
EXTRACT_PLAUSIBLE_AC_CX } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST as SPLIT_NO_CC_ASSO_AND_BG } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_EACH as SPLIT_BG_THAL } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_EACH as SPLIT_BG_PUT } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_EACH as SPLIT_BG_CAUD } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_EACH as CC_HOMOTOPIC } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as ASSO_VENTRAL } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as SPLIT_ASSO_VENTRAL_IFOF_UF } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as ASSO_DORSAL_F_P } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as ASSO_DORSAL_F_O_F_T } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as ASSO_P_O } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as ASSO_P_T } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as ASSO_O_T } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as ASSO_INS } from '../../modules/local/filtering/filter_with_list.nf' +include { FILTER_LIST_SIDE as ASSO_CING } from '../../modules/local/filtering/filter_with_list.nf' + +include { TRACTOGRAM_MATH as MERGE_BG_THAL } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_BG_PUT } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_BG_CAUD } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_CC_HOMOTOPIC } from 
'../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_VENTRAL } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_DORSAL_F_P } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_DORSAL } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_P_O } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_P_T } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_O_T } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_INS } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_BE_FRONTAL_GYRUS } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_EE_FRONTAL_GYRUS } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_BE_OCCIPITAL_GYRUS } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_EE_OCCIPITAL_GYRUS } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_BE_PARIETAL_GYRUS } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_EE_PARIETAL_GYRUS } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_BE_TEMPORAL_GYRUS } from '../../modules/local/merge/main.nf' +include { TRACTOGRAM_MATH as MERGE_ASSO_EE_TEMPORAL_GYRUS } from '../../modules/local/merge/main.nf' + +include { EXTRACT_BUNDLES } from './extension.nf' + +workflow EXTRACT { + take: + unplausible + wb + sides + mni_tractograms + + main: + // Init channels with empty values as a default value for "each" + empty_lists = Channel.from([""]) + empty_sides = Channel.from([""]) + + /* + Fornix + */ + EXTRACT_FORNIX(unplausible) + + /* + Cerebellum + */ + EXTRACT_EE_CEREBELLUM(wb) + EXTRACT_PLAUSIBLE_CEREBELLUM(EXTRACT_EE_CEREBELLUM.out.remaining) + + /* + Brainstem + */ + EXTRACT_EE_BRAINSTEM(EXTRACT_EE_CEREBELLUM.out.extracted) + 
EXTRACT_PLAUSIBLE_BRAINSTEM(EXTRACT_EE_BRAINSTEM.out.remaining)

    /*
    Brain - Either end in CGM SWM
    */
    REMOVE_OUT_OF_CGM_DWM(EXTRACT_EE_BRAINSTEM.out.extracted)
    EXTRACT_ALL_COMMISSURAL(REMOVE_OUT_OF_CGM_DWM.out.extracted)
    // The three commissural extractions all fork the same `remaining` channel
    // (DSL2 auto-forks a channel used by several process calls).
    EXTRACT_PLAUSIBLE_CC_CX(EXTRACT_ALL_COMMISSURAL.out.remaining)
    EXTRACT_PLAUSIBLE_AC_CX(EXTRACT_ALL_COMMISSURAL.out.remaining)
    EXTRACT_PLAUSIBLE_CC_BG(EXTRACT_ALL_COMMISSURAL.out.remaining)

    /*
    Split not CC in asso BG and not BG
    */
    SPLIT_NO_CC_ASSO_AND_BG(EXTRACT_ALL_COMMISSURAL.out.extracted)


    // Comma-separated ROI list (from params) driving the BG thal/put splits below.
    bg_list = Channel.from(params.bg_lists?.tokenize(','))
    /*
    BG THAL
    */
    SPLIT_BG_THAL(SPLIT_NO_CC_ASSO_AND_BG.out.extracted, bg_list, sides)
    bg_ipsi_thal_for_rename = SPLIT_BG_THAL.out.extracted_with_side.groupTuple(by:[0,1])
    // CuGWM + LGWM thalamic sub-bundles regrouped together: the optic radiations.
    cugwm_for_combine = SPLIT_BG_THAL.out.extracted_with_side_list.filter{it[2]=='CuGWM'}
    lgwm_for_combine = SPLIT_BG_THAL.out.extracted_with_side_list.filter{it[2]=='LGWM'}
    optic_radiation_for_rename = cugwm_for_combine.concat(lgwm_for_combine).groupTuple(by:[0,1])
    // [sid, [], tractograms]: the empty middle element fills the "side" slot for
    // whole-subject merges — NOTE(review): confirm against the MERGE_* input tuple.
    bg_ipsi_thal_list_for_merge = SPLIT_BG_THAL.out.extracted.groupTuple()
        .map { sid, tractograms -> [sid, [], tractograms] }
    MERGE_BG_THAL(bg_ipsi_thal_list_for_merge)

    /*
    BG PUT
    */
    SPLIT_BG_PUT(SPLIT_NO_CC_ASSO_AND_BG.out.extracted, bg_list, sides)
    bg_ipsi_put_list_for_merge = SPLIT_BG_PUT.out.extracted.groupTuple()
        .map { sid, tractograms -> [sid, [], tractograms] }
    MERGE_BG_PUT(bg_ipsi_put_list_for_merge)

    /*
    BG CAUD
    */
    // Unlike bg_list above, this one is a plain Groovy list (not a Channel) —
    // presumably consumed through an `each` input; NOTE(review): confirm.
    bg_caud_list = params.bg_caud_lists?.tokenize(',')
    SPLIT_BG_CAUD(SPLIT_NO_CC_ASSO_AND_BG.out.extracted, bg_caud_list, sides)
    bg_ipsi_caud_list_for_merge = SPLIT_BG_CAUD.out.extracted.groupTuple()
        .map { sid, tractograms -> [sid, [], tractograms] }
    MERGE_BG_CAUD(bg_ipsi_caud_list_for_merge)

    SPLIT_ASSO_IN_HEMI(SPLIT_NO_CC_ASSO_AND_BG.out.remaining, empty_lists, sides)

    /*
    Extracting U-shaped and streamlines restricted to Cortical GM and removing them from asso
    */
    SPLIT_USHAPE_CGM_ASSO(SPLIT_ASSO_IN_HEMI.out.asso_for_extract_u_shape)

    /*
    Extracting unplausible long-range association streamlines passing through
    subcortical structures (Cd, Put, GP, Thal, Amyg)
    */
    REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO(SPLIT_USHAPE_CGM_ASSO.out.asso_for_remove_long_range, empty_lists)

    asso_all_intra_inter = REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side
    asso_all_intra_inter_list = asso_all_intra_inter.groupTuple().map{it.flatten().toList()}
    assoCGM_list = SPLIT_USHAPE_CGM_ASSO.out.assoCGM.groupTuple().map{it.flatten().toList()}

    /*
    CC Homotopic
    */

    cc_homotopic_pairs = params.cc_homotopic_pairs?.tokenize(',')
    CC_HOMOTOPIC(EXTRACT_PLAUSIBLE_CC_CX.out.extracted, cc_homotopic_pairs, empty_sides)


    /*
    Filter + Concat frontal
    */
    // `extracted_with_list` tuples carry the region label at index 1; each filter
    // keeps one homotopic region, the concat + groupTuple regroups them per subject.
    CC_IFGWM_for_combine_frontal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='IFGWM'}
    CC_SFGWM_for_combine_frontal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='SFGWM'}
    CC_MFGWM_for_combine_frontal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='MFGWM'}
    CC_MFOGWM_for_combine_frontal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='MFOGWM'}
    CC_LFOGWM_for_combine_frontal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='LFOGWM'}
    CC_PrCGWM_for_combine_frontal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='PrCGWM'}
    CC_RGGWM_for_combine_frontal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='RGGWM'}
    CC_Homotopic_frontal_for_rename = CC_IFGWM_for_combine_frontal.concat(CC_SFGWM_for_combine_frontal).concat(CC_MFGWM_for_combine_frontal).concat(CC_MFOGWM_for_combine_frontal).concat(CC_LFOGWM_for_combine_frontal).concat(CC_PrCGWM_for_combine_frontal).concat(CC_RGGWM_for_combine_frontal).groupTuple(by:0).map{ it }


    /*
    Filter + Concat occipital
    */
    CC_SOGWM_for_combine_occipital = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='SOGWM'}
    CC_MOGWM_for_combine_occipital = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='MOGWM'}
    CC_IOGWM_for_combine_occipital = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='IOGWM'}
    CC_CuGWM_for_combine_occipital = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='CuGWM'}
    CC_LGWM_for_combine_occipital = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='LGWM'}

    CC_Homotopic_occipital_for_rename = CC_SOGWM_for_combine_occipital.concat(CC_MOGWM_for_combine_occipital).concat(CC_IOGWM_for_combine_occipital).concat(CC_CuGWM_for_combine_occipital).concat(CC_LGWM_for_combine_occipital).groupTuple(by:0)

    /*
    Filter + Concat temporal
    */
    CC_STGWM_for_combine_temporal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='STGWM'}
    CC_T_pole_gwm_for_combine_temporal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='T_pole_gwm'}
    CC_MTGWM_for_combine_temporal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='MTGWM'}
    CC_ITGWM_for_combine_temporal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='ITGWM'}
    CC_PHG_for_combine_temporal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='PHG'}
    CC_Hippo_for_combine_temporal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='Hippo'}
    CC_FuGWM_for_combine_temporal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='FuGWM'}

    CC_Homotopic_temporal_for_rename = CC_STGWM_for_combine_temporal.concat(CC_T_pole_gwm_for_combine_temporal).concat(CC_MTGWM_for_combine_temporal).concat(CC_ITGWM_for_combine_temporal).concat(CC_PHG_for_combine_temporal).concat(CC_Hippo_for_combine_temporal).concat(CC_FuGWM_for_combine_temporal).groupTuple(by:0)

    /*
    Filter + Concat parietal
    */
    CC_SPGWM_for_combine_parietal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='SPGWM'}
    CC_SMGWM_for_combine_parietal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='SMGWM'}
    CC_PrCuGWM_for_combine_parietal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='PrCuGWM'}
    CC_PoCGWM_for_combine_parietal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='PoCGWM'}
    CC_AGWM_for_combine_parietal = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='AGWM'}
    CC_Homotopic_parietal_for_rename = CC_SPGWM_for_combine_parietal.concat(CC_SMGWM_for_combine_parietal).concat(CC_PrCuGWM_for_combine_parietal).concat(CC_PoCGWM_for_combine_parietal).concat(CC_AGWM_for_combine_parietal).groupTuple(by:0)


    /*
    Filter CC Cingulum
    */
    CC_Homotopic_cingulum_for_rename = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='CingGWM'}

    /*
    Filter CC Ins
    */
    CC_Homotopic_insular_for_rename = CC_HOMOTOPIC.out.extracted_with_list.filter{it[1]=='Ins'}


    /*
    MERGE CC_Homotopic
    */
    CC_Homotopic_list_for_merge = CC_HOMOTOPIC.out.extracted.groupTuple()
        .map { sid, tractograms -> [sid, [], tractograms] }
    MERGE_CC_HOMOTOPIC(CC_Homotopic_list_for_merge)

    /*
    COMMISSURAL
    */

    all_cc_for_commissural = EXTRACT_ALL_COMMISSURAL.out.remaining.join(EXTRACT_PLAUSIBLE_AC_CX.out.extracted).join(EXTRACT_PLAUSIBLE_CC_BG.out.plausible).join(MERGE_CC_HOMOTOPIC.out.tractogram)
    CC_ALL_COMMISSURAL(all_cc_for_commissural)

    /*
    ASSO VENTRAL
    */

    asso_ventral_lists = params.asso_ventral_lists?.tokenize(',')
    ASSO_VENTRAL(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_ventral_lists)

    // flatten/toList then destructure: assumes exactly three ventral tractograms
    // per (sid, side) — NOTE(review): confirm asso_ventral_lists always has 3 entries.
    asso_all_intra_inter_ventral_all_for_merge = ASSO_VENTRAL.out.extracted_with_side.groupTuple(by:[0,1]).map{it.flatten().toList()}
        .map { sid, side, t1, t2, t3 -> [sid, side, [t1, t2, t3]] }
    MERGE_ASSO_VENTRAL(asso_all_intra_inter_ventral_all_for_merge)

    SPLIT_ASSO_VENTRAL_IFOF_UF(MERGE_ASSO_VENTRAL.out.tractogram_with_side, empty_lists)

    /*
    ASSO DORSAL
    */

    asso_dorsal_f_p_lists = params.asso_dorsal_f_p_lists?.tokenize(',')
    ASSO_DORSAL_F_P(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_dorsal_f_p_lists)
    asso_all_intra_inter_dorsal_f_p_list_for_merge = ASSO_DORSAL_F_P.out.extracted_with_side.groupTuple(by:[0,1]).map{it}
MERGE_ASSO_DORSAL_F_P(asso_all_intra_inter_dorsal_f_p_list_for_merge)

    asso_dorsal_f_o_f_t_list=params.asso_dorsal_f_o_f_t_lists?.tokenize(',')
    ASSO_DORSAL_F_O_F_T(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_dorsal_f_o_f_t_list)
    asso_all_intra_inter_dorsal_all_f_T_for_rename = ASSO_DORSAL_F_O_F_T.out.extracted_with_side_list.filter{it[2]=='F_T_dorsal'}
    asso_all_intra_inter_dorsal_all_f_O_for_rename = ASSO_DORSAL_F_O_F_T.out.extracted_with_side_list.filter{it[2]=='F_O_dorsal'}

    // Join the merged F_P result with the F_O / F_T extractions on (sid, side),
    // then destructure to exactly three tractograms — NOTE(review): confirm arity.
    asso_all_intra_inter_dorsal_all_for_merge = MERGE_ASSO_DORSAL_F_P.out.tractogram_with_side.groupTuple(by:[0,1]).join(ASSO_DORSAL_F_O_F_T.out.extracted_with_side.groupTuple(by:[0,1]), by:[0,1]).map{it.flatten().toList()}
        .map { sid, side, t1, t2, t3 -> [sid, side, [t1, t2, t3]] }
    MERGE_ASSO_DORSAL(asso_all_intra_inter_dorsal_all_for_merge)

    /*
    ASSO P_O
    */

    asso_p_o_list = params.asso_p_o_lists?.tokenize(',')
    ASSO_P_O(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_p_o_list)

    asso_intra_inter_p_o_list_for_merge = ASSO_P_O.out.extracted_with_side.groupTuple(by:[0,1]).map{it}
    MERGE_P_O(asso_intra_inter_p_o_list_for_merge)

    /*
    ASSO P_T
    */

    asso_p_t_list = params.asso_p_t_lists?.tokenize(',')
    ASSO_P_T(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_p_t_list)

    asso_intra_inter_p_t_list_for_merge = ASSO_P_T.out.extracted_with_side.groupTuple(by:[0,1]).map{it}
    MERGE_P_T(asso_intra_inter_p_t_list_for_merge)

    /*
    ASSO O_T
    */

    asso_o_t_list = params.asso_o_t_lists?.tokenize(',')
    ASSO_O_T(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_o_t_list)

    asso_intra_inter_o_t_list_for_merge = ASSO_O_T.out.extracted_with_side.groupTuple(by:[0,1]).map{it}
    MERGE_O_T(asso_intra_inter_o_t_list_for_merge)

    /*
    ASSO Ins
    */

    asso_ins_list = params.asso_ins_lists?.tokenize(',')
    ASSO_INS(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_ins_list)

    asso_intra_inter_ins_list_for_merge = ASSO_INS.out.extracted_with_side.groupTuple(by:[0,1]).map{it}
    MERGE_INS(asso_intra_inter_ins_list_for_merge)

    /*
    ASSO CING
    */
    ASSO_CING(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, empty_lists)

    /*
    BE ASSO FRONTAL: extracting all streamlines with both ends in a frontal gyrus (U-shape > 20 mm)
    */

    asso_frontal_be_list=params.asso_frontal_be_lists?.tokenize(',')
    ASSO_BE_FRONTAL_GYRUS(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_frontal_be_list)

    asso_frontal_be_list_for_merge = ASSO_BE_FRONTAL_GYRUS.out.extracted_with_side.groupTuple(by:[0,1])
        .map { sid, side, _gyrus, tractograms -> [sid, side, tractograms]}
    MERGE_ASSO_BE_FRONTAL_GYRUS(asso_frontal_be_list_for_merge)

    /*
    EE ASSO FRONTAL: extracting all streamlines with either end in a frontal gyrus (U-shape > 20 mm)
    */

    // Each pair carries its own max streamline length (mm) for filtering.
    asso_frontal_ee_list = Channel.from(['SFG_MFG', 70],
        ['SFG_IFG', 70],
        ['SFG_PrCG', 90],
        ['SFG_FrOrbG', 70],
        ['MFG_IFG', 70],
        ['MFG_PrCG', 110],
        ['MFG_FrOrbG', 60],
        ['IFG_PrCG', 110],
        ['IFG_FrOrbG', 60])
    asso_frontal_ee_for_extract = REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side.combine(asso_frontal_ee_list)
    ASSO_EE_FRONTAL_GYRUS(asso_frontal_ee_for_extract)

    asso_frontal_ee_list_for_merge = ASSO_EE_FRONTAL_GYRUS.out.extracted_with_side.groupTuple(by:[0,1])
        .map { sid, side, _gyrus, tractograms -> [sid, side, tractograms]}
    MERGE_ASSO_EE_FRONTAL_GYRUS(asso_frontal_ee_list_for_merge)

    /*
    BE ASSO OCCIPITAL: extracting all streamlines with both ends in an occipital gyrus (U-shape > 20 mm)
    */

    asso_occipital_be_list = params.asso_occipital_be_lists?.tokenize(',')
    ASSO_BE_OCCIPITAL_GYRUS(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_occipital_be_list)

    asso_occipital_be_list_for_merge = ASSO_BE_OCCIPITAL_GYRUS.out.extracted_with_side.groupTuple(by:[0,1])
        .map { sid, side, _gyrus, tractograms -> [sid, side, tractograms]}
    MERGE_ASSO_BE_OCCIPITAL_GYRUS(asso_occipital_be_list_for_merge)

    /*
    EE ASSO OCCIPITAL: extracting all streamlines with either end in an occipital gyrus (U-shape > 20 mm)
    */

    asso_occipital_ee_list = Channel.from(['MOG_SOG', 60],['MOG_IOG', 50], ['MOG_CuG', 60], ['SOG_CuG', 30], ['CuG_LG', 60])
    asso_occipital_ee_for_extract = REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side.combine(asso_occipital_ee_list)
    ASSO_EE_OCCIPITAL_GYRUS(asso_occipital_ee_for_extract)

    asso_occipital_ee_list_for_merge = ASSO_EE_OCCIPITAL_GYRUS.out.extracted_with_side.groupTuple(by:[0,1])
        .map { sid, side, _gyrus, tractograms -> [sid, side, tractograms]}
    MERGE_ASSO_EE_OCCIPITAL_GYRUS(asso_occipital_ee_list_for_merge)

    /*
    BE ASSO PARIETAL: extracting all streamlines with both ends in a parietal gyrus (U-shape > 20 mm)
    */

    asso_parietal_be_list = params.asso_parietal_be_lists?.tokenize(',')
    ASSO_BE_PARIETAL_GYRUS(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_parietal_be_list)

    asso_parietal_be_list_for_merge = ASSO_BE_PARIETAL_GYRUS.out.extracted_with_side.groupTuple(by:[0,1])
        .map { sid, side, _gyrus, tractograms -> [sid, side, tractograms]}
    MERGE_ASSO_BE_PARIETAL_GYRUS(asso_parietal_be_list_for_merge)

    /*
    EE ASSO PARIETAL: extracting all streamlines with either end in a parietal gyrus (U-shape > 20 mm)
    */

    // 10000 for AG_PoCG effectively disables the length cap for that pair.
    asso_parietal_ee_list = Channel.from(['SPG_PoCG', 50], ['SPG_AG', 80], ['SPG_SMG', 70], ['SPG_PrCuG', 50], ['AG_PoCG', 10000], ['AG_SMG', 90], ['AG_PrCuG', 90] , ['SMG_PoCG', 60], ['SMG_PrCuG',100], ['PoCG_PrCuG', 80])
    asso_parietal_ee_for_extract = REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side.combine(asso_parietal_ee_list)
    ASSO_EE_PARIETAL_GYRUS(asso_parietal_ee_for_extract)

    asso_parietal_ee_list_for_merge = ASSO_EE_PARIETAL_GYRUS.out.extracted_with_side.groupTuple(by:[0,1])
        .map { sid, side, _gyrus, tractograms -> [sid, side, tractograms]}
MERGE_ASSO_EE_PARIETAL_GYRUS(asso_parietal_ee_list_for_merge)

    /*
    BE ASSO TEMPORAL: extracting all streamlines with both ends in a temporal gyrus and merge (U-shape > 20 mm)
    */
    asso_temporal_be_list = params.asso_temporal_be_lists?.tokenize(',')
    ASSO_BE_TEMPORAL_GYRUS(REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side, asso_temporal_be_list)

    asso_temporal_be_list_for_merge = ASSO_BE_TEMPORAL_GYRUS.out.extracted_with_side.groupTuple(by:[0,1])
        .map { sid, side, _gyrus, tractograms -> [sid, side, tractograms]}
    MERGE_ASSO_BE_TEMPORAL_GYRUS(asso_temporal_be_list_for_merge)

    /*
    EE ASSO TEMPORAL: extracting all streamlines with either end in a temporal gyrus and merge (U-shape > 20 mm)
    */

    // 100000 for MTG_Tpole effectively disables the length cap for that pair.
    asso_temporal_ee_list = Channel.from(['STG_MTG', 60], ['STG_ITG',80], ['STG_Tpole',110], ['MTG_ITG',60], ['MTG_Tpole', 100000], ['ITG_Tpole', 60])
    asso_temporal_ee_for_extract = REMOVE_UNPLAUSIBLE_LONG_RANGE_ASSO.out.extracted_with_side.combine(asso_temporal_ee_list)
    ASSO_EE_TEMPORAL_GYRUS(asso_temporal_ee_for_extract)

    asso_temporal_ee_list_for_merge = ASSO_EE_TEMPORAL_GYRUS.out.extracted_with_side.groupTuple(by:[0,1])
        .map { sid, side, _gyrus, tractograms -> [sid, side, tractograms]}
    MERGE_ASSO_EE_TEMPORAL_GYRUS(asso_temporal_ee_list_for_merge)

    /*
    Extracting plausible streamlines
    */
    // Every plausible sub-bundle channel is concatenated and regrouped per subject
    // (tuple element 0 = sid) before the final union in TRK_PLAUSIBLE.
    merge_trk_plausible = EXTRACT_FORNIX.out.extracted.concat(
        EXTRACT_PLAUSIBLE_CEREBELLUM.out.plausible,
        EXTRACT_PLAUSIBLE_BRAINSTEM.out.brainstem_for_trk_plausible,
        EXTRACT_PLAUSIBLE_AC_CX.out.extracted,
        EXTRACT_PLAUSIBLE_CC_BG.out.plausible,
        MERGE_BG_THAL.out.tractogram,
        MERGE_BG_PUT.out.tractogram,
        MERGE_BG_CAUD.out.tractogram,
        SPLIT_USHAPE_CGM_ASSO.out.asso_u_shape_for_trk_plausible,
        MERGE_CC_HOMOTOPIC.out.tractogram,
        MERGE_ASSO_DORSAL.out.tractogram,
        MERGE_ASSO_VENTRAL.out.tractogram,
        MERGE_P_O.out.tractogram,
        MERGE_P_T.out.tractogram,
        MERGE_O_T.out.tractogram,
        MERGE_INS.out.tractogram,
        ASSO_CING.out.extracted,
        MERGE_ASSO_BE_FRONTAL_GYRUS.out.tractogram,
        MERGE_ASSO_EE_FRONTAL_GYRUS.out.tractogram,
        MERGE_ASSO_BE_OCCIPITAL_GYRUS.out.tractogram,
        MERGE_ASSO_EE_OCCIPITAL_GYRUS.out.tractogram,
        MERGE_ASSO_BE_PARIETAL_GYRUS.out.tractogram,
        MERGE_ASSO_EE_PARIETAL_GYRUS.out.tractogram,
        MERGE_ASSO_BE_TEMPORAL_GYRUS.out.tractogram,
        MERGE_ASSO_EE_TEMPORAL_GYRUS.out.tractogram
        ).groupTuple(by: 0)

    TRK_PLAUSIBLE(merge_trk_plausible)

    /*
    Extracting unplausible streamlines
    */
    for_trk_unplausible = mni_tractograms.join(TRK_PLAUSIBLE.out.tractogram)
    TRK_UNPLAUSIBLE(for_trk_unplausible)

    /* Pack up for bundle extraction */
    // Map of named channels handed to EXTRACT_BUNDLES; keys mirror the bundle names.
    for_bundle_extraction = [
        key_CC_Homotopic_frontal_for_rename: CC_Homotopic_frontal_for_rename,
        key_CC_Homotopic_occipital_for_rename: CC_Homotopic_occipital_for_rename,
        key_CC_Homotopic_temporal_for_rename: CC_Homotopic_temporal_for_rename,
        key_CC_Homotopic_parietal_for_rename: CC_Homotopic_parietal_for_rename,
        key_CC_Homotopic_insular_for_rename: CC_Homotopic_insular_for_rename,
        key_CC_Homotopic_cingulum_for_rename: CC_Homotopic_cingulum_for_rename,
        key_BG_ipsi_Caud_for_rename: SPLIT_BG_CAUD.out.extracted_with_side,
        key_BG_ipsi_Put_for_rename: SPLIT_BG_PUT.out.extracted_with_side,
        key_BG_ipsi_Thal_for_rename: bg_ipsi_thal_for_rename,
        key_optic_radiation_for_rename: optic_radiation_for_rename,
        key_asso_u_shape_for_rename: SPLIT_USHAPE_CGM_ASSO.out.asso_u_shape_for_rename,
        key_Cing_for_rename: ASSO_CING.out.extracted_with_side,
        key_asso_all_intra_inter_dorsal_all_f_O_for_rename: asso_all_intra_inter_dorsal_all_f_O_for_rename,
        key_asso_all_intra_inter_dorsal_f_p_for_rename: ASSO_DORSAL_F_P.out.extracted_with_side_list,
        key_asso_all_intra_inter_dorsal_all_f_T_for_rename: asso_all_intra_inter_dorsal_all_f_T_for_rename,
        key_brainstem_corticopontine_frontal_for_rename: EXTRACT_PLAUSIBLE_BRAINSTEM.out.brainstem_corticopontine_frontal_for_rename,
        key_brainstem_ee_corticopontine_parietotemporooccipital_for_rename: EXTRACT_PLAUSIBLE_BRAINSTEM.out.brainstem_ee_corticopontine_parietotemporooccipital_for_rename,
        key_brainstem_pyramidal_for_rename: EXTRACT_PLAUSIBLE_BRAINSTEM.out.brainstem_pyramidal_for_rename,
        key_fornix_for_rename: EXTRACT_FORNIX.out.extracted,
        key_asso_IFOF_for_rename: SPLIT_ASSO_VENTRAL_IFOF_UF.out.extracted_with_side,
        key_asso_UF_for_rename: SPLIT_ASSO_VENTRAL_IFOF_UF.out.remaining_with_side,
        key_all_O_T_for_rename: MERGE_O_T.out.tractogram_with_side,
        key_brainstem_for_rename: EXTRACT_PLAUSIBLE_BRAINSTEM.out.brainstem_for_trk_plausible,
        key_cerebellum_for_rename: EXTRACT_PLAUSIBLE_CEREBELLUM.out.plausible,
        key_accx_for_rename: EXTRACT_PLAUSIBLE_AC_CX.out.extracted,
        key_plausible_commissural: CC_ALL_COMMISSURAL.out.plausible
    ]

    // TODO: Maybe move the following in the main.nf.
    // However, it is problematic to do so with how
    // nextflow seems to be handling the channels when
    // emitting values. Needs more investigation.
extracted_bundles = Channel.empty()
    if (params.extract_bundles) {
        EXTRACT_BUNDLES(for_bundle_extraction, sides)
        extracted_bundles = EXTRACT_BUNDLES.out.bundles
    }

    emit:
    plausible = TRK_PLAUSIBLE.out.tractogram
    unplausible = TRK_UNPLAUSIBLE.out.tractogram
    bundles = extracted_bundles
}

// Keeps only anatomically plausible cerebellar streamlines: restrict to the
// cerebellum, carve out sub-bundles per target region, then union them.
process EXTRACT_PLAUSIBLE_CEREBELLUM {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), path(tractogram) // from ee_cerebellum_for_extract_plausible

    output:
    tuple val(meta), path("${meta.id}__all_cerebellum_plausibles.trk"), emit: plausible
    // Intermediate sub-bundles, published for inspection only (no emit name).
    path "${meta.id}__all_in_cerebellum_nocx_nocerebwm.trk"
    path "${meta.id}__all_in_cerebellum_in_Medulla.trk"
    path "${meta.id}__all_in_cerebellum_in_Pons.trk"
    path "${meta.id}__all_in_cerebellum_in_Midbrain.trk"
    path "${meta.id}__all_in_cerebellum_in_redN_and_Thal.trk"

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} ${meta.id}__tmp_in_cerebellum.trk\
        --filtering_list ${params.FLF}in_cerebellum.txt -f
    scil_tractogram_filter_by_roi ${meta.id}__tmp_in_cerebellum.trk ${meta.id}__all_in_cerebellum_nocx_nocerebwm.trk\
        --filtering_list ${params.FLF}cerebellum_nocx_in_cereb.txt -f
    scil_tractogram_filter_by_roi ${meta.id}__tmp_in_cerebellum.trk ${meta.id}__all_in_cerebellum_in_Medulla.trk\
        --filtering_list ${params.FLF}cerebellum_in_medulla.txt -f
    scil_tractogram_filter_by_roi ${meta.id}__tmp_in_cerebellum.trk ${meta.id}__all_in_cerebellum_in_Pons.trk\
        --filtering_list ${params.FLF}cerebellum_in_pons.txt -f
    scil_tractogram_filter_by_roi ${meta.id}__tmp_in_cerebellum.trk ${meta.id}__all_in_cerebellum_in_Midbrain.trk\
        --filtering_list ${params.FLF}cerebellum_in_midbrain.txt -f
    scil_tractogram_filter_by_roi ${meta.id}__tmp_in_cerebellum.trk ${meta.id}__all_in_cerebellum_in_redN_and_Thal.trk\
        --filtering_list ${params.FLF}cerebellum_in_rednucleus_and_thalamus.txt -f
    scil_tractogram_math union ${meta.id}__all_in_*.trk ${meta.id}__all_cerebellum_plausibles.trk --save_empty -f
    """
}

// Splits brainstem streamlines into plausible sub-bundles (both-end and
// either-end families), unions them, and optionally keeps the complement.
// NOTE(review): the script reads `${meta.id}__all_brainstem.trk` literally
// instead of `${tractogram}` — this only works if the staged input file is
// always named exactly that; confirm upstream naming.
process EXTRACT_PLAUSIBLE_BRAINSTEM {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), path(tractogram) // from all_brainstem_for_extract_plausible
    output:
    tuple val(meta), path("${meta.id}__all_brainstem_plausibles.trk"), emit: brainstem_for_trk_plausible
    // Only produced when params.keep_intermediate_steps is true.
    path "${meta.id}__all_brainstem_unplausibles.trk", optional: true
    path "${meta.id}__be_midbrain.trk"
    path "${meta.id}__be_medulla.trk"
    path "${meta.id}__be_pons.trk"
    path "${meta.id}__ee_thalamus.trk"
    path "${meta.id}__ee_red_nucleus.trk"
    tuple val(meta), path("${meta.id}__ee_fronto_pontine.trk"), emit: brainstem_corticopontine_frontal_for_rename
    tuple val(meta), path("${meta.id}__ee_parietotemporooccipital_pontine.trk"), emit: brainstem_ee_corticopontine_parietotemporooccipital_for_rename
    tuple val(meta), path("${meta.id}__ee_pyramidal.trk"), emit: brainstem_pyramidal_for_rename
    path "${meta.id}__ee_cortico_tectal.trk"

    script:
    """
    # Extract be midbrain
    scil_tractogram_filter_by_roi ${meta.id}__all_brainstem.trk ${meta.id}__be_midbrain.trk\
        --filtering_list ${params.FLF}brainstem_be_midbrain.txt -f
    # Extract be medulla
    scil_tractogram_filter_by_roi ${meta.id}__all_brainstem.trk ${meta.id}__be_medulla.trk\
        --filtering_list ${params.FLF}brainstem_be_medulla.txt -f
    # Extract be pons
    scil_tractogram_filter_by_roi ${meta.id}__all_brainstem.trk ${meta.id}__be_pons.trk\
        --filtering_list ${params.FLF}brainstem_be_pons.txt -f

    # Extract ee thalamus
    scil_tractogram_filter_by_roi ${meta.id}__all_brainstem.trk ${meta.id}__ee_thalamus.trk\
        --filtering_list ${params.FLF}brainstem_ee_thalamus.txt -f
    # Extract ee red_nucleus
    scil_tractogram_filter_by_roi ${meta.id}__all_brainstem.trk ${meta.id}__ee_red_nucleus.trk\
        --filtering_list ${params.FLF}brainstem_ee_red_nucleus.txt -f

    # Prepartion for fronto-pontine, parietotemporooccipito-pontine, pyramidal, cortico-tectal
    scil_tractogram_filter_by_roi ${meta.id}__all_brainstem.trk ${meta.id}__ee_tmp_01.trk\
        --filtering_list ${params.FLF}brainstem_ee_tmp_01.txt -f
    scil_tractogram_filter_by_roi ${meta.id}__all_brainstem.trk ${meta.id}__ee_tmp_02.trk\
        --filtering_list ${params.FLF}brainstem_ee_tmp_02.txt -f

    scil_tractogram_math union ${meta.id}__ee_tmp_01.trk ${meta.id}__ee_tmp_02.trk\
        ${meta.id}__ee_tmp_03.trk --save_empty -f

    # Extract ee Fronto-pontine R and L
    scil_tractogram_filter_by_roi ${meta.id}__ee_tmp_03.trk ${meta.id}__ee_fronto_pontine_R.trk\
        --filtering_list ${params.FLF}brainstem_ee_F_pontine_R.txt -f
    scil_tractogram_filter_by_roi ${meta.id}__ee_tmp_03.trk ${meta.id}__ee_fronto_pontine_L.trk\
        --filtering_list ${params.FLF}brainstem_ee_F_pontine_L.txt -f
    scil_tractogram_math union ${meta.id}__ee_fronto_pontine_L.trk ${meta.id}__ee_fronto_pontine_R.trk\
        ${meta.id}__ee_fronto_pontine.trk --save_empty -f

    # Extract ee ParietoTemporooccipital pontine R and L
    scil_tractogram_filter_by_roi ${meta.id}__ee_tmp_03.trk ${meta.id}__ee_parietotemporooccipital_pontine_R.trk\
        --filtering_list ${params.FLF}brainstem_ee_PTO_pontine_R.txt -f
    scil_tractogram_filter_by_roi ${meta.id}__ee_tmp_03.trk ${meta.id}__ee_parietotemporooccipital_pontine_L.trk\
        --filtering_list ${params.FLF}brainstem_ee_PTO_pontine_L.txt -f
    scil_tractogram_math union ${meta.id}__ee_parietotemporooccipital_pontine_L.trk ${meta.id}__ee_parietotemporooccipital_pontine_R.trk\
        ${meta.id}__ee_parietotemporooccipital_pontine.trk --save_empty -f

    # Extract ee Pyramidal
    scil_tractogram_filter_by_roi ${meta.id}__ee_tmp_03.trk ${meta.id}__ee_pyramidal.trk\
        --filtering_list ${params.FLF}brainstem_ee_pyramidal.txt -f

    # Extract ee Tectal
    scil_tractogram_filter_by_roi ${meta.id}__ee_tmp_03.trk ${meta.id}__ee_cortico_tectal.trk\
        --filtering_list ${params.FLF}brainstem_ee_cortico_tectal.txt -f
    scil_tractogram_filter_by_length ${meta.id}__ee_cortico_tectal.trk ${meta.id}__ee_cortico_tectal.trk --maxL 100 -f

    rm -f ${meta.id}__*tmp_*.trk

    scil_tractogram_math union ${meta.id}__be_*.trk ${meta.id}__ee_*.trk ${meta.id}__all_brainstem_plausibles.trk --save_empty -f

    if ${params.keep_intermediate_steps}
    then
        scil_tractogram_math difference ${meta.id}__all_brainstem.trk ${meta.id}__all_brainstem_plausibles.trk ${meta.id}__all_brainstem_unplausibles.trk --save_empty -f
    fi
    """
}

// Extracts plausible CC/basal-ganglia commissural streamlines (ROI filter with
// relaxed end distances, then a 170 mm length cap) and counts them.
process EXTRACT_PLAUSIBLE_CC_BG {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), path(tractogram) // from cc_for_extract_CC_BG

    output:
    tuple val(meta), path("${meta.id}__in_CC_BG_f.trk"), emit: plausible // into ccbg_for_trk_plausible, ccbg_for_commissural
    path "${meta.id}__in_CC_BG_f.txt"

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp.trk \
        --filtering_list ${params.FLF}CC_BG.txt -f\
        --overwrite_distance both_ends include 1\
        --overwrite_distance either_end include 1

    scil_tractogram_filter_by_length tmp.trk\
        ${meta.id}__in_CC_BG_f.trk\
        --maxL 170

    scil_tractogram_count_streamlines ${meta.id}__in_CC_BG_f.trk > ${meta.id}__in_CC_BG_f.txt
    """
}

// Splits non-BG association streamlines into hemispheres.
// NOTE(review): the `list` input is never used, and the script always produces
// BOTH asso_L.trk and asso_R.trk in every `each side` task — only the file
// matching ${side} is captured, so the other hemisphere's work is redundant.
process SPLIT_ASSO_IN_HEMI {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), path(tractogram) // from asso_noBG_for_split_hemi
    each list
    each side // from sides

    output:
    tuple val(meta), val(side), path("${meta.id}__asso_${side}.trk"), emit: asso_for_extract_u_shape
    path "${meta.id}__asso_${side}.txt", optional: true

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} ${meta.id}__asso_L.trk\
        --filtering_list ${params.FLF}asso_L.txt -f
    scil_tractogram_filter_by_roi ${tractogram} ${meta.id}__asso_R.trk\
        --filtering_list ${params.FLF}asso_R.txt -f
    """
}

// Separates hemispheric association streamlines into: CGM-only, U-shape
// (superficial WM), and the remainder (deep WM + superficial-WM leftovers)
// that moves on to long-range filtering.
process SPLIT_USHAPE_CGM_ASSO {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram) // from asso_for_extract_u_shape

    output:
    tuple val(meta), val(side), path("${meta.id}__asso_only_in_CGM_${side}.trk"), emit: assoCGM
    tuple val(meta), val(side), path("${meta.id}__asso_Ushape_${side}.trk"), emit: assoUShape
    // Same U-shape file emitted twice: once keyed by sid only (plausible merge),
    // once with the side label (renaming).
    tuple val(meta), path("${meta.id}__asso_Ushape_${side}_u.trk"), emit: asso_u_shape_for_trk_plausible
    tuple val(meta), val(side), path("${meta.id}__asso_Ushape_${side}_u.trk"), emit: asso_u_shape_for_rename

    tuple val(meta), val(side), path("${meta.id}__asso_f_${side}.trk"), emit: asso_for_remove_long_range
    path "${meta.id}__asso_only_in_CGM_${side}.txt", optional: true
    path "${meta.id}__asso_Ushape_${side}.txt", optional: true
    path "${meta.id}__asso_f_${side}.txt", optional: true

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} ${meta.id}__tmp1_${side}.trk \
        --filtering_list ${params.FLF}all_in_CGM_${side}.txt -f

    scil_tractogram_math difference ${tractogram} ${meta.id}__tmp1_${side}.trk \
        ${meta.id}__asso_SWM_${side}.trk --save_empty -f

    scil_tractogram_filter_by_roi ${meta.id}__tmp1_${side}.trk ${meta.id}__asso_only_in_CGM_${side}.trk \
        --filtering_list ${params.FLF}not_in_SWM_${side}.txt -f

    scil_tractogram_math difference ${meta.id}__tmp1_${side}.trk ${meta.id}__asso_only_in_CGM_${side}.trk \
        ${meta.id}__tmp2_${side}.trk --save_empty -f

    scil_tractogram_filter_by_roi ${meta.id}__tmp2_${side}.trk ${meta.id}__asso_Ushape_${side}.trk \
        --filtering_list ${params.FLF}not_in_DWM_${side}.txt -f

    scil_tractogram_extract_ushape ${meta.id}__asso_Ushape_${side}.trk --minU 0.5 --maxU 1 ${meta.id}__asso_Ushape_${side}_u.trk -f

    scil_tractogram_math difference ${meta.id}__tmp2_${side}.trk ${meta.id}__asso_Ushape_${side}.trk \
        ${meta.id}__asso_DWM_${side}.trk --save_empty -f

    scil_tractogram_math union ${meta.id}__asso_DWM_${side}.trk ${meta.id}__asso_SWM_${side}.trk ${meta.id}__asso_f_${side}.trk --save_empty -f

    if ${params.keep_intermediate_steps}
    then
        scil_tractogram_count_streamlines ${meta.id}__asso_only_in_CGM_${side}.trk > ${meta.id}__asso_only_in_CGM_${side}.txt
        scil_tractogram_count_streamlines ${meta.id}__asso_Ushape_${side}.trk > ${meta.id}__asso_Ushape_${side}.txt
        scil_tractogram_count_streamlines ${meta.id}__asso_f_${side}.trk > ${meta.id}__asso_f_${side}.txt
    fi
    """
}

// Unions AC, CC-BG and homotopic CC into the plausible commissural tractogram;
// optionally derives the unplausible complement from the raw commissural input.
process CC_ALL_COMMISSURAL {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), path(tmp_cc), path(accx), path(ccbg), path(cc_homo) // from all_cc_for_commissural

    output:
    tuple val(meta), path("${meta.id}__plausible_commissural_${params.mni_space}.trk"), emit: plausible
    path "${meta.id}__unplausible_commissural.trk", optional: true

    script:
    """
    scil_tractogram_math union ${accx} ${ccbg} ${cc_homo} ${meta.id}__plausible_commissural_${params.mni_space}.trk --save_empty -f

    if ${params.keep_intermediate_steps}
    then
        scil_tractogram_math difference ${tmp_cc} ${meta.id}__plausible_commissural_${params.mni_space}.trk ${meta.id}__unplausible_commissural.trk --save_empty -f
    fi
    """
}

// Both-ends-in-gyrus frontal extraction: ROI filter then U-shape selection.
process ASSO_BE_FRONTAL_GYRUS {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram) // from asso_all_intra_inter_for_be_frontal_filtering
    each gyrus // from asso_frontal_be_list

    output:
    tuple val(meta), val(side), val(gyrus), path("${meta.id}_asso_intra_be_frontal_${gyrus}_${side}_u.trk"), emit: extracted_with_side

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp.trk\
        --filtering_list ${params.FLF}ASSO_be_${gyrus}_${side}.txt -f
    scil_tractogram_extract_ushape tmp.trk --minU 0.5 --maxU 1\
        ${meta.id}_asso_intra_be_frontal_${gyrus}_${side}_u.trk -f
    """
}

// Either-end frontal extraction: ROI filter, per-pair max-length cap, U-shape.
process ASSO_EE_FRONTAL_GYRUS {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram), val(gyrus), val(max_length) // from asso_frontal_ee_for_extract

    output:
    tuple val(meta), val(side), val(gyrus), path("${meta.id}_asso_intra_ee_frontal_${gyrus}_${side}.trk"), emit: extracted_with_side //into asso_frontal_ee_for_merge

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp_01.trk\
        --filtering_list ${params.FLF}ASSO_ee_${gyrus}_${side}.txt -f
    scil_tractogram_filter_by_length tmp_01.trk tmp_02.trk\
        --maxL ${max_length} -f
    scil_tractogram_extract_ushape tmp_02.trk\
        --minU 0.5\
        --maxU 1\
        ${meta.id}_asso_intra_ee_frontal_${gyrus}_${side}.trk -f
    """
}

// Both-ends-in-gyrus occipital extraction (same recipe as the frontal variant).
process ASSO_BE_OCCIPITAL_GYRUS {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram) // from asso_all_intra_inter_for_be_occipital_filtering
    each gyrus // from asso_occipital_be_list

    output:
    tuple val(meta), val(side), val(gyrus), path("${meta.id}_asso_intra_be_occipital_${gyrus}_${side}_u.trk"), emit: extracted_with_side // into asso_occipital_be_for_merge

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp.trk \
        --filtering_list ${params.FLF}ASSO_be_${gyrus}_${side}.txt -f
    scil_tractogram_extract_ushape tmp.trk\
        --minU 0.5\
        --maxU 1\
        ${meta.id}_asso_intra_be_occipital_${gyrus}_${side}_u.trk -f
    """
}

// Either-end occipital extraction with per-pair max-length cap.
process ASSO_EE_OCCIPITAL_GYRUS {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram), val(gyrus), val(max_length) // from asso_occipital_ee_for_extract

    output:
    tuple val(meta), val(side), val(gyrus), path("${meta.id}_asso_intra_ee_occipital_${gyrus}_${side}.trk"), emit: extracted_with_side // into asso_occipital_ee_for_merge

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp_01.trk\
        --filtering_list ${params.FLF}ASSO_ee_${gyrus}_${side}.txt -f
    scil_tractogram_filter_by_length tmp_01.trk tmp_02.trk\
        --maxL ${max_length} -f
    scil_tractogram_extract_ushape tmp_02.trk\
        --minU 0.5\
        --maxU 1\
        ${meta.id}_asso_intra_ee_occipital_${gyrus}_${side}.trk -f
    """
}

// Both-ends-in-gyrus parietal extraction (same recipe as the frontal variant).
process ASSO_BE_PARIETAL_GYRUS {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram) // from asso_all_intra_inter_for_be_parietal_filtering
    each gyrus // from asso_parietal_be_list

    output:
    tuple val(meta), val(side), val(gyrus), path("${meta.id}_asso_intra_be_parietal_${gyrus}_${side}_u.trk"), emit: extracted_with_side // into asso_parietal_be_for_merge

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp.trk\
        --filtering_list ${params.FLF}ASSO_be_${gyrus}_${side}.txt -f
    scil_tractogram_extract_ushape tmp.trk\
        --minU 0.5\
        --maxU 1\
        ${meta.id}_asso_intra_be_parietal_${gyrus}_${side}_u.trk -f
    """
}

// Either-end parietal extraction with per-pair max-length cap.
process ASSO_EE_PARIETAL_GYRUS {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram), val(gyrus), val(max_length) // from asso_parietal_ee_for_extract

    output:
    tuple val(meta), val(side), val(gyrus), path("${meta.id}_asso_intra_ee_parietal_${gyrus}_${side}.trk"), emit: extracted_with_side //into asso_parietal_ee_for_merge

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp_01.trk\
        --filtering_list ${params.FLF}ASSO_ee_${gyrus}_${side}.txt -f
    scil_tractogram_filter_by_length tmp_01.trk tmp_02.trk\
        --maxL ${max_length} -f
    scil_tractogram_extract_ushape tmp_02.trk\
        --minU 0.5\
        --maxU 1\
        ${meta.id}_asso_intra_ee_parietal_${gyrus}_${side}.trk -f
    """
}

// Both-ends-in-gyrus temporal extraction (same recipe as the frontal variant).
process ASSO_BE_TEMPORAL_GYRUS {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram) // from asso_all_intra_inter_for_be_temporal_filtering
    each gyrus // from asso_temporal_be_list

    output:
    tuple val(meta), val(side), val(gyrus), path("${meta.id}_asso_intra_be_temporal_${gyrus}_${side}_u.trk"), emit: extracted_with_side // into asso_temporal_be_for_merge

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp.trk\
        --filtering_list ${params.FLF}ASSO_be_${gyrus}_${side}.txt -f
    scil_tractogram_extract_ushape tmp.trk\
        --minU 0.5\
        --maxU 1\
        ${meta.id}_asso_intra_be_temporal_${gyrus}_${side}_u.trk -f
    """
}

// Either-end temporal extraction with per-pair max-length cap.
process ASSO_EE_TEMPORAL_GYRUS {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), val(side), path(tractogram), val(gyrus), val(max_length) // from asso_temporal_ee_for_extract

    output:
    tuple val(meta), val(side), val(gyrus), path("${meta.id}_asso_intra_ee_temporal_${gyrus}_${side}.trk"), emit: extracted_with_side // into asso_temporal_ee_for_merge

    script:
    """
    scil_tractogram_filter_by_roi ${tractogram} tmp_01.trk\
        --filtering_list ${params.FLF}ASSO_ee_${gyrus}_${side}.txt -f
    scil_tractogram_filter_by_length tmp_01.trk tmp_02.trk\
        --maxL ${max_length} -f
    scil_tractogram_extract_ushape tmp_02.trk\
        --minU 0.5\
        --maxU 1\
        ${meta.id}_asso_intra_ee_temporal_${gyrus}_${side}.trk -f
    """
}

// Unions all per-subject plausible sub-bundles (metadata dropped) and shuffles
// the streamline order for the final plausible tractogram.
process TRK_PLAUSIBLE {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), path(tractogram) // from merge_trk_plausible

    output:
    tuple val(meta), path("${meta.id}__plausible_${params.mni_space}.trk"), emit: tractogram

    script:
    """
    scil_tractogram_math union ${tractogram} ${meta.id}__plausible_${params.mni_space}_tmp.trk --save_empty -f --no_metadata
    scil_tractogram_shuffle ${meta.id}__plausible_${params.mni_space}_tmp.trk ${meta.id}__plausible_${params.mni_space}.trk -f
    """
}

// Unplausible tractogram = whole-brain input minus the plausible set.
process TRK_UNPLAUSIBLE {
    tag "$meta.id"
    cpus 1

    container 'scilus/scilpy:dev'

    input:
    tuple val(meta), path(trk01), path(trk02) // from for_trk_unplausible
    output:
    tuple val(meta), path("${meta.id}__unplausible_${params.mni_space}.trk"), emit: tractogram

    script:
    """
    scil_tractogram_math difference ${trk01} ${trk02} ${meta.id}__unplausible_${params.mni_space}.trk --save_empty -f
    """
}

diff --git a/subworkflows/local/transform.nf b/subworkflows/local/transform.nf new file mode
100644 index 0000000..a4457cb --- /dev/null +++ b/subworkflows/local/transform.nf @@ -0,0 +1,91 @@ +include { REGISTRATION_ANTS } from '../../modules/nf-neuro/registration/ants/main' +include { BETCROP_ANTSBET } from '../../modules/nf-neuro/betcrop/antsbet/main' +include { TRACTOGRAM_REMOVEINVALID } from '../../modules/nf-neuro/tractogram/removeinvalid/main.nf' + +// TODO: Replace the following processes with the NF-Neuro module REGISTRATION_TRACTOGRAM +include { REGISTRATION_TRACTOGRAM } from '../../modules/nf-neuro/registration/tractogram/main.nf' +include { COPY_FILE as COPY_T1_ATLAS } from '../../modules/local/utils/copy_file.nf' + +// For tractograms that have a T1w, we assume they are not in the MNI space: +// - We register the T1w to the template space and transform the tractograms to the template space. +workflow TRANSFORM_TO_MNI { + take: + in_tractogram + t1s + + main: + + // SECTION A.1: For the subjects that have a T1w, + // we bet & register the T1w and the tractograms to the template space. 
+ + t1s_to_bet = Channel.empty() + if (params.run_bet) { + // takes: + // sid, t1, template, tissues_probabilities, mask, initial_affine + BETCROP_ANTSBET(t1s_to_bet) + t1s_to_bet = BETCROP_ANTSBET.out.t1.join(BETCROP_ANTSBET.out.mask) + } + else { + beted_t1s = t1s.map { sid, t1 -> [sid, t1, []]} + } + + // Add the T1 template + template_t1 = Channel.fromPath("${params.rois_folder_host}${params.atlas.template}") + t1s_for_registration = beted_t1s + .combine(template_t1) // Add the template T1 + .map { sid, t1, mask, template -> [sid, template, t1, mask] } + REGISTRATION_ANTS(t1s_for_registration) + + // Transform the tractograms + inv_transformation = REGISTRATION_ANTS.out.affine // *__output0InverseAffine.mat + deformation = REGISTRATION_ANTS.out.inverse_warp // *__output1InverseWarp.nii.gz + + transformation_for_trk_registration = in_tractogram + .combine(template_t1) + .join(inv_transformation) + .join(deformation) + .map { sid, tractogram, template, transfo, deform -> [sid, template, transfo, tractogram, [], deform] } + + // takes: + // sid, anat, transfo, tractogram, ref, deformation + REGISTRATION_TRACTOGRAM(transformation_for_trk_registration) + + // Provide the transformation and T1 in case we want to transform to orig space later on + transformations_for_orig = Channel.empty() + if (params.orig) { + // (sid, transfo, inv_deformation, deformation) + transformations_for_orig = REGISTRATION_ANTS.out.affine + .join(REGISTRATION_ANTS.out.warp) + } + + emit: + tractograms = REGISTRATION_TRACTOGRAM.out.tractogram + transformations_for_orig = transformations_for_orig +} + +// For tractograms that DO NOT have a T1w, we assume they are in the MNI space: +// - We remove the invalid streamlines and copy the template T1w to the subject folder. 
+workflow CLEAN_IF_FROM_MNI { + take: + in_tractogram + t1s + + main: + // We get (sid, tractogram, {t1 || null}) + tractograms_to_clean = in_tractogram.join(t1s, remainder: true) + // Keep the tractograms that do not have a T1w + tractograms_to_clean = tractograms_to_clean.filter { it[2] == null } + // Only keep (sid, tractogram) + tractograms_to_clean = tractograms_to_clean.map{ sid, trk, _null_t1 -> [sid, trk] } + + template_t1 = Channel.fromPath("${params.rois_folder_host}${params.atlas.template}") + to_copy_atlas = tractograms_to_clean.combine(template_t1) + .map{ sid, _trk, t1 -> [sid, [], t1]} + COPY_T1_ATLAS(to_copy_atlas) + + TRACTOGRAM_REMOVEINVALID(tractograms_to_clean) + + + emit: + cleaned_mni_tractograms = TRACTOGRAM_REMOVEINVALID.out.tractograms +} \ No newline at end of file diff --git a/subworkflows/nf-neuro/load_test_data/main.nf b/subworkflows/nf-neuro/load_test_data/main.nf new file mode 100644 index 0000000..ed9f153 --- /dev/null +++ b/subworkflows/nf-neuro/load_test_data/main.nf @@ -0,0 +1,201 @@ + + +def locate_local_cache () { + // Find cache location for test archives, in order of preference: + // 1. Using environment variable $NFNEURO_TEST_DATA_HOME + // 2. Using environment variable $XDG_DATA_HOME + // 3. Using default location $HOME/.local/share + // + // Location selected is appended with 'nf-neuro-test-archives'. + // If the location does not exist, it is created. 
+ + def storage = file( + System.getenv('NFNEURO_TEST_DATA_HOME') ?: + System.getenv('XDG_DATA_HOME') ?: + "${System.getenv('HOME')}/.local/share" + ) + def cache_location = file("$storage/nf-neuro-test-archives") + + if ( !cache_location.exists() ) { + try { + cache_location.mkdirs() + } + catch (Exception _e) { + error "Failed to create cache location: $cache_location | $_e" + } + } + + return cache_location +} + +def locate_remote_cache () { + return "$params.test_data_remote/$params.test_database_path" +} + +def load_manifest () { + // Load test data associations from params.test_data_associations + // which must be a map of test data identifiers [filename: identifier] + + if ( ! params.test_data_associations ) { + error """ + No test data associations provided, cannot create cache manifest. Please + provide a map of test data identifiers [filename: identifier] using + params.test_data_associations. + """ + } + + return params.test_data_associations +} + +def validate_cache_entry ( name, manager ) { + // Check if the cache entry is present in the manifest + + if ( !manager.manifest[name] ) { + error "Invalid cache entry supplied : $name" + } + +} + +def add_cache_entry ( name, manager ) { + // Add the test data archive as an entry in the cache. The archive is + // fetched from the remote location and stored in the cache location. + // The given name is validated against the manifest before adding. 
+ + manager.validate_entry(name) + + def identifier = "${manager.manifest[name]}" + def cache_entry = file("${manager.cache_location}/$identifier") + def remote_subpath = "${identifier[0..1]}/${identifier[2..-1]}" + def remote_entry = file("$manager.remote_location/$remote_subpath") + + try { + remote_entry.copyTo(cache_entry) + } + catch (Exception _e) { + manager.delete_entry(name) + error "Failed to download test archive: $name | $_e" + } + + return cache_entry +} + +def get_cache_entry ( name, manager ) { + // Retrieve the cache entry for the given test data archive name. + // If the entry does not exist, it is added to the cache. The add + // operation will validate the name against the manifest. + + def identifier = "${manager.manifest[name]}" + def cache_entry = file("${manager.cache_location}/$identifier") + + if ( !cache_entry.exists() ) manager.add_entry(name) + + return cache_entry +} + +def delete_cache_entry ( name, manager ) { + // Delete the cache entry for the given test data archive name. + + def identifier = "${manager.manifest[name]}" + def cache_entry = file("${manager.cache_location}/$identifier") + if ( cache_entry.exists() ) { + try { + cache_entry.delete() + } + catch (Exception _e) { + error "Failed to delete cache entry for test archive: $name | $_e" + } + } +} + +def update_cache_entry ( name, manager ) { + // Update the cache entry for the given test data archive name. The + // procedure uses add to carry the update, but deletes the entry first + // if it exists. The add operation will validate the name against + // the manifest. + + manager.delete_entry(name) + manager.add_entry(name) +} + +def setup_cache () { + // Build a cache manager to encapsulate interaction with the test data cache. + // The manager follows simple CRUD operation to handle update and retrieval of + // test data archives from the cache and the remote location. 
+ + def cache_manager = new Expando( + remote_location: locate_remote_cache(), + cache_location: locate_local_cache(), + manifest: load_manifest() + ) + cache_manager.validate_entry = { v -> validate_cache_entry( v, cache_manager ) } + cache_manager.add_entry = { v -> add_cache_entry(v, cache_manager) } + cache_manager.get_entry = { v -> get_cache_entry(v, cache_manager) } + cache_manager.delete_entry = { v -> delete_cache_entry(v, cache_manager) } + cache_manager.update_entry = { v -> update_cache_entry(v, cache_manager) } + + return cache_manager +} + +def unzip_test_archive ( archive, destination ) { + // Unzip the test data archive to the destination directory. + // Exception are not handled here, and are propagated to the caller. + + def content = null + try { + content = new java.util.zip.ZipFile("$archive") + content.entries().each{ entry -> + def local_target = file("$destination/${entry.getName()}") + if (entry.isDirectory()) { + local_target.mkdirs(); + } else { + local_target.getParent().mkdirs(); + file("$local_target").withOutputStream{ + out -> out << content.getInputStream(entry) + } + } + } + content.close() + } + catch (Exception _e) { + if (content) content.close() + throw _e + } +} + +def fetch_archive ( name, destination, manager ) { + // Unzip all archive content to destination + try { + unzip_test_archive(manager.get_entry(name), destination) + + return destination.resolve("${name.take(name.lastIndexOf('.'))}") + } + catch (java.util.zip.ZipException _e) { + try { + manager.delete_entry(name) + unzip_test_archive(manager.get_entry(name), destination) + + return destination.resolve("${name.take(name.lastIndexOf('.'))}") + } + catch (Exception _ee) { + error "Failed to fetch test archive: $name | $_ee" + } + } +} + +workflow LOAD_TEST_DATA { + + take: + ch_archive + test_data_prefix + + main: + manager = setup_cache() + + test_data_path = java.nio.file.Files.createTempDirectory("$test_data_prefix") + ch_test_data_directory = ch_archive.map{ 
archive -> + fetch_archive(archive, test_data_path, manager) + } + + emit: + test_data_directory = ch_test_data_directory // channel: [ test_data_directory ] +} diff --git a/subworkflows/nf-neuro/load_test_data/meta.yml b/subworkflows/nf-neuro/load_test_data/meta.yml new file mode 100644 index 0000000..750e935 --- /dev/null +++ b/subworkflows/nf-neuro/load_test_data/meta.yml @@ -0,0 +1,45 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/scilus/nf-neuro/main/subworkflows/meta-schema.json +name: "load_test_data" + +description: | + Load test data from Scilpy in a local temporary directory. Archives names can be found at: + https://github.com/scilus/scilpy/blob/96ab12eb5d303754ef7ad74d7385137683cb9d47/scilpy/io/fetcher.py#L59 +keywords: + - load + - test + - data + - scilpy +components: + - testdata/scilpy +input: + - ch_archive: + type: string + description: | + Input archives to download and extract + Structure: [ val(archive_name) ] + pattern: "*.zip" + + - test_data_prefix: + type: string + description: | + Prefix for the output test data directory + Structure: [ val(directory_prefix) ] + pattern: "*" + +output: + - test_data_directory: + type: file + description: | + Directory containing test data extracted from the archive + Structure: [ path(temporary_test_data_path) ] + pattern: "*/{prefix}_*/archive_name" + + - versions: + type: file + description: | + File containing software versions + Structure: [ path(versions.yml) ] + pattern: "versions.yml" + +authors: + - "@AlexVCaron" diff --git a/subworkflows/nf-neuro/load_test_data/tests/main.nf.test b/subworkflows/nf-neuro/load_test_data/tests/main.nf.test new file mode 100644 index 0000000..92c7cc1 --- /dev/null +++ b/subworkflows/nf-neuro/load_test_data/tests/main.nf.test @@ -0,0 +1,31 @@ +nextflow_workflow { + + name "Test Subworkflow LOAD_TEST_DATA" + script "../main.nf" + workflow "LOAD_TEST_DATA" + + tag "subworkflows" + tag "subworkflows_nfcore" + tag "subworkflows/load_test_data" + tag 
"testdata" + tag "testdata/scilpy" + + test("load multiple packages") { + + when { + workflow { + """ + input[0] = Channel.from( ["bids_json.zip", "stats.zip"] ) + input[1] = "test.load_test_data" + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/subworkflows/nf-neuro/load_test_data/tests/main.nf.test.snap b/subworkflows/nf-neuro/load_test_data/tests/main.nf.test.snap new file mode 100644 index 0000000..d32bb3c --- /dev/null +++ b/subworkflows/nf-neuro/load_test_data/tests/main.nf.test.snap @@ -0,0 +1,177 @@ +{ + "load multiple packages": { + "content": [ + { + "0": [ + [ + "result_complex_dwi_complex_rev_dwi.json:md5,8002324629222edb9217a77b27e60a34", + "result_complex_dwi_complex_rev_dwi_sbref.json:md5,720af6dfc6596814218a8e42d69b58c3", + "result_complex_dwi_complex_sbref.json:md5,60f3667851c3b6ed78f28a508a69341f", + "result_complex_dwi_epi.json:md5,60f3667851c3b6ed78f28a508a69341f", + "result_real_dwi_epi.json:md5,22adcf1437117b643cf292a82e7f7552", + "result_real_dwi_real_rev_dwi.json:md5,42cb893cc5185bb674a28a38f1e35a99", + "result_real_dwi_real_rev_dwi_sbref.json:md5,86343131de7e534024f0deb7697922c3", + "result_real_dwi_real_sbref.json:md5,22adcf1437117b643cf292a82e7f7552" + ], + [ + [ + ".gitignore:md5,63638721e968d8bb26df823dfeb84151", + "meanstd_all.json:md5,263627e1f11003985b2938e55fcf3cde", + "meanstd_all.json.dvc:md5,26b112b72edc22d1de6766d84e16e6d3", + "participants.tsv:md5,98444b2e2467231a3a14851589cb5ebf", + "participants.tsv.dvc:md5,de480a3413180b5c2845411cc3a00c00" + ], + [ + ".gitignore:md5,673491db53cebb2bf1ab799823ef8bca", + "list_id.txt:md5,95083621de77a7c8d12f073a481765db", + "list_id.txt.dvc:md5,980b540df3723153229b52986df2db8c", + "sub-1005_ad.npy:md5,a7d46fa37c1a62c2662fbd61aea1a2ee", + "sub-1005_ad.npy.dvc:md5,d4eb49390b9253aeb957de8cc6823f6a", + "sub-1005_afd_fixel.npy:md5,1a9024929d9a695a60a13328f035ab65", + 
"sub-1005_afd_fixel.npy.dvc:md5,106c31cec7fa49b670b93f5f743df777", + "sub-1005_afd_total.npy:md5,ade39bcb17ce1a698f872d4caef2e3f0", + "sub-1005_afd_total.npy.dvc:md5,f6d940723bca3ce11ebf7e3d7601c185", + "sub-1005_fa.npy:md5,24108ebd3c6b531546b60248c913a762", + "sub-1005_fa.npy.dvc:md5,92d94ebff4b017385efd82b08e4cfd9f", + "sub-1005_md.npy:md5,8a968dba976c04ec84f023e77258cb91", + "sub-1005_md.npy.dvc:md5,0d17faf437297542ed6ee3dc108a4492", + "sub-1005_nufo.npy:md5,e827e638fbb32a22e71f785fa80ced40", + "sub-1005_nufo.npy.dvc:md5,45b809e1f1ec5dd8e37cc6e355381dee", + "sub-1005_rd.npy:md5,c2b9cf912edc2dccdc44154f9004dfa1", + "sub-1005_rd.npy.dvc:md5,c32bbeb2b845a02effc620c2d4935fe9", + "sub-1108_ad.npy:md5,a23d2736ba32a234e02a0a260a603745", + "sub-1108_ad.npy.dvc:md5,df6d8d241d1c19a157f0f0961f46a91b", + "sub-1108_afd_fixel.npy:md5,5f0979ab2d4ffc2b08f01da511da28ac", + "sub-1108_afd_fixel.npy.dvc:md5,34ab6fca6d58bf6b4ba9c49ccbc504a4", + "sub-1108_afd_total.npy:md5,3a3f14d517b10cdc040eae51133d5ee6", + "sub-1108_afd_total.npy.dvc:md5,b8e1e272a5f8888e5ebe3812bf64493a", + "sub-1108_fa.npy:md5,c046d5da721fc4cbe1c79247920611d3", + "sub-1108_fa.npy.dvc:md5,4594334a0abd647e4a42f24f18199c55", + "sub-1108_md.npy:md5,d4b7841476e869ed04071401872e05b4", + "sub-1108_md.npy.dvc:md5,5495dce2eaca7da89064f338a9c74bfc", + "sub-1108_nufo.npy:md5,a211e9042a0d808b14382453d19f2f8e", + "sub-1108_nufo.npy.dvc:md5,49dd90f0c4ea628456e700e50ca48290", + "sub-1108_rd.npy:md5,240830e4488eb1a7c94f11a2440be26b", + "sub-1108_rd.npy.dvc:md5,e647edc4749fbb47248b19b8d172d1b4", + "sub-1230_ad.npy:md5,da1d3a1ab54b1880829d112362ea64d4", + "sub-1230_ad.npy.dvc:md5,5de28f3d0ddf873a95a373167cedee0d", + "sub-1230_afd_fixel.npy:md5,565f067c92be5621f980e9ee91404892", + "sub-1230_afd_fixel.npy.dvc:md5,6629061f87fe58257590ed3afc201eb7", + "sub-1230_afd_total.npy:md5,458683f5388fd8420bb5829b0943b836", + "sub-1230_afd_total.npy.dvc:md5,d4f412f62bc9821d55430ea7d15fc5d9", + 
"sub-1230_fa.npy:md5,be96096d322041ed1a89d79b20111140", + "sub-1230_fa.npy.dvc:md5,8f82a2bdc07f0c6350d9920ccc62ea8e", + "sub-1230_md.npy:md5,8f6e6e79afd45832f414987064a993ef", + "sub-1230_md.npy.dvc:md5,e3ea7d43cde26b651405eee216330c48", + "sub-1230_nufo.npy:md5,7405bf0c6552524ca7a4c414630a875c", + "sub-1230_nufo.npy.dvc:md5,87984598ddf7fbad047e2c605c6b8033", + "sub-1230_rd.npy:md5,f9130325d75bba08b8f7fcf52e730648", + "sub-1230_rd.npy.dvc:md5,b58b499e68ecfae4a28c7228cc86a800", + "sub-2120_ad.npy:md5,35400d000f98ff268f173492aa845edf", + "sub-2120_ad.npy.dvc:md5,6d85fafe3c23fae81618eacfa48c4fdb", + "sub-2120_afd_fixel.npy:md5,dd27ff47e09f853921692b01e261e3ff", + "sub-2120_afd_fixel.npy.dvc:md5,5bbad3d78e4681601fe0721a412c8e9a", + "sub-2120_afd_total.npy:md5,fde8a68353981a301564af775fe4019c", + "sub-2120_afd_total.npy.dvc:md5,f2ff79dfee6bf3862f9e0689da6b62e2", + "sub-2120_fa.npy:md5,2f2c87666f1e2578201f2f403e050c2e", + "sub-2120_fa.npy.dvc:md5,23100d5801b092c4c3994520936ff5cc", + "sub-2120_md.npy:md5,0fa57e28cb7de9f25edad0b0daa749af", + "sub-2120_md.npy.dvc:md5,3e7335e4190e4f0f54c4726d83098427", + "sub-2120_nufo.npy:md5,a7b5b0b46b420356a353ad64ce62ae00", + "sub-2120_nufo.npy.dvc:md5,5296aad4973d52d20595fee43cc7f47a", + "sub-2120_rd.npy:md5,84fb2b0a1b32684ad92b393474eefffd", + "sub-2120_rd.npy.dvc:md5,4191106058b2f08321a547bce3836b30" + ] + ] + ], + "test_data_directory": [ + [ + "result_complex_dwi_complex_rev_dwi.json:md5,8002324629222edb9217a77b27e60a34", + "result_complex_dwi_complex_rev_dwi_sbref.json:md5,720af6dfc6596814218a8e42d69b58c3", + "result_complex_dwi_complex_sbref.json:md5,60f3667851c3b6ed78f28a508a69341f", + "result_complex_dwi_epi.json:md5,60f3667851c3b6ed78f28a508a69341f", + "result_real_dwi_epi.json:md5,22adcf1437117b643cf292a82e7f7552", + "result_real_dwi_real_rev_dwi.json:md5,42cb893cc5185bb674a28a38f1e35a99", + "result_real_dwi_real_rev_dwi_sbref.json:md5,86343131de7e534024f0deb7697922c3", + 
"result_real_dwi_real_sbref.json:md5,22adcf1437117b643cf292a82e7f7552" + ], + [ + [ + ".gitignore:md5,63638721e968d8bb26df823dfeb84151", + "meanstd_all.json:md5,263627e1f11003985b2938e55fcf3cde", + "meanstd_all.json.dvc:md5,26b112b72edc22d1de6766d84e16e6d3", + "participants.tsv:md5,98444b2e2467231a3a14851589cb5ebf", + "participants.tsv.dvc:md5,de480a3413180b5c2845411cc3a00c00" + ], + [ + ".gitignore:md5,673491db53cebb2bf1ab799823ef8bca", + "list_id.txt:md5,95083621de77a7c8d12f073a481765db", + "list_id.txt.dvc:md5,980b540df3723153229b52986df2db8c", + "sub-1005_ad.npy:md5,a7d46fa37c1a62c2662fbd61aea1a2ee", + "sub-1005_ad.npy.dvc:md5,d4eb49390b9253aeb957de8cc6823f6a", + "sub-1005_afd_fixel.npy:md5,1a9024929d9a695a60a13328f035ab65", + "sub-1005_afd_fixel.npy.dvc:md5,106c31cec7fa49b670b93f5f743df777", + "sub-1005_afd_total.npy:md5,ade39bcb17ce1a698f872d4caef2e3f0", + "sub-1005_afd_total.npy.dvc:md5,f6d940723bca3ce11ebf7e3d7601c185", + "sub-1005_fa.npy:md5,24108ebd3c6b531546b60248c913a762", + "sub-1005_fa.npy.dvc:md5,92d94ebff4b017385efd82b08e4cfd9f", + "sub-1005_md.npy:md5,8a968dba976c04ec84f023e77258cb91", + "sub-1005_md.npy.dvc:md5,0d17faf437297542ed6ee3dc108a4492", + "sub-1005_nufo.npy:md5,e827e638fbb32a22e71f785fa80ced40", + "sub-1005_nufo.npy.dvc:md5,45b809e1f1ec5dd8e37cc6e355381dee", + "sub-1005_rd.npy:md5,c2b9cf912edc2dccdc44154f9004dfa1", + "sub-1005_rd.npy.dvc:md5,c32bbeb2b845a02effc620c2d4935fe9", + "sub-1108_ad.npy:md5,a23d2736ba32a234e02a0a260a603745", + "sub-1108_ad.npy.dvc:md5,df6d8d241d1c19a157f0f0961f46a91b", + "sub-1108_afd_fixel.npy:md5,5f0979ab2d4ffc2b08f01da511da28ac", + "sub-1108_afd_fixel.npy.dvc:md5,34ab6fca6d58bf6b4ba9c49ccbc504a4", + "sub-1108_afd_total.npy:md5,3a3f14d517b10cdc040eae51133d5ee6", + "sub-1108_afd_total.npy.dvc:md5,b8e1e272a5f8888e5ebe3812bf64493a", + "sub-1108_fa.npy:md5,c046d5da721fc4cbe1c79247920611d3", + "sub-1108_fa.npy.dvc:md5,4594334a0abd647e4a42f24f18199c55", + "sub-1108_md.npy:md5,d4b7841476e869ed04071401872e05b4", + 
"sub-1108_md.npy.dvc:md5,5495dce2eaca7da89064f338a9c74bfc", + "sub-1108_nufo.npy:md5,a211e9042a0d808b14382453d19f2f8e", + "sub-1108_nufo.npy.dvc:md5,49dd90f0c4ea628456e700e50ca48290", + "sub-1108_rd.npy:md5,240830e4488eb1a7c94f11a2440be26b", + "sub-1108_rd.npy.dvc:md5,e647edc4749fbb47248b19b8d172d1b4", + "sub-1230_ad.npy:md5,da1d3a1ab54b1880829d112362ea64d4", + "sub-1230_ad.npy.dvc:md5,5de28f3d0ddf873a95a373167cedee0d", + "sub-1230_afd_fixel.npy:md5,565f067c92be5621f980e9ee91404892", + "sub-1230_afd_fixel.npy.dvc:md5,6629061f87fe58257590ed3afc201eb7", + "sub-1230_afd_total.npy:md5,458683f5388fd8420bb5829b0943b836", + "sub-1230_afd_total.npy.dvc:md5,d4f412f62bc9821d55430ea7d15fc5d9", + "sub-1230_fa.npy:md5,be96096d322041ed1a89d79b20111140", + "sub-1230_fa.npy.dvc:md5,8f82a2bdc07f0c6350d9920ccc62ea8e", + "sub-1230_md.npy:md5,8f6e6e79afd45832f414987064a993ef", + "sub-1230_md.npy.dvc:md5,e3ea7d43cde26b651405eee216330c48", + "sub-1230_nufo.npy:md5,7405bf0c6552524ca7a4c414630a875c", + "sub-1230_nufo.npy.dvc:md5,87984598ddf7fbad047e2c605c6b8033", + "sub-1230_rd.npy:md5,f9130325d75bba08b8f7fcf52e730648", + "sub-1230_rd.npy.dvc:md5,b58b499e68ecfae4a28c7228cc86a800", + "sub-2120_ad.npy:md5,35400d000f98ff268f173492aa845edf", + "sub-2120_ad.npy.dvc:md5,6d85fafe3c23fae81618eacfa48c4fdb", + "sub-2120_afd_fixel.npy:md5,dd27ff47e09f853921692b01e261e3ff", + "sub-2120_afd_fixel.npy.dvc:md5,5bbad3d78e4681601fe0721a412c8e9a", + "sub-2120_afd_total.npy:md5,fde8a68353981a301564af775fe4019c", + "sub-2120_afd_total.npy.dvc:md5,f2ff79dfee6bf3862f9e0689da6b62e2", + "sub-2120_fa.npy:md5,2f2c87666f1e2578201f2f403e050c2e", + "sub-2120_fa.npy.dvc:md5,23100d5801b092c4c3994520936ff5cc", + "sub-2120_md.npy:md5,0fa57e28cb7de9f25edad0b0daa749af", + "sub-2120_md.npy.dvc:md5,3e7335e4190e4f0f54c4726d83098427", + "sub-2120_nufo.npy:md5,a7b5b0b46b420356a353ad64ce62ae00", + "sub-2120_nufo.npy.dvc:md5,5296aad4973d52d20595fee43cc7f47a", + "sub-2120_rd.npy:md5,84fb2b0a1b32684ad92b393474eefffd", + 
"sub-2120_rd.npy.dvc:md5,4191106058b2f08321a547bce3836b30" + ] + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0-rc1", + "nextflow": "24.04.2" + }, + "timestamp": "2024-07-26T15:46:38.950226" + } +} \ No newline at end of file diff --git a/subworkflows/nf-neuro/load_test_data/tests/nextflow.config b/subworkflows/nf-neuro/load_test_data/tests/nextflow.config new file mode 100644 index 0000000..e177a14 --- /dev/null +++ b/subworkflows/nf-neuro/load_test_data/tests/nextflow.config @@ -0,0 +1,2 @@ +process { +} diff --git a/subworkflows/nf-neuro/load_test_data/tests/tags.yml b/subworkflows/nf-neuro/load_test_data/tests/tags.yml new file mode 100644 index 0000000..1ec148f --- /dev/null +++ b/subworkflows/nf-neuro/load_test_data/tests/tags.yml @@ -0,0 +1,2 @@ +subworkflows/load_test_data: + - subworkflows/nf-neuro/load_test_data/** diff --git a/templates/filter_with_list.sh b/templates/filter_with_list.sh index abeace8..4cc3557 100644 --- a/templates/filter_with_list.sh +++ b/templates/filter_with_list.sh @@ -1,21 +1,37 @@ #!/bin/bash +set -e + +# Print the parameters for debugging +echo "===============================" +echo "filter_with_list.sh arguments:" +echo "Distance: ${distance}" +echo "Tractogram: ${tractogram}" +echo "Basename: ${basename}" +echo "Output Extension: ${out_extension}" +echo "Remaining Extension: ${remaining_extension}" +echo "Filtering List: ${filtering_list}" +echo "Extract Masks: ${extract_masks}" +echo "Keep: ${keep}" +echo "===============================" if [ "${distance}" = "0" ] then -scil_filter_tractogram.py ${tractogram} ${basename}__${out_extension}.trk \ +scil_tractogram_filter_by_roi ${tractogram} ${basename}__${out_extension}.trk \ --filtering_list ${filtering_list} ${extract_masks} -f \ --display_count > ${basename}__${out_extension}.txt; else -scil_filter_tractogram.py ${tractogram} ${basename}__${out_extension}.trk \ +scil_tractogram_filter_by_roi ${tractogram} ${basename}__${out_extension}.trk \ --filtering_list 
${filtering_list} ${extract_masks} -f \ --overwrite_distance both_ends include ${distance} --overwrite_distance either_end include ${distance} --display_count > ${basename}__${out_extension}.txt; fi if [ "${keep}" = "true" ] then - scil_streamlines_math.py difference ${tractogram} \ + scil_tractogram_math difference ${tractogram} \ ${basename}__${out_extension}.trk \ - ${sid}__${remaining_extension}.trk ; - scil_count_streamlines.py ${sid}__${remaining_extension}.trk > ${sid}__${remaining_extension}.txt; + ${basename}__${remaining_extension}.trk \ + --save_empty; + scil_tractogram_count_streamlines ${basename}__${remaining_extension}.trk > ${basename}__${remaining_extension}.txt; fi - \ No newline at end of file + +echo "Done." diff --git a/templates/old_filter_with_list.sh b/templates/old_filter_with_list.sh new file mode 100644 index 0000000..354fc7b --- /dev/null +++ b/templates/old_filter_with_list.sh @@ -0,0 +1,35 @@ +#!/bin/bash +set -e +ls -lh /extractor_flow +# Print the parameters for debugging +echo "===============================" +echo "filter_with_list.sh arguments:" +echo "Distance: ${distance}" +echo "Tractogram: ${tractogram}" +echo "Basename: ${basename}" +echo "Output Extension: ${out_extension}" +echo "Remaining Extension: ${remaining_extension}" +echo "Filtering List: ${filtering_list}" +echo "Extract Masks: ${extract_masks}" +echo "Keep: ${keep}" +echo "===============================" + +if [ "${distance}" = "0" ] +then + scil_filter_tractogram.py ${tractogram} ${basename}__${out_extension}.trk \ + --filtering_list ${filtering_list} ${extract_masks} -f \ + --display_count > ${basename}__${out_extension}.txt; +else + scil_filter_tractogram.py ${tractogram} ${basename}__${out_extension}.trk \ + --filtering_list ${filtering_list} ${extract_masks} -f \ + --overwrite_distance both_ends include ${distance} --overwrite_distance either_end include ${distance} --display_count > ${basename}__${out_extension}.txt; +fi + +if [ "${keep}" = "true" ] +then 
+ scil_tractogram_math.py difference ${tractogram} \ + ${basename}__${out_extension}.trk \ + ${meta.id}__${remaining_extension}.trk \ + --save_empty; + scil_count_streamlines.py ${meta.id}__${remaining_extension}.trk > ${meta.id}__${remaining_extension}.txt; +fi diff --git a/tests/config/nextflow.config b/tests/config/nextflow.config new file mode 100644 index 0000000..4392201 --- /dev/null +++ b/tests/config/nextflow.config @@ -0,0 +1,81 @@ +process { + cpus = { check_max(4, 'cpus') } + memory = { check_max(4.GB, 'memory') } + time = { check_max(2.h, 'time') } + + withLabel: process_single { + cpus = { check_max(1, 'cpus') } + } + + withLabel: process_low { + cpus = { check_max(2, 'cpus') } + } + + withLabel: process_medium { + cpus = { check_max(4, 'cpus') } + } + + withLabel: process_high { + cpus = { check_max(8, 'cpus') } + } + + withLabel: process_long { + time = { check_max(20.h, 'time') } + } + + withLabel: process_high_memory { + memory = { check_max(16.GB, 'memory') } + } + +} + +profiles { + apptainer { + apptainer.enabled = true + apptainer.autoMounts = true + } + singularity { + singularity.enabled = true + singularity.autoMounts = true + } + podman { + podman.enabled = true + podman.userEmulation = true + podman.runOptions = "--runtime crun --platform linux/x86_64 --systemd=always" + } + docker { + docker.enabled = true + docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' + } + docker_self_hosted{ + docker.enabled = true + docker.fixOwnership = true + docker.runOptions = '--platform=linux/amd64' + } + docker_self_hosted_bigmem{ + docker.enabled = true + docker.fixOwnership = true + docker.runOptions = '--platform=linux/amd64' + } + arm { + docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' + } + gpu { + docker.runOptions = '-u $(id -u):$(id -g) --gpus all' + apptainer.runOptions = '--nv' + singularity.runOptions = '--nv' + } + conda { + conda.enabled = true + conda.channels = ['conda-forge', 'bioconda'] + 
apptainer.enabled = false + } + mamba { + conda.enabled = true + conda.useMamba = true + } +} + +manifest { + nextflowVersion = '!>=23.04.0' +} diff --git a/tests/nextflow.config b/tests/nextflow.config new file mode 100644 index 0000000..4770b86 --- /dev/null +++ b/tests/nextflow.config @@ -0,0 +1,86 @@ + +params { + outdir = "output/" + publish_dir_mode = "copy" + singularity_pull_docker_container = false + + test_data_remote = "https://scil.usherbrooke.ca" + test_database_path = "scil_test_data/dvc-store/files/md5" + test_data_associations = new groovy.json.JsonSlurper().parse( + new File("$projectDir/tests/test_data.json") + ) +} + +report { + enabled = true + file = "$launchDir/report.html" + overwrite = true +} + + +includeConfig "$projectDir/config/resources.config" +includeConfig "$projectDir/tests/config/nextflow.config" + +def check_max (obj, type) { + if (type == 'memory') { + try { + if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) + return params.max_memory as nextflow.util.MemoryUnit + else + return obj + } catch (all) { + println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" + return obj + } + } else if (type == 'time') { + try { + if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) + return params.max_time as nextflow.util.Duration + else + return obj + } catch (all) { + println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" + return obj + } + } else if (type == 'cpus') { + try { + return Math.min( obj, params.max_cpus as int ) + } catch (all) { + println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! 
Using default value: $obj" + return obj + } + } +} + + +import com.sun.management.OperatingSystemMXBean +import java.lang.management.ManagementFactory + +def query_container_limits (type) { + if (type == 'memory') { + try { + def memory_limit = System.getenv("DEVCONTAINER_RAM_LIMIT_GB") + if (memory_limit) { + return "${memory_limit}.GB" + } else { + def sysmem = (( + (OperatingSystemMXBean) ManagementFactory.getOperatingSystemMXBean() + ).getTotalPhysicalMemorySize() / 1073741824).toInteger() + return "${sysmem}.GB" + } + } catch (all) { + return 0.GB + } + } else if (type == 'cpus') { + try { + def cpu_limit = System.getenv("DEVCONTAINER_CPU_LIMIT") + if (cpu_limit) { + return cpu_limit as int + } else { + return Runtime.runtime.availableProcessors() as int + } + } catch (all) { + return 1 + } + } +} diff --git a/tests/plugins.json b/tests/plugins.json new file mode 100644 index 0000000..08631db --- /dev/null +++ b/tests/plugins.json @@ -0,0 +1,17 @@ +[ + { + "id": "nft-nifti", + "latest": "0.0.1", + "url": "https://github.com/scilus/nft-nifti", + "github": "scilus/nft-nifti", + "description": "Provides support for NIFTI files.", + "author": "Arnaud Boré and Alex V Caron", + "keywords": [], + "releases": [ + { + "version": "0.0.1", + "url": "https://github.com/scilus/nft-nifti/releases/download/0.0.1/nft-nifti-0.0.1.jar" + } + ] + } +] diff --git a/tests/test_data.json b/tests/test_data.json new file mode 100644 index 0000000..b4d1992 --- /dev/null +++ b/tests/test_data.json @@ -0,0 +1,62 @@ +{ + "commit_amico.zip": "c190e6b9d22350b51e222c60febe13b4", + "bundles.zip": "6d3ebc21062bf320714483b7314a230a", + "stats.zip": "2aeac4da5ab054b3a460fc5fdc5e4243", + "bst.zip": "eed227fd246255e7417f92d49eb1066a", + "filtering.zip": "19116ff4244d057c8214ee3fe8e05f71", + "ihMT.zip": "08fcf44848ba2649aad5a5a470b3cb06", + "tractometry.zip": "890bfa70e44b15c0d044085de54e00c6", + "bids_json.zip": "97fd9a414849567fbfdfdb0ef400488b", + "MT.zip": "1f4345485248683b3652c97f2630950e", + 
"btensor_testdata.zip": "7ada72201a767292d56634e0a7bbd9ad", + "tracking.zip": "4793a470812318ce15f1624e24750e4d", + "atlas.zip": "dc34e073fc582476504b3caf127e53ef", + "anatomical_filtering.zip": "5282020575bd485e15d3251257b97e01", + "connectivity.zip": "fe8c47f444d33067f292508d7050acc4", + "plot.zip": "a1dc54cad7e1d17e55228c2518a1b34e", + "others.zip": "82248b4888a63b0aeffc8070cc206995", + "fodf_filtering.zip": "5985c0644321ecf81fd694fb91e2c898", + "processing.zip": "eece5cdbf437b8e4b5cb89c797872e28", + "surface_vtk_fib.zip": "241f3afd6344c967d7176b43e4a99a41", + "tractograms.zip": "5497d0bf3ccc35f8f4f117829d790267", + "registration.zip": "95ebaa64866bac18d8b0fcd96cd10958", + "topup_eddy.zip": "7847496510dc85fb205ba9586f0011ff", + "topup_eddy_light.zip": "54369410cfd0587e1d8916047945c1fd", + "bids.zip": "68b9efa1e009a59a83adef3aeea9b469", + "antsbet.zip": "66850bea7af7c1f3fc4e7d371d12d6e8", + "freesurfer.zip": "3b876fba6fd77d4962243ac9647bc505", + "freesurfer_reslice.zip": "636ee21cd2ed2910a8f64d432b47f578", + "freesurfer_transforms.zip": "5c562afd9dea52eb99f13ab4dffd4113", + "light.zip": "f2a3a8bddf43d1f67a5e8867ce9ebaa2", + "heavy.zip": "6f2cd0bbdb162455e71de1c7d3b4eb18", + "raw_b0.zip": "6e7b8181b3e929f4235a9364aa60656b", + "raw_DWIms300-1000-2000-dir8-32-60.zip": "cfa4cb9cc9595a18272fdb0ef3a08ce5", + "raw_DWIms300-1000-dir8-32.zip": "29dfaba220a1e921f95e16f811977f36", + "raw_DWIms1000-2000-dir32-60.zip": "7fed32798f158ab8118a116fd9a69f52", + "raw_DWIss300-dir8.zip": "33e735bf7514159b803f8302f4d97db8", + "raw_DWIss1000-dir32.zip": "55246c8f6fe8c04cb8538715cad21c38", + "raw_DWIss2000-dir60.zip": "8dc746ccf9eed5f8931265bad379e2e5", + "raw_EPI.zip": "631e9781bda5ae482d92bba1c612bbf5", + "raw_FLAIR.zip": "67663a38ce23120b1b42d73fa9268d2d", + "raw_T1w.zip": "2a4adad9dd05a0fb626cb71945bc11b5", + "raw_segmentation.zip": "6aac249229e1e360d1fca90bc4a4bec2", + "topup_results.zip": "f83628eac00c9851cb015588909c4ef8", + "b0.zip": "fefd7452a61a80f6ceac7352e6231a0d", + 
"DWIms300-1000-2000-dir8-32-60.zip": "6249618893c832dfb4392c72fd7c81cb", + "DWIms300-1000-dir8-32.zip": "52e67fa54a2494d2ad5b496d25e6ee99", + "DWIms1000-2000-dir32-60.zip": "5db0cc6624da1d5ec99893b08629e45f", + "DWIms1000-2000-dir10-10.zip": "31dd0ee67ff31c26991b619dfd37da0f", + "DWIss300-dir8.zip": "ef1335775c9fbf11452c71e01b097282", + "DWIss1000-dir32.zip": "c72fd590a3f400278299a068eeffd1d0", + "DWIss2000-dir60.zip": "9ad7ccb4e30b39c47f374018db2e6424", + "T1w.zip": "8e9cf2beec4595d407732152a19589f5", + "dti.zip": "ba866c59fc4d984f626691a6b941fc32", + "responses.zip": "1a5b862b91c72740bcce0431a1f9254a", + "segmentation.zip": "ab51be6d142ad1a8e30ffb964447d010", + "freesurfer_full.zip": "203e482426065326ff5920368d4d7908", + "freesurfer_nifti.zip": "adb5ac4cf5c45040339e04e7c142e8c9", + "transform.zip": "148afd665ddbd2bb80493208480571a9", + "dicom.zip": "234913cbad53c19aa19aef9eda0a3839", + "TOPUP.zip": "da11914087a1a4ed1d21d478540d41b0", + "dwi_mouse.zip": "ea83ac51297b255b34eabb826e55153c" +}