Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2,656 changes: 2,656 additions & 0 deletions assets/LUT_AMBA-LR.json

Large diffs are not rendered by default.

1,329 changes: 1,329 additions & 0 deletions assets/LUT_AMBA.json

Large diffs are not rendered by default.

39 changes: 33 additions & 6 deletions main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,15 @@ include { TRACKING_MASK } from './modules/local/tracking/mask/main.nf'
include { TRACKING_LOCALTRACKING } from './modules/nf-neuro/tracking/localtracking/main.nf'
include { MOUSE_EXTRACTMASKS } from './modules/local/mouse/extractmasks/main.nf'
include { MOUSE_VOLUMEROISTATS } from './modules/local/mouse/volumeroistats/main.nf'
include { MOUSE_COMBINESTATS } from './modules/local/mouse/combinestats/main.nf'
include { STATS_METRICSINROI as STATS_AMBA } from './modules/nf-neuro/stats/metricsinroi/main'
include { STATS_METRICSINROI as STATS_AMBA_LR } from './modules/nf-neuro/stats/metricsinroi/main'
// include { MOUSE_COMBINESTATS as COMBINESTATS_AMBA } from './modules/local/mouse/combinestats/main.nf'
// include { MOUSE_COMBINESTATS as COMBINESTATS_AMBA_LR } from './modules/local/mouse/combinestats/main.nf'
include { MOUSE_COMBINESTATS as COMBINESTATS_MERGED} from './modules/local/mouse/combinestats/main.nf'
include { MULTIQC } from "./modules/nf-core/multiqc/main"
include { PRE_QC } from './modules/local/mouse/preqc/main.nf'


workflow get_data {
main:
if ( !params.input ) {
Expand All @@ -43,18 +48,26 @@ workflow get_data {
input = file(params.input)
// ** Loading all files. ** //
dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bval,bvec}", size: 3, flat: true)
{ it.parent.name }
.map{ sid, bvals, bvecs, dwi -> [ [id: sid], dwi, bvals, bvecs ] } // Reordering the inputs.
{ it.parent.name }
.map{ sid, bvals, bvecs, dwi -> [ [id: sid], dwi, bvals, bvecs ] } // Reordering the inputs.

mask_channel = Channel.fromPath("$input/**/*mask.nii.gz")
.map { mask_file -> def sid = mask_file.parent.name
[[id: sid], mask_file] }

template_channel = Channel.fromPath("$projectDir/assets/reference_rgb_mqc.png")

lut_channel = Channel.of([
amba : file("$projectDir/assets/LUT_AMBA.json"),
amba_lr: file("$projectDir/assets/LUT_AMBA-LR.json")
])


emit:
dwi = dwi_channel
mask = mask_channel
template_rgb = template_channel
lut = lut_channel
}

workflow {
Expand All @@ -80,6 +93,7 @@ workflow {
bvec: [meta, bvec]
}
ch_ref_rgb = data.template_rgb
ch_lut = data.lut

if ( params.run_preqc ) {
PRE_QC(ch_dwi_bvalbvec.dwi.join(ch_dwi_bvalbvec.bvs_files).combine(ch_ref_rgb))
Expand Down Expand Up @@ -200,7 +214,7 @@ workflow {
ch_multiqc_files = ch_multiqc_files.mix(TRACKING_LOCALTRACKING.out.mqc)
}

MOUSE_EXTRACTMASKS(MOUSE_REGISTRATION.out.ANO_LR)
MOUSE_EXTRACTMASKS(MOUSE_REGISTRATION.out.ANO)

ch_metrics = RECONST_DTIMETRICS.out.md
.join(RECONST_DTIMETRICS.out.fa)
Expand All @@ -213,10 +227,23 @@ workflow {
.combine(MOUSE_EXTRACTMASKS.out.masks_dir, by: 0)
MOUSE_VOLUMEROISTATS(ch_for_stats)

all_stats = MOUSE_VOLUMEROISTATS.out.stats
STATS_AMBA(ch_metrics.join(MOUSE_REGISTRATION.out.ANO.combine(ch_lut.map{ it.amba })))
STATS_AMBA_LR(ch_metrics.join(MOUSE_REGISTRATION.out.ANO_LR.combine(ch_lut.map{ it.amba_lr })))


// all_stats_amba = STATS_AMBA.out.stats_json
// .map{ _meta, json -> json}
// .collect()
// all_stats_lr = STATS_AMBA_LR.out.stats_json
// .map{ _meta, json -> json}
// .collect()
all_stats_merged = MOUSE_VOLUMEROISTATS.out.stats
.map{ _meta, json -> json}
.collect()
MOUSE_COMBINESTATS(all_stats)

// COMBINESTATS_AMBA(all_stats_amba)
// COMBINESTATS_AMBA_LR(all_stats_lr)
COMBINESTATS_MERGED(all_stats_merged)

ch_multiqc_files = ch_multiqc_files
.groupTuple()
Expand Down
5 changes: 5 additions & 0 deletions modules.json
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,11 @@
"git_sha": "5420aa212ac5ad3f255f50ca5b0d5995693f8c35",
"installed_by": ["modules"]
},
"stats/metricsinroi": {
"branch": "main",
"git_sha": "ca21922a7affe217f7391a98b5af1e50a79f094c",
"installed_by": ["modules"]
},
"tracking/localtracking": {
"branch": "main",
"git_sha": "ec4f04fd25e3167ba495094925027248722c1aad",
Expand Down
2 changes: 1 addition & 1 deletion modules/local/mouse/combinestats/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ process MOUSE_COMBINESTATS {

for curr_stat in $stats_list;
do
bname=\${curr_stat/__stats/}
bname=\${curr_stat/stats/}
mv \$curr_stat stats/\${bname}
done

Expand Down
18 changes: 8 additions & 10 deletions modules/local/mouse/extractmasks/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ process MOUSE_EXTRACTMASKS {
tuple val(meta), path("*masks") , emit: masks_dir
tuple val(meta), path("*__masks/*_MO_L.nii.gz"), path("*__masks/*_MO_R.nii.gz") , emit: masks_MO, optional: true
tuple val(meta), path("*__masks/*_SS_L.nii.gz"), path("*__masks/*_SS_R.nii.gz") , emit: masks_SS, optional: true
path("*__masks/*.txt")
path "versions.yml" , emit: versions

when:
Expand All @@ -21,18 +20,17 @@ process MOUSE_EXTRACTMASKS {
def prefix = task.ext.prefix ?: "${meta.id}"
def labels = task.ext.labels
"""
mouse_extract_masks.py $atlas $labels ${prefix}__masks -f
mouse_extract_masks.py $atlas $labels ${prefix}__masks --merge -f

for curr_label in $labels; do
for side in L R; do
ids=\$(cat ${prefix}__masks/\${curr_label}_\$side.txt)
if [[ \$ids ]]; then
scil_labels_combine ${prefix}__masks/${prefix}__\${curr_label}_\$side.nii.gz \
--volume_ids $atlas \${ids} \
--merge_groups -f
fi
done
ids=\$(cat ${prefix}__masks/\${curr_label}.txt)
if [[ \$ids ]]; then
scil_labels_combine ${prefix}__masks/${prefix}__\${curr_label}.nii.gz \
--volume_ids $atlas \${ids} \
--merge_groups -f
fi
done
rm -rf ${prefix}__masks/*.txt

cat <<-END_VERSIONS > versions.yml
"${task.process}":
Expand Down
4 changes: 2 additions & 2 deletions modules/local/mouse/volumeroistats/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ process MOUSE_VOLUMEROISTATS {
input:
tuple val(meta), path(metrics_list), path(mask_directory)
output:
tuple val(meta), path("*__stats.json") , emit: stats
tuple val(meta), path("*_stats.json") , emit: stats
path "versions.yml" , emit: versions

when:
Expand Down Expand Up @@ -38,7 +38,7 @@ process MOUSE_VOLUMEROISTATS {
cp \$mask masks/\${bname}.nii.gz
done

scil_volume_stats_in_ROI masks/*gz --metrics_dir metrics -f > ${prefix}__stats.json
scil_volume_stats_in_ROI masks/*gz --metrics_dir metrics -f > ${prefix}_stats.json

cat <<-END_VERSIONS > versions.yml
"${task.process}":
Expand Down
3 changes: 3 additions & 0 deletions modules/nf-neuro/stats/metricsinroi/environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Conda environment descriptor for the nf-neuro stats/metricsinroi module.
# Intentionally empty — presumably because the module always runs inside its
# container image rather than a conda environment (TODO confirm upstream).
channels: []
dependencies: []
name: stats_metricsinroi
205 changes: 205 additions & 0 deletions modules/nf-neuro/stats/metricsinroi/main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,205 @@
// Compute per-ROI statistics (mean and std) of one or more metric volumes and
// export them as a JSON file plus flat CSV/TSV tables (one row per ROI, one
// column per metric). All behaviour toggles come from task.ext so the module
// can be tuned from configuration without editing this process.
process STATS_METRICSINROI {
    tag "$meta.id"
    label 'process_single'

    container "scilus/scilpy:dev"

    input:
    // metrics  : scalar volume(s) sampled inside the ROIs (passed to --metrics).
    // rois     : ROI mask(s), or a label volume when task.ext.use_label is set.
    // rois_lut : label lookup table, used only with use_label; pass [] when unused.
    tuple val(meta), path(metrics), path(rois), path(rois_lut) /* optional, input = [] */

    output:
    // Raw (post-processed) JSON of per-ROI/per-metric {mean, std} values.
    tuple val(meta), path("*.json") , emit: stats_json
    // Tabular exports: one file for means, one for standard deviations.
    tuple val(meta), path("*_desc-mean_*.{csv,tsv}") , emit: stats_mean
    tuple val(meta), path("*_desc-std_*.{csv,tsv}") , emit: stats_std
    path "versions.yml" , emit: versions

    when:
    task.ext.when == null || task.ext.when

    script:
    def prefix = task.ext.prefix ?: "${meta.id}"
    // Optional first_suffix is prepended to the "stats" token in output names.
    def suffix = task.ext.first_suffix ? "${task.ext.first_suffix}_stats" : "stats"
    def bin = task.ext.bin ? "--bin " : ""
    def normalize_weights = task.ext.normalize_weights ? "--normalize_weights " : ""
    // use_label selects scil_volume_stats_in_labels (label volume + LUT)
    // instead of scil_volume_stats_in_ROI (binary/weighted masks).
    def use_label = task.ext.use_label ? true : false
    // Substrings to strip from the JSON's top-level keys (ROI names) and from
    // the nested per-metric keys, respectively, after stats are computed.
    def key_substrs_to_remove = task.ext.key_substrs_to_remove ?: []
    def value_substrs_to_remove = task.ext.value_substrs_to_remove ?: []

    // Extra columns copied from `meta` into the CSV/TSV tables; a key missing
    // from `meta` becomes the "null" sentinel, emitted as an empty cell below.
    def meta_columns = task.ext.meta_columns ?: []
    def meta_columns_values = meta_columns.collect { col -> meta.containsKey(col) ? meta[col] : "null" }

    def output_format = task.ext.output_format ?: 'tsv' // 'csv' or 'tsv'

    assert output_format in ['csv', 'tsv'] : "output_format must be either 'csv' or 'tsv'"

    def sep = output_format == 'tsv' ? '\t' : ','
    """
    export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1
    export OMP_NUM_THREADS=1
    export OPENBLAS_NUM_THREADS=1

    if $use_label;
    then
        if [[ ! -f "$rois_lut" ]];
        then
            echo "ROI LUT is missing. Will fail."
        fi

        scil_volume_stats_in_labels $rois $rois_lut \
            --metrics $metrics \
            --sort_keys > ${prefix}_${suffix}.json
    else
        scil_volume_stats_in_ROI $rois \
            --metrics $metrics \
            --sort_keys \
            --keep_unique_roi_name \
            $bin $normalize_weights > ${prefix}_${suffix}.json
    fi

    # Remove all substrings from the keys as specified
    # in the configuration via 'task.ext.key_substrs_to_remove'
    for substr in ${key_substrs_to_remove.join(' ')};
    do
        SUBSTR="\$substr" jq -r '
            with_entries(.key |= sub(env.SUBSTR; ""))
        ' ${prefix}_${suffix}.json > ${prefix}_${suffix}_tmp.json
        mv ${prefix}_${suffix}_tmp.json ${prefix}_${suffix}.json
    done

    # Extract 'desc' substring from keys and store it temporarily in values
    # This allows us to remove the substring from the key now and append it later
    jq -r '
        with_entries(
            .value |= with_entries(
                if (.key | test("_desc-[a-zA-Z0-9]+")) then
                    (.key | capture("_desc-(?<desc>[a-zA-Z0-9]+)").desc) as \$d |
                    .key |= sub("_desc-[a-zA-Z0-9]+"; "") |
                    .key |= if \$d == "fwc" then . + "t" else . + "_" + \$d end
                else
                    .
                end
            )
        )
    ' ${prefix}_${suffix}.json > ${prefix}_${suffix}_tmp.json
    mv ${prefix}_${suffix}_tmp.json ${prefix}_${suffix}.json

    # Remove all substrings from the values as specified
    # in the configuration via 'task.ext.value_substrs_to_remove'
    for substr in ${value_substrs_to_remove.join(' ')};
    do
        SUBSTR="\$substr" jq -r '
            with_entries(
                .value |= with_entries(
                    .key |= sub(env.SUBSTR; "")
                )
            )
        ' ${prefix}_${suffix}.json > ${prefix}_${suffix}_tmp.json
        mv ${prefix}_${suffix}_tmp.json ${prefix}_${suffix}.json
    done

    # Append the extracted 'desc' to the keys, before the extension if present
    jq -r '
        with_entries(
            .value |= with_entries(
                if (.value.extracted_desc) then
                    (.value.extracted_desc) as \$d |
                    del(.value.extracted_desc) |
                    .key |= if test("\\\\.") then sub("(?<base>.*?)(?<ext>\\\\..*)\$"; .base + "_" + \$d + .ext) else . + "_" + \$d end
                else
                    .
                end
            )
        )
    ' ${prefix}_${suffix}.json > ${prefix}_${suffix}_tmp.json
    mv ${prefix}_${suffix}_tmp.json ${prefix}_${suffix}.json

    # Get all ROIs names from the JSON
    rois=\$(jq -r "keys[]" ${prefix}_${suffix}.json)

    # All ROIs have the same metrics. To get the metrics names from
    # the JSON, we can just fetch them from the first ROI.
    first_roi=\$(printf '%s\\n' \$rois | head -n 1)

    # Extract the metrics names from this first roi
    metrics=\$(FIRST_ROI="\$first_roi" jq -r ".\\"\$first_roi\\" | keys[]" ${prefix}_${suffix}.json)

    # Create the CSV/TSV headers
    # (sample, roi, metric1, metric2, ..., metricN)
    header_mean="sample${sep}roi"
    header_std="sample${sep}roi"

    # Create the meta columns
    for meta_col in ${meta_columns.join(' ')}; do
        header_mean="\${header_mean}${sep}\${meta_col}"
        header_std="\${header_std}${sep}\${meta_col}"
    done

    # Add the metric columns
    for metric in \$metrics; do
        header_mean="\${header_mean}${sep}\${metric}"
        header_std="\${header_std}${sep}\${metric}"
    done
    echo "\$header_mean" > ${prefix}_desc-mean_${suffix}.${output_format}
    echo "\$header_std" > ${prefix}_desc-std_${suffix}.${output_format}

    for roi in \$rois;
    do
        # Initialize lines with sample and roi
        line_mean="${prefix}${sep}\${roi}"
        line_std="${prefix}${sep}\${roi}"

        # Add meta columns values if specified
        for meta_val in ${meta_columns_values.join(' ')}; do
            if [ "\${meta_val}" == "null" ]; then
                line_mean="\${line_mean}${sep}" # no value = empty string
                line_std="\${line_std}${sep}" # no value = empty string
            else
                line_mean="\${line_mean}${sep}\${meta_val}"
                line_std="\${line_std}${sep}\${meta_val}"
            fi
        done

        for metric in \$metrics;
        do
            # Fetch the "mean" and "std" values from each roi/metric
            # pair from the JSON
            val_mean=\$(jq -r --arg ROI "\$roi" --arg METRIC "\$metric" '.[\$ROI].[\$METRIC].mean' ${prefix}_${suffix}.json)
            val_std=\$(jq -r --arg ROI "\$roi" --arg METRIC "\$metric" '.[\$ROI].[\$METRIC].std' ${prefix}_${suffix}.json)

            # Append values to the lines
            line_mean="\${line_mean}${sep}\${val_mean}"
            line_std="\${line_std}${sep}\${val_std}"
        done

        # Append the completed lines to the files
        echo "\$line_mean" >> ${prefix}_desc-mean_${suffix}.${output_format}
        echo "\$line_std" >> ${prefix}_desc-std_${suffix}.${output_format}
    done

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        scilpy: \$(uv pip -q -n list | grep scilpy | tr -s ' ' | cut -d' ' -f2)
        jq: \$(jq --version |& sed '1!d ; s/jq-//')
    END_VERSIONS
    """

    stub:
    // Stub only touches empty outputs matching the real file names, after a
    // smoke-test of the two scilpy CLIs (-h).
    def prefix = task.ext.prefix ?: "${meta.id}"
    def suffix = task.ext.first_suffix ? "${task.ext.first_suffix}_stats" : "stats"
    def output_format = task.ext.output_format ?: 'tsv' // 'csv' or 'tsv'
    assert output_format in ['csv', 'tsv'] : "output_format must be either 'csv' or 'tsv'"
    """
    scil_volume_stats_in_ROI -h
    scil_volume_stats_in_labels -h

    touch ${prefix}_${suffix}.json
    touch ${prefix}_desc-mean_${suffix}.${output_format}
    touch ${prefix}_desc-std_${suffix}.${output_format}

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        scilpy: \$(uv pip -q -n list | grep scilpy | tr -s ' ' | cut -d' ' -f2)
        jq: \$(jq --version |& sed '1!d ; s/jq-//')
    END_VERSIONS
    """
}
Loading