Merge pull request #19 from eurunuela/afni
Drop AFNI dependency
eurunuela authored Oct 13, 2022
2 parents cb38b5f + 6d58475 commit 76e67cd
Showing 12 changed files with 411 additions and 253 deletions.
12 changes: 6 additions & 6 deletions .circleci/config.yml
@@ -32,7 +32,7 @@ jobs:
paths:
- src/pySPFM
- restore_cache: # ensure this step occurs *before* installing dependencies
key: deps1-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "<< parameters.PYTHON >>" }}
key: conda-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "<< parameters.PYTHON >>" }}
- run: # will overwrite pySPFM installation each time
name: Generate environment
command: |
@@ -46,7 +46,7 @@ jobs:
fi
python setup.py install --user
- save_cache: # environment cache tied to requirements
key: deps1-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "<< parameters.PYTHON >>" }}
key: conda-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "<< parameters.PYTHON >>" }}
paths:
- "/opt/conda/envs/py<< parameters.PYTHON >>_env"

@@ -61,7 +61,7 @@ jobs:
- attach_workspace: # get pySPFM
at: /tmp
- restore_cache: # load environment
key: deps1-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "<< parameters.PYTHON >>" }}
key: conda-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "<< parameters.PYTHON >>" }}
- run:
name: Run tests and compile coverage
command: |
@@ -86,7 +86,7 @@ jobs:
- attach_workspace: # get pySPFM
at: /tmp
- restore_cache: # load environment
key: deps1-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "<< parameters.PYTHON >>" }}
key: conda-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "<< parameters.PYTHON >>" }}
- run:
name: Run tests and compile coverage
command: |
@@ -108,7 +108,7 @@ jobs:
- attach_workspace: # get pySPFM
at: /tmp
- restore_cache: # load environment
key: deps1-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "3.9" }}
key: conda-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "3.9" }}
- run:
name: Build documentation
command: |
@@ -128,7 +128,7 @@ jobs:
- attach_workspace: # get pySPFM
at: /tmp
- restore_cache: # load environment
key: deps1-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "3.9" }}
key: conda-{{ checksum "setup.cfg" }}-{{ checksum "setup.py" }}-{{ "3.9" }}
- run:
name: Linting
command: |
305 changes: 131 additions & 174 deletions pySPFM/deconvolution/hrf_generator.py
@@ -3,211 +3,168 @@
import subprocess

import numpy as np
import scipy.io
import scipy.stats
from nilearn.glm.first_level import glover_hrf, spm_hrf

LGR = logging.getLogger("GENERAL")
RefLGR = logging.getLogger("REFERENCES")


def hrf_linear(TR, p):
"""Generate HRF.
Parameters
----------
TR : float
TR of the acquisition.
p : list
Input parameters of the response function (two gamma functions).
defaults
(seconds)
p[1] - delay of response (relative to onset) 6
p[2] - delay of undershoot (relative to onset) 16
p[3] - dispersion of response 1
p[4] - dispersion of undershoot 1
p[5] - ratio of response to undershoot 6
p[6] - onset (seconds) 0
p[7] - length of kernel (seconds) 32
Returns
-------
hrf : ndarray
A hemodynamic response function (HRF).
Notes
-----
Based on the spm_hrf function in SPM8
Written in R by Cesar Caballero Gaudes
Translated into Python by Eneko Urunuela
"""
# global parameter
# --------------------------------------------------------------------------
fMRI_T = 16

# modelled hemodynamic response function - {mixture of Gammas}
# --------------------------------------------------------------------------
dt = TR / fMRI_T
u = np.arange(0, p[6] / dt + 1, 1) - p[5] / dt
a1 = p[0] / p[2]
b1 = 1 / p[3]
a2 = p[1] / p[3]
b2 = 1 / p[3]

hrf = (
scipy.stats.gamma.pdf(u * dt, a1, scale=b1)
- scipy.stats.gamma.pdf(u * dt, a2, scale=b2) / p[4]
) / dt
time_axis = np.arange(0, int(p[6] / TR + 1), 1) * fMRI_T
hrf = hrf[time_axis]
min_hrf = 1e-9 * min(hrf[hrf > 10 * np.finfo(float).eps])

if min_hrf < 10 * np.finfo(float).eps:
min_hrf = 10 * np.finfo(float).eps

hrf[hrf == 0] = min_hrf

return hrf


def hrf_afni(TR, lop_hrf="SPMG1"):
"""Generate HRF with AFNI's 3dDeconvolve.
Parameters
----------
TR : float
TR of the acquisition.
lop_hrf : str
3dDeconvolve option to select HRF shape, by default "SPMG1"
Returns
-------
hrf : ndarray
A hemodynamic response function (HRF).
Notes
-----
AFNI installation is needed as it runs 3dDeconvolve on the terminal with subprocess.
"""
dur_hrf = 8
last_hrf_sample = 1
# Increases duration until last HRF sample is zero
while last_hrf_sample != 0:
dur_hrf = 2 * dur_hrf
hrf_command = (
"3dDeconvolve -x1D_stop -nodata %d %f -polort -1 -num_stimts 1 -stim_times 1 "
"'1D:0' '%s' -quiet -x1D stdout: | 1deval -a stdin: -expr 'a'"
) % (dur_hrf, TR, lop_hrf)
hrf_tr_str = subprocess.check_output(
hrf_command, shell=True, universal_newlines=True
).splitlines()
hrf = np.array([float(i) for i in hrf_tr_str])
last_hrf_sample = hrf[len(hrf) - 1]
if last_hrf_sample != 0:
LGR.info(
"Duration of HRF was not sufficient for specified model. Doubling duration "
"and computing again."
)

# Removes tail of zero samples
while last_hrf_sample == 0:
hrf = hrf[0 : len(hrf) - 1]
last_hrf_sample = hrf[len(hrf) - 1]

return hrf


class HRFMatrix:
"""A class for generating an HRF matrix.
Parameters
----------
TR : float
TR of the acquisition, by default 2
TE : list
Values of TE in ms, by default None
n_scans : int
Number of volumes in acquisition, by default 200
r2only : bool
Whether to only consider R2* in the signal model, by default True
is_afni : bool
Whether to use AFNI's 3dDeconvolve to generate HRF matrix, by default True
lop_hrf : str
3dDeconvolve option to select HRF shape, by default "SPMG1"
model : str
Model to use for HRF, by default "spm"
block : bool
Whether to use the block model in favor of the spike model, by default False
custom : str
Path to custom HRF file, by default None
"""

def __init__(
self,
TR=2,
TE=None,
n_scans=200,
r2only=True,
is_afni=True,
lop_hrf="SPMG1",
te=None,
model="spm",
block=True,
):
self.TR = TR
self.TE = TE
self.n_scans = n_scans
self.r2only = r2only
self.lop_hrf = lop_hrf
self.is_afni = is_afni
# If te values are higher than 1, then assume they are in ms and convert to s
# If te values are lower than 1, then assume they are in s

if te is not None:
if max(te) > 1:
te = [i / 1000 for i in te]
self.te = te

self.model = model
self.block = block

def generate_hrf(self):
def generate_hrf(self, tr, n_scans):
"""Generate HRF matrix.
Parameters
----------
tr : float
tr of the acquisition.
n_scans : int
Number of scans.
Returns
-------
self
self.hrf_ : array_like
A hemodynamic response function (HRF).
"""
if self.is_afni:
hrf_SPM = hrf_afni(self.TR, self.lop_hrf)
else:
p = [6, 16, 1, 1, 6, 0, 32]
hrf_SPM = hrf_linear(self.TR, p)

self.L_hrf = len(hrf_SPM) # Length
mahrf = max(abs(hrf_SPM)) # Max value
filler = np.zeros(self.n_scans - hrf_SPM.shape[0], dtype=int)
hrf_SPM = np.append(hrf_SPM, filler) # Fill up array with zeros until n_scans

temp = hrf_SPM

for i in range(self.n_scans - 1):
foo = np.append(np.zeros(i + 1), hrf_SPM[0 : (len(hrf_SPM) - i - 1)])
temp = np.column_stack((temp, foo))
# Read custom HRF from file if self.model ends in .1D or .txt
if self.model.endswith(".1D") or self.model.endswith(".txt"):
hrf = np.loadtxt(self.custom)

# Make sure that the HRF is not longer than the number of scans
# If it is, then raise an error
if len(hrf) > n_scans:
raise ValueError(
"HRF is longer than the number of scans. "
"Please make sure that your custom HRF is not longer than the number of scans."
)
else:
# Get HRF from nilearn
if self.model == "spm":
hrf = spm_hrf(tr, oversampling=1, time_length=n_scans * tr)
elif self.model == "glover":
hrf = glover_hrf(tr, oversampling=1, time_length=n_scans * tr)
else:
raise ValueError(
"Model must be either 'spm', 'glover' or a custom '.1D' or '.txt' file, not %s"
% self.model
)

# Calculate maximum HRF value
max_val = max(abs(hrf))

# Generate HRF matrix
hrf_mtx = hrf
for i in range(n_scans - 1):
foo = np.append(np.zeros(i + 1), hrf[0 : (len(hrf) - i - 1)])
hrf_mtx = np.column_stack((hrf_mtx, foo))

# Normalize HRF matrix
hrf_mtx = hrf_mtx / max_val

# Concatenate and scale HRFs for multi-echo,
# leave it as it is for single-echo.
if len(self.te) > 1:
# Add integrator if necessary
if self.block:
hrf_mtx_te = -self.te[0] * np.dot(hrf_mtx, np.tril(np.ones(n_scans)))
else:
hrf_mtx_te = -self.te[0] * hrf_mtx

# Concatenate and scale HRFs for multi-echo
for teidx in range(len(self.te) - 1):
# Add integrator if necessary
if self.block:
hrf_mtx_te = np.vstack(
(
hrf_mtx_te,
-self.te[teidx + 1] * np.dot(hrf_mtx, np.tril(np.ones(n_scans))),
)
)
else:
hrf_mtx_te = np.vstack((hrf_mtx_te, -self.te[teidx + 1] * hrf_mtx))

if len(self.TE) > 1:
tempTE = -self.TE[0] * temp
for teidx in range(len(self.TE) - 1):
tempTE = np.vstack((tempTE, -self.TE[teidx + 1] * temp))
self.hrf_ = hrf_mtx_te
else:
tempTE = temp
if self.block:
self.hrf_ = np.dot(hrf_mtx, np.tril(np.ones(n_scans)))
else:
self.hrf_ = hrf_mtx

if self.r2only:
self.hrf = tempTE

self.hrf_norm = self.hrf / mahrf

if self.block:
if len(self.TE) > 1:
for teidx in range(len(self.TE)):
temp = self.hrf[
teidx * self.n_scans : (teidx + 1) * self.n_scans - 1, :
].copy()
self.hrf[teidx * self.n_scans : (teidx + 1) * self.n_scans - 1, :] = np.dot(
temp, np.tril(np.ones(self.n_scans))
)
temp = self.hrf_norm[
teidx * self.n_scans : (teidx + 1) * self.n_scans - 1, :
].copy()
self.hrf_norm[
teidx * self.n_scans : (teidx + 1) * self.n_scans - 1, :
] = np.dot(temp, np.tril(np.ones(self.n_scans)))
else:
self.hrf = np.dot(self.hrf, np.tril(np.ones(self.n_scans)))
self.hrf_norm = np.dot(self.hrf_norm, np.tril(np.ones(self.n_scans)))

return self

return self


def hrf_afni(tr, hrf_model="SPMG1"):
"""Generate HRF with AFNI's 3dDeconvolve.

Parameters
----------
tr : float
tr of the acquisition.
hrf_model : str
3dDeconvolve option to select HRF shape, by default "SPMG1"

Returns
-------
hrf : array_like
A hemodynamic response function (HRF).

Notes
-----
AFNI installation is needed as it runs 3dDeconvolve on the terminal with subprocess.
"""
dur_hrf = 8
last_hrf_sample = 1
# Increases duration until last HRF sample is zero
while last_hrf_sample != 0:
dur_hrf = 2 * dur_hrf
hrf_command = (
"3dDeconvolve -x1D_stop -nodata %d %f -polort -1 -num_stimts 1 -stim_times 1 "
"'1D:0' '%s' -quiet -x1D stdout: | 1deval -a stdin: -expr 'a'"
) % (dur_hrf, tr, hrf_model)
hrf_tr_str = subprocess.check_output(
hrf_command, shell=True, universal_newlines=True
).splitlines()
hrf = np.array([float(i) for i in hrf_tr_str])
last_hrf_sample = hrf[len(hrf) - 1]
if last_hrf_sample != 0:
LGR.info(
"Duration of HRF was not sufficient for specified model. Doubling duration "
"and computing again."
)

# Removes tail of zero samples
while last_hrf_sample == 0:
hrf = hrf[0 : len(hrf) - 1]
last_hrf_sample = hrf[len(hrf) - 1]

return hrf
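
For context, a minimal usage sketch of the refactored, AFNI-free code path. This is not part of the commit: it only assumes the HRFMatrix class and the generate_hrf(tr, n_scans) signature shown in the diff above, and the repetition time, echo times, and scan count are illustrative values.

from pySPFM.deconvolution.hrf_generator import HRFMatrix

# Illustrative acquisition parameters (not taken from the repository)
tr = 2.0           # repetition time in seconds
n_scans = 200      # number of volumes
te = [14, 28, 42]  # echo times in ms; values > 1 are assumed to be ms and divided by 1000

# Spike model with nilearn's SPM HRF; no 3dDeconvolve subprocess is involved
hrf_obj = HRFMatrix(te=te, model="spm", block=False).generate_hrf(tr, n_scans)

# For multi-echo data, hrf_ stacks one scaled (n_scans x n_scans) block per echo
print(hrf_obj.hrf_.shape)  # expected: (len(te) * n_scans, n_scans)

Passing model="glover" selects nilearn's Glover HRF instead, while a model ending in ".1D" or ".txt" triggers the custom-HRF branch; this selection is what replaces the old is_afni/hrf_linear logic inside generate_hrf.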