WIP: fODF interpolation (help needed) #1017

Open
wants to merge 5 commits into master
26 changes: 25 additions & 1 deletion scilpy/image/volume_operations.py
@@ -21,6 +21,7 @@
from scilpy.image.reslice import reslice # Don't use Dipy's reslice. Buggy.
from scilpy.io.image import get_data_as_mask
from scilpy.gradients.bvec_bval_tools import identify_shells
from scilpy.reconst.fodf import exp_u, log_u
from scilpy.utils.spatial import voxel_to_world
from scilpy.utils.spatial import world_to_voxel

@@ -497,14 +498,17 @@ def _interp_code_to_order(interp_code):

def resample_volume(img, ref_img=None, volume_shape=None, iso_min=False,
voxel_res=None,
interp='lin', enforce_dimensions=False):
interp='lin', fodf=False, enforce_dimensions=False):
"""
Function to resample a dataset to match the resolution of another reference
dataset or to the resolution specified in argument.

One (and only one) of the following options must be chosen:
ref, volume_shape, iso_min or voxel_res.

If fodf is True, the fODFs are projected from S3++ to R^3 before resampling,
and then back. This avoids the "swelling" problem [1].

Parameters
----------
img: nib.Nifti1Image
@@ -532,6 +536,13 @@ def resample_volume(img, ref_img=None, volume_shape=None, iso_min=False,
-------
resampled_image: nib.Nifti1Image
Resampled image.

References
----------
[1] Cheng, J., Ghosh, A., Jiang, T., & Deriche, R. (2009). A Riemannian
framework for orientation distribution function computing. In International
Conference on Medical Image Computing and Computer-Assisted Intervention
(pp. 911-918). Berlin, Heidelberg: Springer Berlin Heidelberg.
"""
data = np.asanyarray(img.dataobj)
original_shape = data.shape
@@ -576,9 +587,22 @@ def resample_volume(img, ref_img=None, volume_shape=None, iso_min=False,
logging.info('Data affine setup: %s', nib.aff2axcodes(affine))
logging.info('Resampling data to %s with mode %s', new_zooms, interp)

# If fODF, we project to R^3 before resampling.
if fodf:
# Perform projection to the tangent plane. Ref. 1 eq. 11
data, norm = log_u(data) # v_c

data2, affine2 = reslice(data, affine, original_zooms, new_zooms,
_interp_code_to_order(interp))

# If fODF, we project back to S3++ after resampling.
if fodf:
# Reslice the normalization factor so we can multiply it back.
norm, _ = reslice(norm, affine, original_zooms, new_zooms,
_interp_code_to_order(interp))
# Ref. 1 eq. 12, i.e. the pullback (exponential) map; see exp_u.
data2 = exp_u(data2, norm) # c

logging.info('Resampled data shape: %s', data2.shape)
logging.info('Resampled data affine: %s', affine2)
logging.info('Resampled data affine setup: %s', nib.aff2axcodes(affine2))
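
For reviewers, a minimal sketch of how the new flag is meant to be used, assuming this branch is installed; the file names and the 3-tuple passed to voxel_res are illustrative only:

import nibabel as nib
from scilpy.image.volume_operations import resample_volume

# fODF volume stored as SH coefficients, shape (H, W, D, K).
img = nib.load('fodf.nii.gz')  # hypothetical input
# With fodf=True, the coefficients are mapped to the tangent plane (log_u)
# before reslicing and mapped back to S3++ (exp_u) afterwards.
resampled = resample_volume(img, voxel_res=(1.0, 1.0, 1.0),
                            interp='lin', fodf=True)
nib.save(resampled, 'fodf_1mm.nii.gz')
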
118 changes: 118 additions & 0 deletions scilpy/reconst/fodf.py
@@ -284,3 +284,121 @@ def verify_frf_files(wm_frf, gm_frf, csf_frf):
'Invalid or deprecated FRF format')

return wm_frf, gm_frf, csf_frf


def log_u(c):
""" Log projection of the SH coefficients to euclidian space from S3++
space. See eq. 11 in [1].

Parameters
----------
c : ndarray
SH coefficients of shape (H, W, D, K)

Returns
-------
v_c : ndarray
The projection of the SH coefficients in the tangent space of the SH.
norm : ndarray
The norm of the SH coefficients to undo the normalization.

References
----------
[1] Cheng, J., Ghosh, A., Jiang, T., & Deriche, R. (2009). A Riemannian
framework for orientation distribution function computing. In International
Conference on Medical Image Computing and Computer-Assisted Intervention
(pp. 911-918). Berlin, Heidelberg: Springer Berlin Heidelberg.

"""

H, W, D, K = c.shape

# We first have to normalize the SH coefficients.
norm = np.linalg.norm(c, axis=-1, keepdims=True)
# Using a mask to avoid division by zero.
mask = (abs(norm) < 1e-2).repeat(K, axis=-1)
c = np.ma.masked_array(c, mask)

# Eq. 11: after normalization, the squared coefficients sum to 1.
# Normalizing the SH coefficients is sufficient here.
c_sq = c / norm

# Assert that the SH coefficients are normalized, condition in Eq. 1
# is satisfied.
assert np.allclose((c_sq ** 2).sum(-1), 1), c_sq
# Log projection of the SH coefficients, i.e. pushforward map.

# Unit tangent vector
u = np.zeros((H, W, D, K))
u[..., 0] = 1

# Distance between the SH coordinates and the unit tangent vector
# PSI = np.arccos(np.einsum('...i,...i->...', c_sq, u))[..., None]
# The above line is equivalent to the following line, but the following
# line is faster.
PSI = np.arccos(c_sq[..., 0])[..., None]

# Cosine of the angle
cos_PSI = np.cos(PSI)

# Translate the SH coefficients
d = np.subtract(c_sq, u * cos_PSI)

# Norm
n = np.linalg.norm(d, axis=-1, keepdims=True)

# Projection
v_c = (d * PSI) / n

return v_c.data, norm


def exp_u(v_c, norm):
""" Exponential projection of the SH coefficients from the tangent space
to the S3++ space. See eq. 12 in [1].

Parameters
----------
v_c : ndarray (H, W, D, K)
The projection of the SH coefficients in the tangent space of the SH.
norm : ndarray (H, W, D, 1)
The norm of the SH coefficients to undo the normalization.

Returns
-------
c : ndarray (H, W, D, K)
SH coefficients in their native S3++ space.

References
----------
[1] Cheng, J., Ghosh, A., Jiang, T., & Deriche, R. (2009). A Riemannian
framework for orientation distribution function computing. In International
Conference on Medical Image Computing and Computer-Assisted Intervention
(pp. 911-918). Berlin, Heidelberg: Springer Berlin Heidelberg.
"""

H, W, D, K = v_c.shape

# Unit tangent vector
u = np.zeros((H, W, D, K))
u[..., 0] = 1

# Norm
PSI = np.linalg.norm(v_c, axis=-1, keepdims=True)
mask = (abs(norm) < 0.1).repeat(K, axis=-1)
v_c_mask = np.ma.masked_array(v_c, mask)

# Cosine of the angle
u_cos_PSI = u * np.cos(PSI)
# Normalize the vector
v_c_norm = v_c_mask / PSI
# Sine of the angle
sin_PSI = np.sin(PSI)

# Projection back to the S3++ space
c = u_cos_PSI + v_c_norm * sin_PSI

# Undo the normalization.
c = np.multiply(c, norm)

return c.data
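
Since log_u stores the norm and exp_u multiplies it back, the pair should round-trip wherever the norm is above the masking threshold. A minimal sanity check, assuming synthetic coefficients with a strictly positive first component (as holds for real fODFs, whose c_0 is positive):

import numpy as np
from scilpy.reconst.fodf import exp_u, log_u

rng = np.random.default_rng(0)
# Synthetic SH field (order 6 -> K = 28); the offset keeps c_0 > 0 and the
# voxel-wise norms well above the 1e-2 masking threshold.
c = rng.random((4, 4, 4, 28)) + 0.1

v_c, norm = log_u(c)       # eq. 11: log map to the tangent plane at u
c_back = exp_u(v_c, norm)  # eq. 12: exp map back to S3++, norm restored

assert np.allclose(c, c_back)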
25 changes: 19 additions & 6 deletions scilpy/reconst/sh.py
@@ -350,6 +350,7 @@ def _maps_from_sh_parallel(args):
afd_max = np.zeros(data_shape)
afd_sum = np.zeros(data_shape)
rgb_map = np.zeros((data_shape, 3))
ga_map = np.zeros(data_shape)
gfa_map = np.zeros(data_shape)
qa_map = np.zeros((data_shape, peak_values.shape[1]))

@@ -374,9 +375,18 @@ def _maps_from_sh_parallel(args):
afd_sum[idx] = np.sqrt(np.dot(shm_coeff[idx], shm_coeff[idx]))
qa_map[idx] = peak_values[idx] - odf.min()
global_max = max(global_max, peak_values[idx][0])
# Property 4 of section 2.2 in Cheng et al. 2009
ga_norm_thr = 1e-1
shm_coeff_i_norm = np.linalg.norm(shm_coeff[idx])
if shm_coeff_i_norm <= ga_norm_thr:
ga_map[idx] = 0
else:
c = (shm_coeff[idx]) / shm_coeff_i_norm
cdotu = np.clip(c[0], -1, 1)
ga_map[idx] = np.arccos(cdotu)

return chunk_id, nufo_map, afd_max, afd_sum, rgb_map, \
gfa_map, qa_map, max_odf, global_max
ga_map, gfa_map, qa_map, max_odf, global_max


def maps_from_sh(shm_coeff, peak_dirs, peak_values, peak_indices, sphere,
@@ -414,7 +424,7 @@ def maps_from_sh(shm_coeff, peak_dirs, peak_values, peak_indices, sphere,
Returns
-------
tuple of np.ndarray
nufo_map, afd_max, afd_sum, rgb_map, gfa, qa
nufo_map, afd_max, afd_sum, rgb_map, gfa, ga, qa
"""
sh_order = order_from_ncoef(shm_coeff.shape[-1])
B, _ = sh_to_sf_matrix(sphere, sh_order, sh_basis_type)
@@ -459,6 +469,7 @@ def maps_from_sh(shm_coeff, peak_dirs, peak_values, peak_indices, sphere,
afd_max_array = np.zeros(data_shape[0:3])
afd_sum_array = np.zeros(data_shape[0:3])
rgb_map_array = np.zeros(data_shape[0:3] + (3,))
ga_map_array = np.zeros(data_shape[0:3])
gfa_map_array = np.zeros(data_shape[0:3])
qa_map_array = np.zeros(data_shape[0:3] + (npeaks,))

@@ -468,12 +479,13 @@ def maps_from_sh(shm_coeff, peak_dirs, peak_values, peak_indices, sphere,
tmp_afd_max_array = np.zeros((np.count_nonzero(mask)))
tmp_afd_sum_array = np.zeros((np.count_nonzero(mask)))
tmp_rgb_map_array = np.zeros((np.count_nonzero(mask), 3))
tmp_ga_map_array = np.zeros((np.count_nonzero(mask)))
tmp_gfa_map_array = np.zeros((np.count_nonzero(mask)))
tmp_qa_map_array = np.zeros((np.count_nonzero(mask), npeaks))

all_time_max_odf = -np.inf
all_time_global_max = -np.inf
for (i, nufo_map, afd_max, afd_sum, rgb_map,
for (i, nufo_map, afd_max, afd_sum, rgb_map, ga_map,
gfa_map, qa_map, max_odf, global_max) in results:
all_time_max_odf = max(all_time_max_odf, max_odf)
all_time_global_max = max(all_time_global_max, global_max)
@@ -482,16 +494,17 @@ def maps_from_sh(shm_coeff, peak_dirs, peak_values, peak_indices, sphere,
tmp_afd_max_array[chunk_len[i]:chunk_len[i+1]] = afd_max
tmp_afd_sum_array[chunk_len[i]:chunk_len[i+1]] = afd_sum
tmp_rgb_map_array[chunk_len[i]:chunk_len[i+1], :] = rgb_map
tmp_ga_map_array[chunk_len[i]:chunk_len[i+1]] = ga_map
tmp_gfa_map_array[chunk_len[i]:chunk_len[i+1]] = gfa_map
tmp_qa_map_array[chunk_len[i]:chunk_len[i+1], :] = qa_map

nufo_map_array[mask] = tmp_nufo_map_array
afd_max_array[mask] = tmp_afd_max_array
afd_sum_array[mask] = tmp_afd_sum_array
rgb_map_array[mask] = tmp_rgb_map_array
ga_map_array[mask] = tmp_ga_map_array
gfa_map_array[mask] = tmp_gfa_map_array
qa_map_array[mask] = tmp_qa_map_array

rgb_map_array /= all_time_max_odf
rgb_map_array *= 255
qa_map_array /= all_time_global_max
@@ -501,8 +514,8 @@ def maps_from_sh(shm_coeff, peak_dirs, peak_values, peak_indices, sphere,
or np.array_equal(np.array([1]), afd_unique):
logging.warning('All AFD_max values are 1. The peaks seem normalized.')

return(nufo_map_array, afd_max_array, afd_sum_array,
rgb_map_array, gfa_map_array, qa_map_array)
return (nufo_map_array, afd_max_array, afd_sum_array,
rgb_map_array, gfa_map_array, ga_map_array, qa_map_array)


def _convert_sh_basis_parallel(args):
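
Side note on the GA block above: by property 4 of section 2.2 in Cheng et al. 2009 it reduces to arccos(c_0 / ||c||) per voxel, so a vectorized equivalent (a sketch, not part of this diff) could replace the per-voxel branch:

import numpy as np

def ga_from_sh(shm_coeff, norm_thr=1e-1):
    # Geodesic distance between each normalized coefficient vector and the
    # isotropic direction u = (1, 0, ..., 0), i.e. arccos(<c/||c||, u>).
    norms = np.linalg.norm(shm_coeff, axis=-1)
    safe = np.where(norms > norm_thr, norms, 1.0)
    ga = np.arccos(np.clip(shm_coeff[..., 0] / safe, -1.0, 1.0))
    # Voxels with a norm at or below the threshold are set to 0, as in the loop.
    return np.where(norms > norm_thr, ga, 0.0)
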
41 changes: 36 additions & 5 deletions scripts/scil_fodf_metrics.py
@@ -12,6 +12,9 @@
the threshold set using --at, AND an amplitude above the RELATIVE threshold
set using --rt.

GA is the Geometric Anisotropy, which is the geodesic distance between the
ODF and the isotropic ODF, ∈ [0, arccos(1/√(4π))]. See [3], eq. 7 for more
details.

The --at argument should be set to a value which is 1.5 times the maximal
value of the fODF in the ventricules. This can be obtained with the
scil_fodf_max_in_ventricles.py script.
@@ -30,6 +33,18 @@
See [Raffelt et al. NeuroImage 2012] and [Dell'Acqua et al HBM 2013] for the
definitions.

References
----------
[1] Raffelt et al. (2012). Apparent Fibre Density: a novel measure for the
analysis of diffusion-weighted magnetic resonance images. NeuroImage.
[2] Dell'Acqua et al. (2013). Tract density imaging: description of a
reproducible method and demonstration of its usefulness in neuropsychiatric
research. Human Brain Mapping.
[3] Cheng, J., Ghosh, A., Jiang, T., & Deriche, R. (2009). A Riemannian
framework for orientation distribution function computing. In International
Conference on Medical Image Computing and Computer-Assisted Intervention
(pp. 911-918). Berlin, Heidelberg: Springer Berlin Heidelberg.

Formerly: scil_compute_fodf_metrics.py
"""

@@ -100,6 +115,11 @@ def _build_arg_parser():
help='Output filename for the NuFO map.')
g.add_argument('--rgb', metavar='file', default='',
help='Output filename for the RGB map.')
g.add_argument('--ga', metavar='file', default='',
help='Output filename for the Geometric Anisotropy map.')
g.add_argument('--gfa', metavar='file', default='',
help='Output filename for the Generalized Fractional '
'Anisotropy map.')
g.add_argument('--peaks', metavar='file', default='',
help='Output filename for the extracted peaks.')
g.add_argument('--peak_values', metavar='file', default='',
@@ -122,12 +142,14 @@ def main():
args.afd_sum = args.afd_sum or 'afd_sum.nii.gz'
args.nufo = args.nufo or 'nufo.nii.gz'
args.rgb = args.rgb or 'rgb.nii.gz'
args.ga = args.ga or 'ga.nii.gz'
args.gfa = args.gfa or 'gfa.nii.gz'
args.peaks = args.peaks or 'peaks.nii.gz'
args.peak_values = args.peak_values or 'peak_values.nii.gz'
args.peak_indices = args.peak_indices or 'peak_indices.nii.gz'

arglist = [args.afd_max, args.afd_total, args.afd_sum, args.nufo,
args.rgb, args.peaks, args.peak_values,
args.rgb, args.ga, args.gfa, args.peaks, args.peak_values,
args.peak_indices]
if args.not_all and not any(arglist):
parser.error('When using --not_all, you need to specify at least '
@@ -161,10 +183,11 @@ def main():
nbr_processes=args.nbr_processes)

# Computing maps
if args.nufo or args.afd_max or args.afd_total or args.afd_sum or args.rgb:
nufo_map, afd_max, afd_sum, rgb_map, \
_, _ = maps_from_sh(data, peak_dirs, peak_values, peak_indices,
sphere, nbr_processes=args.nbr_processes)
if (args.nufo or args.afd_max or args.afd_total or
args.afd_sum or args.rgb or args.gfa or args.ga):
nufo_map, afd_max, afd_sum, rgb_map, gfa_map, ga_map, \
_ = maps_from_sh(data, peak_dirs, peak_values, peak_indices,
sphere, nbr_processes=args.nbr_processes)

# Save result
if args.nufo:
@@ -189,6 +212,14 @@ def main():
nib.save(nib.Nifti1Image(rgb_map.astype('uint8'), affine),
args.rgb)

if args.gfa:
nib.save(nib.Nifti1Image(gfa_map.astype(np.float32), affine),
args.gfa)

if args.ga:
nib.save(nib.Nifti1Image(ga_map.astype(np.float32), affine),
args.ga)

if args.peaks or args.peak_values:
if not args.abs_peaks_and_values:
peak_values = np.divide(peak_values, peak_values[..., 0, None],
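
With these additions the new maps are requested like the existing ones; a hypothetical invocation (the positional fODF argument follows the script's usual convention):

scil_fodf_metrics.py fodf.nii.gz --not_all --ga ga.nii.gz --gfa gfa.nii.gz
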
7 changes: 5 additions & 2 deletions scripts/scil_volume_resample.py
@@ -15,8 +15,7 @@
import numpy as np

from scilpy.io.utils import (add_verbose_arg, add_overwrite_arg,
assert_inputs_exist, assert_outputs_exist,
assert_headers_compatible)
assert_inputs_exist, assert_outputs_exist)
from scilpy.image.volume_operations import resample_volume


@@ -54,6 +53,9 @@ def _build_arg_parser():
p.add_argument('--enforce_dimensions', action='store_true',
help='Enforce the reference volume dimension.')

p.add_argument('--fodf', action='store_true',
help='Flag to indicate that the input is an fODF volume.')

add_verbose_arg(p)
add_overwrite_arg(p)

@@ -99,6 +101,7 @@ def main():
iso_min=args.iso_min,
voxel_res=args.voxel_size,
interp=args.interp,
fodf=args.fodf,
enforce_dimensions=args.enforce_dimensions)

# Saving results
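
End to end, the behaviour added here would be exercised as follows (hypothetical file names; --voxel_size feeds the voxel_res argument shown above):

scil_volume_resample.py fodf.nii.gz fodf_1mm.nii.gz --voxel_size 1 --fodf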