Add pipeline step for COMMIT streamline filtering #30

Merged: 17 commits, Mar 9, 2023
98 changes: 98 additions & 0 deletions addons/COMMIT-Filter/COMMIT_script.py
@@ -0,0 +1,98 @@
# Script to run COMMIT and COMMIT2 on CATO output.
# This script is based on the COMMIT wiki: https://github.com/daducci/COMMIT/wiki/COMMIT2

import argparse
import os

import numpy as np

import commit
from commit import trk2dictionary

argParser = argparse.ArgumentParser()

argParser.add_argument(
    "--dwiProcessedFile", help="preprocessed DWI file", required=True
)
argParser.add_argument("--dwiSchemeFile", help="DWI scheme file", required=True)
argParser.add_argument("--fiberFile", help="Input fiber file", required=True)
argParser.add_argument(
    "--outputCommitDir", help="Output directory for COMMIT", required=True
)
argParser.add_argument(
    "--intermediateConnectomeFile",
    help="Connectome file with the number of streamlines per connection (for internal use)",
    required=True,
)
argParser.add_argument("--subjectDir", help="Subject directory", required=True)
argParser.add_argument("--wmMaskFile", help="White matter mask file", required=True)
argParser.add_argument(
    "--regLambda", help="Regularisation parameter lambda", type=float, required=True
)

args = argParser.parse_args()

# Import tractogram
os.chdir(args.subjectDir)

trk2dictionary.run(
    filename_tractogram=args.fiberFile,
    filename_mask=args.wmMaskFile,
    path_out=args.outputCommitDir,
    fiber_shift=0,
)

# load the data
mit = commit.Evaluation(".", ".")
mit.load_data(args.dwiProcessedFile, args.dwiSchemeFile)

# use a forward-model with 1 Stick for the streamlines and 2 Balls for all the rest
mit.set_model("StickZeppelinBall")
d_par = 1.7e-3 # Parallel diffusivity [mm^2/s]
d_perps_zep = [] # Perpendicular diffusivity(s) [mm^2/s]
d_isos = [1.7e-3, 3.0e-3] # Isotropic diffusivity(s) [mm^2/s]
mit.model.set(d_par, d_perps_zep, d_isos)

mit.generate_kernels(regenerate=True)
mit.load_kernels()

# create the sparse data structures to handle the matrix A
mit.load_dictionary(args.outputCommitDir)
mit.set_threads()
mit.build_operator()

# perform the fit
mit.fit(tol_fun=1e-3, max_iter=1000, verbose=False)
mit.save_results(path_suffix="_COMMIT1")

# Retrieve the streamline contributions estimated by COMMIT for later use in COMMIT2:
x_nnls, _, _ = mit.get_coeffs(get_normalized=False)

# Preparing the anatomical prior on bundles
C = np.loadtxt(args.intermediateConnectomeFile, delimiter=",")
C = np.triu(C)  # be sure to get only the upper-triangular part of the matrix
group_size = C[C > 0].astype(np.int32)  # streamlines per bundle (connection)

# group_idx[k] holds the streamline indices of bundle k; commit_filter.m
# writes the streamlines to the tractogram grouped by connection.
tmp = np.insert(np.cumsum(group_size), 0, 0)
group_idx = np.array(
    [np.arange(tmp[i], tmp[i + 1]) for i in range(len(tmp) - 1)], dtype=np.object_
)

# COMMIT2 adaptive group weights: sqrt of the bundle size divided by the
# bundle's COMMIT1 contribution; the small epsilon avoids division by zero.
group_w = np.empty_like(group_size, dtype=np.float64)
for k in range(group_size.size):
    group_w[k] = np.sqrt(group_size[k]) / (np.linalg.norm(x_nnls[group_idx[k]]) + 1e-12)

prior_on_bundles = commit.solvers.init_regularisation(
    mit,
    regnorms=[
        commit.solvers.group_sparsity,
        commit.solvers.non_negative,
        commit.solvers.non_negative,
    ],
    structureIC=group_idx,
    weightsIC=group_w,
    lambdas=[args.regLambda, 0.0, 0.0],
)

mit.fit(tol_fun=1e-3, max_iter=1000, regularisation=prior_on_bundles, verbose=False)
mit.save_results(path_suffix="_COMMIT2")
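
For reference, `commit_filter.m` (below) assembles the call to this script from the configuration parameters; a standalone invocation follows the same pattern. A minimal sketch, with all paths as hypothetical placeholders:

```
python COMMIT_script.py \
    --dwiProcessedFile /subject/dwi_processed.nii.gz \
    --dwiSchemeFile /subject/commit/dwi.scheme \
    --fiberFile /subject/commit/fibers_filtered.trk \
    --outputCommitDir /subject/commit \
    --intermediateConnectomeFile /subject/commit/connectome.csv \
    --subjectDir /subject \
    --wmMaskFile /subject/commit/WM_mask.nii.gz \
    --regLambda 5e-4
```
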
89 changes: 89 additions & 0 deletions addons/COMMIT-Filter/README.md
@@ -0,0 +1,89 @@
# COMMIT-Filter Documentation

The COMMIT-Filter add-on adds a post-processing step to the structural pipeline that filters streamlines in the fiber cloud using [COMMIT2](https://github.com/daducci/COMMIT) and reconstructs connectivity matrices from the resulting filtered fiber cloud. The current integration (in `COMMIT_script.py`) is based on the [COMMIT2 example](https://github.com/daducci/COMMIT/wiki/COMMIT2) provided on the COMMIT wiki.

## Installation
Before using this add-on, ensure that COMMIT and its dependencies are installed. You can install COMMIT by following the instructions on the [COMMIT wiki](https://github.com/daducci/COMMIT/wiki/Installation).
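
COMMIT is distributed via PyPI; as a minimal sketch, assuming the package name `dmri-commit` used by the COMMIT repository at the time of writing (the wiki instructions remain authoritative):

```
pip install dmri-commit
```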


## Usage

To use the COMMIT-Filter add-on, follow these steps:


1. Add the following items to your configuration file `CATO.conf` to filter the fiber cloud constructed using constrained spherical deconvolution (`csd`) with information about fiber bundles from the Desikan-Killiany (`aparc`) parcellation.

```
"commit_filter":{
"reconstructionMethods":[
"csd"
],
"templates":[
"aparc"
],
"outputCommitDir":"OUTPUTDIR/commit",
"filteredFiberFile":"OUTPUTCOMMITDIR/SUBJECT_fibers_commit_METHOD.trk",
"filteredFiberPropertiesFile":"OUTPUTCOMMITDIR/SUBJECT_fiber_properties_commit_METHOD_TEMPLATE.mat",
"schemeFile":"OUTPUTCOMMITDIR/dwi.scheme",
"intermediateConnectomeFile":"OUTPUTCOMMITDIR/connectome.csv",
"setupPythonScript":"",
"commitScriptFile":"TOOLBOXDIR/../addons/COMMIT-filter/COMMIT_script.py",
"fiberWeightsFile":"OUTPUTCOMMITDIR/Results_StickZeppelinBall_COMMIT2/streamline_weights.txt",
"filteredConnectivityMatrixFile":"OUTPUTCOMMITDIR/SUBJECT_connectivity_commit_METHOD_TEMPLATE.mat",
"wmMaskFile": "OUTPUTCOMMITDIR/WM_mask.nii.gz",
"lambda":5E-4
}
```

**Note:** If you installed COMMIT and its dependencies in a specific Anaconda environment, update the `setupPythonScript` parameter such that the right environment is activated. For example, if you are using Anaconda and the environment is called `COMMIT`, use:

```
"setupPythonScript":"source /Applications/anaconda3/bin/activate COMMIT;",
```

**Note:** A suitable value for the regularisation parameter `lambda` depends on the dataset. Update this parameter in accordance with your data.

2. Add the `commit_filter` step to the pipeline steps by editing the `reconstructionSteps` parameter in the configuration file:

```
"reconstructionSteps":{
"structural_preprocessing": true,
"parcellation": true,
"collect_region_properties": true,
"reconstruction_diffusion": true,
"reconstruction_fibers": true,
"reconstruction_fiber_properties": true,
"reconstruction_network": true,
"commit_filter": true
},
```

3. Add the COMMIT-Filter add-on to the MATLAB path:

```
addpath('/LOCATION OF CATO TOOLBOX/addons/COMMIT-Filter')
```

4. Run the structural pipeline with the additional `commit_filter` step:


```
subjectDir = '/Volumes/Example/0001';
configurationFile = '/Volumes/Example/CATO.conf';
structural_pipeline(subjectDir, ...
    'configurationFile', configurationFile);
```

This will execute COMMIT and write the COMMIT output, the filtered fiber clouds, and the filtered connectivity matrices to the directory `OUTPUTDIR/commit`.
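
For example, with subject `0001` and the configuration above, the filtered connectivity matrix can then be inspected in MATLAB. A minimal sketch, with the file name assumed from the `filteredConnectivityMatrixFile` template (substitute your own `OUTPUTDIR`):

```
% Hypothetical path following the filteredConnectivityMatrixFile template
data = load('OUTPUTDIR/commit/0001_connectivity_commit_csd_aparc.mat');
disp(fieldnames(data))   % inspect the stored variables
```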

200 changes: 200 additions & 0 deletions addons/COMMIT-Filter/commit_filter.m
@@ -0,0 +1,200 @@
function commit_filter(configParams)
% COMMIT_FILTER Filter the fiber cloud using COMMIT2
%

%% Initialization
status.commit_filter = 'running';
updateStatus(configParams.general.statusFile, status);
fprintf('---commit_filter started----\n');

dwiProcessedFile = configParams.structural_preprocessing.dwiProcessedFile;

fiberFile = configParams.reconstruction_fibers.fiberFile;
fiberPropertiesFile = configParams.reconstruction_fiber_properties.fiberPropertiesFile;

regionPropertiesFile = configParams.collect_region_properties.regionPropertiesFile;

startRegions = configParams.reconstruction_fibers.startRegions;
segmentationFile = configParams.structural_preprocessing.segmentationFile;

processedBvalsFile = configParams.structural_preprocessing.processedBvalsFile;
processedBvecsFile = configParams.structural_preprocessing.processedBvecsFile;
bValueScalingTol = configParams.general.bValueScalingTol;
bValueZeroThreshold = configParams.general.bValueZeroThreshold;


reconstructionMethods = lower(configParams.commit_filter.reconstructionMethods);
templates = configParams.commit_filter.templates; % Run COMMIT-filter only on a selection of templates
filteredFiberFile = configParams.commit_filter.filteredFiberFile;
filteredFiberPropertiesFile = configParams.commit_filter.filteredFiberPropertiesFile;
schemeFile = configParams.commit_filter.schemeFile;
intermediateConnectomeFile = configParams.commit_filter.intermediateConnectomeFile;
setupPythonScript = configParams.commit_filter.setupPythonScript;
commitScriptFile = configParams.commit_filter.commitScriptFile;
fiberWeightsFile = configParams.commit_filter.fiberWeightsFile;
filteredConnectivityMatrixFile = configParams.commit_filter.filteredConnectivityMatrixFile;
outputCommitDir = configParams.commit_filter.outputCommitDir;
lambda = configParams.commit_filter.lambda;
wmMaskFile = configParams.commit_filter.wmMaskFile;
subjectDir = pwd;


%% Convert bvals and bvecs to single scheme file

% DWI.scheme has the following organization:
% VERSION: BVECTOR
% -0.000000 -0.000000 1.000000 0.000000
% 0.971803 -0.194807 -0.132847 700.000000

gtab = load_gtab(processedBvalsFile, processedBvecsFile, bValueZeroThreshold, bValueScalingTol);


[~, ~] = mkdir(fileparts(schemeFile));
fid = fopen(schemeFile, 'w+');

if fid == -1
    error('CATO:commit_filter:cannotCreateFile', 'Cannot create ''%s''.', schemeFile);
end

fprintf(fid, 'VERSION: BVECTOR\n');
fprintf(fid, '%.6f\t%.6f\t%.6f\t%.6f\n', [gtab.bvecs gtab.bvals]');

fclose(fid);


%% Create white matter mask file

segmentation = load_nifti(segmentationFile);

% Mark voxels whose segmentation label is in startRegions as white matter.
whiteMatterMask = segmentation;
whiteMatterMask.vol = zeros(size(segmentation.vol));
whiteMatterMask.vol(ismember(segmentation.vol, startRegions)) = 1;

save_nifti(whiteMatterMask, wmMaskFile);

%% Reconstruct networks for each template and each method.
for iTemplate = 1:length(templates)
    thisTemplate = templates{iTemplate};

    for iMethod = 1:length(reconstructionMethods)
        thisMethod = reconstructionMethods{iMethod};

        thisFiberFile = strrep(fiberFile, ...
            'METHOD', thisMethod);
        thisFilteredFiberFile = strrep(filteredFiberFile, ...
            'METHOD', thisMethod);
        thisFiberPropertiesFile = strrep(strrep(fiberPropertiesFile, ...
            'METHOD', thisMethod), ...
            'TEMPLATE', thisTemplate);
        thisFilteredFiberPropertiesFile = strrep(strrep(filteredFiberPropertiesFile, ...
            'METHOD', thisMethod), ...
            'TEMPLATE', thisTemplate);
        thisRegionPropertiesFile = strrep(regionPropertiesFile, ...
            'TEMPLATE', thisTemplate);

        fprintf('%s - %s\n', thisTemplate, upper(thisMethod));

        % Open reconstructed fiber file.
        % This script assumes the fiber cloud data is small enough to fit
        % into memory, which is an optimistic assumption for large datasets.
        fprintf('Load fibers...');
        [fibers, headerFiberFile] = readTrk(thisFiberFile);
        fprintf(' done\n');

        % Fiber properties
        data = load(thisFiberPropertiesFile);
        fiberProperties = data.fiberProperties;
        propertyDescriptions = data.propertyDescriptions;

        % Filter fibers that are in the connectome and group fibers into
        % connection bundles.

        % To keep things simple, each fiber should appear only once in the
        % fiber cloud file. Fibers are ordered by the longest connection
        % they contribute to.

        fprintf('Filter fibers...');
        % Sort such that the first occurrence of a fiber is its longest
        % connection.
        [~, indx] = sort(fiberProperties(:,6), 'descend');
        fiberProperties = fiberProperties(indx,:);

        % Keep only the first occurrence of each fiber.
        [~, indx] = unique(fiberProperties(:,1), 'stable');
        fiberProperties = fiberProperties(indx,:);

        % Sort/group the filtered fibers by connection. Keep fiberProperties
        % in the same order, such that its rows match the streamline weights
        % that COMMIT returns for the written tractogram.
        [~, indx] = sort(fiberProperties(:,2), 'ascend');
        fiberProperties = fiberProperties(indx,:);
        fibers = fibers(fiberProperties(:,1));
        fprintf(' done\n');

        fprintf('Write intermediate files for Python COMMIT script...');
        voxelSize = headerFiberFile.voxel_size;
        headerFiberFile.voxel_order = strtrim(headerFiberFile.voxel_order);
        writeFibers(fibers, thisFilteredFiberFile, voxelSize, headerFiberFile);

        % Create connectome file for the COMMIT script with the updated
        % number of streamlines (NOS) per connection.
        data = load(thisRegionPropertiesFile);
        regionDescriptions = data.regionDescriptions;
        nRegions = length(regionDescriptions);

        assert(max(fiberProperties(:, 2)) <= 0.5*nRegions*(nRegions-1), ...
            ['The number of regions does not match between ', ...
            'fiberPropertiesFile (%s) and regionPropertiesFile (%s).'], ...
            thisFiberPropertiesFile, thisRegionPropertiesFile);

        % Count streamlines per connection (condensed upper-triangular
        % indexing) and expand into a full symmetric matrix.
        W = accumarray(fiberProperties(:, 2), ones(size(fiberProperties,1),1), ...
            [0.5*nRegions*(nRegions-1) 1]);
        W = squareform(W);
        dlmwrite(intermediateConnectomeFile, W, ',');
        fprintf(' done\n');

        % Run Python script to perform COMMIT and COMMIT2
        cmd = [setupPythonScript ' python "' commitScriptFile '"' ...
            ' --dwiProcessedFile=' dwiProcessedFile, ...
            ' --dwiSchemeFile=' schemeFile, ...
            ' --subjectDir=' subjectDir, ...
            ' --fiberFile=' thisFilteredFiberFile, ...
            ' --outputCommitDir=' outputCommitDir, ...
            ' --regLambda=' num2str(lambda), ...
            ' --wmMaskFile=' wmMaskFile, ...
            ' --intermediateConnectomeFile=' intermediateConnectomeFile];

        fprintf('COMMIT script:\n%s\n', commitScriptFile);
        fprintf('Run COMMIT script...');
        exitCode = system(cmd);

        if exitCode ~= 0
            error('CATO:commit_filter:errorInCOMMITScript', ...
                'Error in executing COMMIT script: \n %s', ...
                cmd);
        end
        fprintf(' done\n');

        fprintf('Apply COMMIT filter to connectome matrices...\n');
        % Filter with streamline weights
        commitWeights = dlmread(fiberWeightsFile);

        % Create updated fiber properties file with only the streamlines
        % that received a positive COMMIT2 weight.
        fiberProperties = fiberProperties(commitWeights > 0,:);
        save(thisFilteredFiberPropertiesFile, 'fiberProperties', 'propertyDescriptions');

        % Run network reconstruction step on the filtered fiber properties.
        configParams.reconstruction_fiber_properties.fiberPropertiesFile = filteredFiberPropertiesFile;
        configParams.general.templates = {thisTemplate};
        configParams.general.reconstructionMethods = {thisMethod};
        configParams.reconstruction_network.connectivityMatrixFile = filteredConnectivityMatrixFile;
        reconstruction_network(configParams);

        fprintf('done.\n')
    end

end

%% Clean up

status.commit_filter = 'finished';
updateStatus(configParams.general.statusFile, status);

fprintf('---commit_filter finished----\n');


2 changes: 1 addition & 1 deletion src/misc/freesurfer_scripts/save_nifti.m
Original file line number Diff line number Diff line change
@@ -68,7 +68,7 @@
 hdr.db_name = hdr.db_name(1:18);

 hdr.dim = ones(1,8);
-hdr.dim(1) = 4;
+hdr.dim(1) = numel(size(hdr.vol));
 hdr.dim(2) = size(hdr.vol,1);
 hdr.dim(3) = size(hdr.vol,2);
 hdr.dim(4) = size(hdr.vol,3);
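
The effect of this change, as a minimal sketch: `commit_filter.m` above saves a 3-D white-matter mask through `save_nifti`, for which `hdr.dim(1)` should record 3 dimensions rather than the previously hardcoded 4 (hypothetical volume for illustration):

```
vol = zeros(64, 64, 30);   % a 3-D mask volume
numel(size(vol))           % returns 3; the old code always wrote 4
```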