Skip to content

Commit

Permalink
#16
Browse files Browse the repository at this point in the history
  • Loading branch information
sapetnioc committed Feb 2, 2022
1 parent 2064bfc commit 389fef9
Show file tree
Hide file tree
Showing 3 changed files with 115 additions and 30 deletions.
1 change: 1 addition & 0 deletions bv_use_cases/simplest/__main__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
from pprint import pprint

from capsul.api import Capsul
Expand Down
59 changes: 51 additions & 8 deletions bv_use_cases/tiny_morphologist/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
# -*- coding: utf-8 -*-
from pathlib import Path

from soma.controller import field, file

from capsul.api import Process, Pipeline


Expand All @@ -7,22 +11,40 @@ class BiasCorrection(Process):
strength: float = 0.8
output: field(type_=file(), output=True)

def execute(self):
def execute(self, context):
with open(self.input) as f:
content = self.read()
content = f'{content}\nBias correction with strength={self.strength}'
with open(self.output, 'w') as f:
f.write(content)

path_layout = dict(
bids={'output': {'part': 'nobias'}},
brainvisa={'output': {'prefix': 'nobias'}}
)

class SPMNormalization(Process):
input: field(type_=file())
template: field(type_=file())
output: field(type_=file(), output=True)

def execute(self):

requirements = {
'fakespm': {
'version': '12'
}
}

path_layout = dict(
bids={'output': {'part': 'normalized'}},
brainvisa={'output': {'prefix': 'normalized'}}
)

def execute(self, context):
spmdir = Path(context.spm.directory)
real_version = (spmdir / 'spm').read_text().strip()
with open(self.input) as f:
content = self.read()
content = f'{content}\nSPM normalization with template "{self.template}"'
content = f'{content}\nNormalization with fakespm {real_version} installed in {spmdir} using template "{self.template}"'
with open(self.output, 'w') as f:
f.write(content)

Expand All @@ -31,10 +53,15 @@ class AimsNormalization(Process):
origin: field(type_=list[float], default_factory=lambda: [1.2, 3.4, 5.6])
output: field(type_=file(), output=True)

def execute(self):
path_layout = dict(
bids={'output': {'part': 'normalized'}},
brainvisa={'output': {'prefix': 'normalized'}}
)

def execute(self, context):
with open(self.input) as f:
content = self.read()
content = f'{content}\nSPM normalization with origin={self.origin}'
content = f'{content}\nNormalization with Aims, origin={self.origin}'
with open(self.output, 'w') as f:
f.write(content)

Expand All @@ -43,7 +70,12 @@ class SplitBrain(Process):
right_output: field(type_=file(), output=True)
left_output: field(type_=file(), output=True)

def execute(self):
path_layout = dict(
bids={'output': {'part': 'split'}},
brainvisa={'output': {'prefix': 'split'}}
)

def execute(self, context):
with open(self.input) as f:
content = self.read()
content = f'{content}\nBias correction with strength={self.strength}'
Expand All @@ -55,7 +87,7 @@ class ProcessHemisphere(Process):
input: field(type_=file())
output: field(type_=file(), output=True)

def execute(self):
def execute(self, context):
with open(self.input) as f:
content = self.read()
content = f'{content}\nProcess hemisphere'
Expand Down Expand Up @@ -86,4 +118,15 @@ def pipeline_definition(self):
self.add_link('split.left_output->left_hemi.input')
self.export_parameter('left_hemi', 'output', 'left_hemisphere')

path_layout = dict(
bids={
'*': {'pipeline': 'tinymorphologist'},
'right_hemi': {'part': 'right_hemi'}
},
brainvisa={
'*': {'process': 'tinymorphologist'},
'left_hemi': {'prefix': 'left_hemi'}
}
)


85 changes: 63 additions & 22 deletions bv_use_cases/tiny_morphologist/__main__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
# -*- coding: utf-8 -*-
import json
import os
from pathlib import Path
import shutil
import tempfile

from bv_use_cases import tiny_morphologist

from capsul.api import Capsul
Expand Down Expand Up @@ -39,38 +43,73 @@
)

# Create temporary directory for the use case
tmp = tempfile.mkdtemp()
tmp_name = tempfile.mkdtemp()
try:
#-------------------#
# Environment setup #
#-------------------#

tmp = Path(tmp_name)
# Create BIDS directory
bids = f'{tmp}/bids'
bids = tmp / 'bids'
# Write Capsul specific information
os.mkdir(bids)
with open(f'{bids}/capsul.json', 'w') as f:
bids.mkdir()
with (bids / 'capsul.json').open('w') as f:
json.dump({
'paths_layout': 'bids-1.6'
'path_layout': 'bids-1.6'
}, f)

# Create BrainVISA directory
brainvisa = f'{tmp}/brainvisa'
os.mkdir(brainvisa)
brainvisa = tmp / 'brainvisa'
brainvisa.mkdir()
# Write Capsul specific information
with open(f'{brainvisa}/capsul.json', 'w') as f:
with (brainvisa / 'capsul.json').open('w') as f:
json.dump({
'paths_layout': 'brainvisa-6.0'
'path_layout': 'brainvisa-6.0'
}, f)

# Generate fake T1 and T2 data in bids directory
for subject in subjects:
for session in ('m0', 'm12', 'm24'):
for data_type in ('T1w', 'T2w'):
file = f'{bids}/rawdata/sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_{data_type}.nii'
d = os.path.dirname(file)
if not os.path.exists(d):
os.makedirs(d)
with open(file, 'w') as f:
file = (bids/ f'rawdata' / f'sub-{subject}' / f'ses-{session}' / 'anat' /
f'sub-{subject}_ses-{session}_{data_type}.nii')
file.parent.mkdir(parents=True, exist_ok=True)
with file.open('w') as f:
print(f'{data_type} acquisition for subject {subject} acquired in session {session}', file=f)

capsul = Capsul()
# Configuration base dictionary
config = {
'default': {
'label': 'Local computer',
'modules': {}
}
}
# Create fake SPM directories
for version in ('8', '12'):
spm = tmp / 'software' / f'fakespm-{version}'
spm.mkdir(parents=True, exist_ok=True)
# Write a file containing only the version string that will be used
# by fakespm module to check installation.
(spm / 'spm').write_text(version)
fakespm_config = {
'directory': str(spm),
'version': version,
}
config['default']['modules'].setdefault('fakespm', []).append(fakespm_config)


# Create a configuration file
config_json = tmp / 'capsul_config.json'
with config_json.open('w') as f:
json.dump(config, f)


#---------------------#
# Pipelines execution #
#---------------------#

capsul = Capsul(config_file=config_json)
# Input dataset is declared as following BIDS organization in capsul.json
# therefore a BIDS specific object is returned
input_dataset = capsul.dataset(bids)
Expand All @@ -83,21 +122,23 @@
# Parse the dataset with BIDS-specific query (here "suffix" is part
# of BIDS specification). The object returned contains info for main
# BIDS fields (sub, ses, acq, etc.)
for t1_mri in dataset.find(suffix='T1w'):
count = 0
for t1_mri in input_dataset.find(suffix='T1w'):
# Create a TinyMorphologist pipeline
tiny_morphologist = capsul.executable('bv_use_cases.tiny_morphologist.TinyMorphologist')
# Set the input data
tiny_morphologist.input = t1_mri.path
tiny_morphologist.input = t1_mri['path']
    # Complete outputs following BrainVISA organization
# Make the link between BIDS metadata and BrainVISA metadata
output_dataset.set_output_paths(tiny_morphologist,
subject=t1_mri.sub,
acquisition=t1_mri.acq,
subject=t1_mri['sub'],
acquisition=t1_mri.get('acq', 'default'),
)
    # Add the current TinyMorphologist pipeline to the main
# pipeline that will be executed
custom_pipeline.add_executable(tiny_morphologist)
# Finally execute all the TinyMorphologist instances
capsul.run(processing_pipeline)
processing_pipeline.add_process(f'pipeline_{count}', tiny_morphologist)
count = count + 1
# # Finally execute all the TinyMorphologist instances
# capsul.run(processing_pipeline)
finally:
shutil.rmtree(tmp)

0 comments on commit 389fef9

Please sign in to comment.