[ENH] Output derivatives to folder outside of BIDS dataset #29

Closed · wants to merge 4 commits
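
The core change writes MRIQC derivatives next to the BIDS dataset rather than inside it. A minimal sketch of the old and new output paths, using a made-up bids_dir and MRIQC version string:

import os.path as op

bids_dir = '/data/project/dset'  # hypothetical BIDS dataset path
mriqc_version = '0.15.1'         # hypothetical MRIQC version string

# Before this PR: derivatives were written inside the BIDS dataset.
old_out = op.join(bids_dir, 'derivatives/mriqc-{0}'.format(mriqc_version))
# -> /data/project/dset/derivatives/mriqc-0.15.1

# After this PR: derivatives go to a sibling folder, outside the dataset.
new_out = op.abspath(op.join(bids_dir, '../derivatives/mriqc-{0}'.format(mriqc_version)))
# -> /data/project/derivatives/mriqc-0.15.1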
23 changes: 23 additions & 0 deletions .github/pull_request_template.md
@@ -0,0 +1,23 @@
<!---
This is a suggested pull request template for cis-processing.
It's designed to capture information we've found to be useful in reviewing pull requests.

If there is other information that would be helpful to include, please don't hesitate to add it!

Please also label your pull request with the relevant tags.
Here is a list of appropriate tags:
[ENH]: Adds a new feature
[FIX]: Fixes a bug
[REF]: Refactors (reorganizes, improves, etc.) code without impacting behavior
[STY]: Improves coding style
[DOC]: Improvements to documentation, including commenting
[TST]: Testing (e.g., adding unit/integration tests)
[WIP]: A work in progress. We may discuss this PR but will not merge until it is ready
-->

Closes # .

Changes proposed in this pull request:

-
-
49 changes: 6 additions & 43 deletions cis_proc.py
@@ -13,6 +13,7 @@

import argparse
import pandas as pd
from utils import merge_bids


def run(command, env={}):
@@ -103,8 +104,9 @@ def main(argv=None):
config_options['mriqc'])
mriqc_version = op.basename(mriqc_file).split('-')[0].split('_')[-1]

out_deriv_dir = op.join(args.bids_dir,
'derivatives/mriqc-{0}'.format(mriqc_version))
# Output derivatives to parent directory of BIDS dataset directory.
out_deriv_dir = op.abspath(op.join(
args.bids_dir, '../derivatives/mriqc-{0}'.format(mriqc_version)))

# Additional checks and copying for heuristics file
heuristics_file = config_options['heuristics']
@@ -174,47 +176,8 @@ def main(argv=None):
os.remove(op.join(scan_work_dir, 'validator.txt'))

if bids_successful:
# Merge BIDS dataset into final folder
dset_files = ['CHANGES', 'README', 'dataset_description.json',
'participants.tsv']
for dset_file in dset_files:
if not op.isfile(op.join(args.bids_dir, dset_file)):
shutil.copyfile(op.join(scan_work_dir, 'bids', dset_file),
op.join(args.bids_dir, dset_file))

p_df = pd.read_csv(op.join(scan_work_dir, 'bids/participants.tsv'),
sep='\t')
p_df = p_df.T.drop_duplicates().T
p_df2 = pd.read_csv(op.join(args.bids_dir, 'participants.tsv'),
sep='\t')
p_df2 = p_df2.T.drop_duplicates().T

# Check if row already in participants file
matches = p_df[(p_df == p_df2.loc[0]).all(axis=1)]
match = matches.index.values.size
if not match:
p_df = pd.concat((p_df, p_df2))
p_df.to_csv(op.join(args.bids_dir, 'participants.tsv'),
sep='\t', index=False)
else:
print('Subject/session already found in participants.tsv')

scratch_sub_dir = op.join(scan_work_dir,
'bids/sub-{0}'.format(args.sub))
out_sub_dir = op.join(args.bids_dir, 'sub-{0}'.format(args.sub))
if not op.isdir(out_sub_dir):
shutil.copytree(scratch_sub_dir, out_sub_dir)
elif args.ses is not None:
scratch_ses_dir = op.join(scratch_sub_dir,
'ses-{0}'.format(args.ses))
out_ses_dir = op.join(out_sub_dir, 'ses-{0}'.format(args.ses))
if not op.isdir(out_ses_dir):
shutil.copytree(scratch_ses_dir, out_ses_dir)
else:
print('Warning: Subject/session directory already exists in '
'dataset.')
else:
print('Warning: Subject directory already exists in dataset.')
merge_bids(op.join(scan_work_dir, 'bids'), args.bids_dir,
args.sub, args.ses)

# Run MRIQC
if not op.isdir(out_deriv_dir):
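
With this refactor, the scan-level merge logic in cis_proc.py collapses into a single call to the new utils helper. A hypothetical invocation (the scratch and dataset paths below are illustrative only):

from utils import merge_bids

# Merge the single-subject scratch BIDS dataset for this scan into the
# project-level BIDS dataset (paths and labels are made up for illustration).
merge_bids(in_dir='/scratch/cis-processing/sub-01_ses-01/bids',
           dset_dir='/data/project/dset',
           sub='01', ses='01')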
52 changes: 52 additions & 0 deletions utils.py
@@ -0,0 +1,52 @@
"""
Utility functions for cis-processing tools.
"""
import shutil
import os.path as op
import pandas as pd


def merge_bids(in_dir, dset_dir, sub, ses=None):
"""
Merge the single-subject BIDS dataset generated by the cis_proc pipeline
into the general project BIDS dataset.
"""
dset_files = ['CHANGES', 'README', 'dataset_description.json',
'participants.tsv']
for dset_file in dset_files:
if not op.isfile(op.join(dset_dir, dset_file)):
shutil.copyfile(op.join(in_dir, dset_file),
op.join(dset_dir, dset_file))

p_df = pd.read_csv(op.join(in_dir, 'participants.tsv'),
sep='\t')
p_df = p_df.T.drop_duplicates().T
p_df2 = pd.read_csv(op.join(dset_dir, 'participants.tsv'),
sep='\t')
p_df2 = p_df2.T.drop_duplicates().T

# Check if row already in participants file
matches = p_df[(p_df == p_df2.loc[0]).all(axis=1)]
match = matches.index.values.size
if not match:
p_df = pd.concat((p_df, p_df2))
p_df.to_csv(op.join(dset_dir, 'participants.tsv'),
sep='\t', index=False)
else:
print('Subject/session already found in participants.tsv')

scratch_sub_dir = op.join(in_dir, 'sub-{0}'.format(sub))
out_sub_dir = op.join(dset_dir, 'sub-{0}'.format(sub))
if not op.isdir(out_sub_dir):
shutil.copytree(scratch_sub_dir, out_sub_dir)
elif ses is not None:
scratch_ses_dir = op.join(scratch_sub_dir,
'ses-{0}'.format(ses))
out_ses_dir = op.join(out_sub_dir, 'ses-{0}'.format(ses))
if not op.isdir(out_ses_dir):
shutil.copytree(scratch_ses_dir, out_ses_dir)
else:
print('Warning: Subject/session directory already exists in '
'dataset.')
else:
print('Warning: Subject directory already exists in dataset.')
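
merge_bids only appends to the project's participants.tsv when an exact row match is not found. A small standalone sketch of that pandas check (the column names and values are hypothetical):

import pandas as pd

# Hypothetical stand-ins for the two participants.tsv files merge_bids reads.
p_df = pd.DataFrame({'participant_id': ['sub-01'], 'age': [25]})   # scratch copy (in_dir)
p_df2 = pd.DataFrame({'participant_id': ['sub-01'], 'age': [25]})  # project copy (dset_dir)

# Same comparison as in merge_bids: look for an exact row-wise match.
matches = p_df[(p_df == p_df2.loc[0]).all(axis=1)]
if not matches.index.values.size:
    p_df = pd.concat((p_df, p_df2))  # no match: append and rewrite the file
else:
    print('Subject/session already found in participants.tsv')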