Skip to content

Commit

Permalink
[DATALAD RUNCMD] Rename all _blocs to _blocks
Browse files Browse the repository at this point in the history
=== Do not change lines below ===
{
 "chain": [],
 "cmd": "git-sedi '_blocs\\b' _blocks",
 "exit": 0,
 "extra_inputs": [],
 "inputs": [],
 "outputs": [],
 "pwd": "."
}
^^^ Do not change lines above ^^^
  • Loading branch information
yarikoptic committed May 21, 2024
1 parent f14a87f commit 8866237
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 16 deletions.
22 changes: 11 additions & 11 deletions bids_prov/afni/afni_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,7 @@ def get_extern_entities_from_activities(graph, activities, id_fusion_activity):


def afni_to_bids_prov(filename: str, context_url=CONTEXT_URL, output_file=None,
soft_ver='afni24', indent=2, verbose=True, with_blocs=True) -> bool:
soft_ver='afni24', indent=2, verbose=True, with_blocks=True) -> bool:
"""
afni parser
Expand All @@ -443,14 +443,14 @@ def afni_to_bids_prov(filename: str, context_url=CONTEXT_URL, output_file=None,
number of indentation in jsonld
verbose : bool
True to have more verbosity
with_blocs : bool
with_blocks : bool
To retrieve or not the results of the parser in block mode and not only for each command
Returns
-------
bool
Write the json-ld to the location indicated in output_file.
If `with_blocs` is True, it generates the file to the location indicated in output_file.
If `with_blocks` is True, it generates the file to the location indicated in output_file.
"""
commands_block = readlines(filename)

Expand All @@ -460,31 +460,31 @@ def afni_to_bids_prov(filename: str, context_url=CONTEXT_URL, output_file=None,
graph["Records"].update(records)
compute_sha_256_entity(graph["Records"]["Entities"])

if with_blocs:
if with_blocks:
bl_name = list(OrderedDict.fromkeys(bl for (bl, id) in bloc_act))
blocks = [{
"bloc_name": bl,
"act_ids": [id_ for (b, id_) in bloc_act if b == bl]} for bl in bl_name]

graph_block = copy.deepcopy(graph)
activities_blocs = []
entities_blocs = []
activities_blocks = []
entities_blocks = []
for block in blocks:
activities = get_activities_by_ids(graph_block, block["act_ids"])
fus_activities = fusion_activities(activities, block["bloc_name"])
ext_entities = get_extern_entities_from_activities(
graph_block, activities, fus_activities["@id"])
for ent in ext_entities:
if ent["@id"] not in entities_blocs:
entities_blocs.append(ent)
if ent["@id"] not in entities_blocks:
entities_blocks.append(ent)

for ent_used in fus_activities["Used"]:
if ent_used not in [id_["@id"] for id_ in ext_entities]:
fus_activities["Used"].remove(ent_used)
activities_blocs.append(fus_activities)
activities_blocks.append(fus_activities)

graph_block["Records"]["Activities"] = activities_blocs
graph_block["Records"]["Entities"] = entities_blocs
graph_block["Records"]["Activities"] = activities_blocks
graph_block["Records"]["Entities"] = entities_blocks

return writing_jsonld(graph_block, indent, output_file)

Expand Down
10 changes: 5 additions & 5 deletions launch_parser_on_nidm.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,13 @@
from bids_prov.visualize import main as visualize


def process_file(context_write, root, file, filename_ss_ext, output_dir, parser_function, verbose, with_blocs=False):
def process_file(context_write, root, file, filename_ss_ext, output_dir, parser_function, verbose, with_blocks=False):
"""Process a file using the given parser function and save the output to the output directory."""
context_write.write(f" file= {root}/{str(file)}\n")
filename = root + "/" + str(file)
if with_blocs is False:
if with_blocks is False:
shutil.copyfile(filename, output_dir + "/" + str(file))
output_base = output_dir + "/" + filename_ss_ext if with_blocs is False else output_dir + "/" + filename_ss_ext + "_block"
output_base = output_dir + "/" + filename_ss_ext if with_blocks is False else output_dir + "/" + filename_ss_ext + "_block"
output_jsonld = output_base + ".jsonld"
output_png = output_base + ".png"

Expand All @@ -26,7 +26,7 @@ def process_file(context_write, root, file, filename_ss_ext, output_dir, parser_
output_file=output_jsonld, verbose=verbose)
else:
jsonld_same_as_existing = parser_function(root + "/" + str(file), CONTEXT_URL,
output_file=output_jsonld, verbose=verbose, with_blocs=with_blocs)
output_file=output_jsonld, verbose=verbose, with_blocks=with_blocks)

if not jsonld_same_as_existing: # do not generate the png if the jsonld has not evolved
visualize(output_jsonld, output_file=output_png)
Expand Down Expand Up @@ -97,7 +97,7 @@ def main():
filename_ss_ext = file.split(".tcsh")[0]
process_file(context_write, root, file, filename_ss_ext, output_dir_afni, afni_to_bids_prov, opt.verbose)
# afni block
process_file(context_write, root, file, filename_ss_ext, output_dir_afni, afni_to_bids_prov, opt.verbose, with_blocs=True)
process_file(context_write, root, file, filename_ss_ext, output_dir_afni, afni_to_bids_prov, opt.verbose, with_blocks=True)

else:
print(" -> Extension of file ", file, " not supported")
Expand Down

0 comments on commit 8866237

Please sign in to comment.