
Commit

linting
akhanf committed Aug 23, 2024
1 parent 942860a commit 6d05409
Showing 2 changed files with 34 additions and 17 deletions.
5 changes: 4 additions & 1 deletion workflow/rules/common.smk
@@ -128,9 +128,11 @@ def get_bids_toplevel_targets():
     targets.append(bids_toplevel(resampled, "dataset_description.json"))
     return targets
 
+
 def dataset_is_remote(wildcards):
     return is_remote_gcs(Path(get_dataset_path(wildcards)))
 
+
 def get_input_dataset(wildcards):
     """returns path to extracted dataset or path to provided input folder"""
     dataset_path = Path(get_dataset_path(wildcards))
@@ -149,6 +151,7 @@ def get_input_dataset(wildcards):
     else:
         print(f"unsupported input: {dataset_path}")
 
+
 def get_metadata_json(wildcards):
     """returns path to metadata, extracted from local or gcs"""
     dataset_path = Path(get_dataset_path(wildcards))
@@ -158,7 +161,7 @@ def get_metadata_json(wildcards):
         return rules.blaze_to_metadata_gcs.output.metadata_json.format(**wildcards)
     else:
         return rules.blaze_to_metadata.output.metadata_json.format(**wildcards)
-
+
 # import
 def cmd_extract_dataset(wildcards, input, output):
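Note on the gcs-vs-local dispatch above: dataset_is_remote and get_metadata_json both branch on is_remote_gcs, which is defined elsewhere in common.smk and not shown in this diff. A minimal sketch of the assumed check, with the helper body treated as hypothetical:

    from pathlib import Path

    def is_remote_gcs(path: Path) -> bool:
        # a gs:// URI stored in a Path keeps "gs:" as its first component,
        # e.g. Path("gs://bucket/ds").parts == ("gs:", "bucket", "ds")
        return path.parts[0] == "gs:"

    print(is_remote_gcs(Path("gs://my-bucket/spim-dataset")))  # True
    print(is_remote_gcs(Path("/data/spim-dataset")))           # False

Under this assumption, get_metadata_json returns the gcs rule's output path for gs:// datasets and the local rule's output path otherwise.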
46 changes: 30 additions & 16 deletions workflow/rules/import.smk
@@ -32,17 +32,18 @@ rule extract_dataset:
     shell:
         "{params.cmd}"
 
+
 rule blaze_to_metadata_gcs:
     input:
-        creds = os.path.expanduser(config["remote_creds"])
+        creds=os.path.expanduser(config["remote_creds"]),
     params:
         dataset_path=get_dataset_path_gs,
         in_tif_pattern=lambda wildcards: config["import_blaze"]["raw_tif_pattern"],
         storage_provider_settings=workflow.storage_provider_settings,
     output:
         metadata_json=bids(
             root=root,
-            desc='gcs',
+            desc="gcs",
             subject="{subject}",
             datatype="micr",
             sample="{sample}",
@@ -84,15 +85,17 @@ rule blaze_to_metadata:
config["import_blaze"]["raw_tif_pattern"],
),
output:
metadata_json=temp(bids(
root=work,
subject="{subject}",
desc='local',
datatype="micr",
sample="{sample}",
acq="{acq,[a-zA-Z0-9]*blaze[a-zA-Z0-9]*}",
suffix="SPIM.json",
)),
metadata_json=temp(
bids(
root=work,
subject="{subject}",
desc="local",
datatype="micr",
sample="{sample}",
acq="{acq,[a-zA-Z0-9]*blaze[a-zA-Z0-9]*}",
suffix="SPIM.json",
)
),
benchmark:
bids(
root="benchmarks",
@@ -121,7 +124,7 @@ rule blaze_to_metadata:

 rule copy_blaze_metadata:
     input:
-        json=get_metadata_json
+        json=get_metadata_json,
     output:
         metadata_json=bids(
             root=root,
@@ -131,7 +134,18 @@ rule copy_blaze_metadata:
acq="{acq,[a-zA-Z0-9]*blaze[a-zA-Z0-9]*}",
suffix="SPIM.json",
),
shell: 'cp {input} {output}'
log:
bids(
root="logs",
datatype="copy_blaze_metadata",
subject="{subject}",
sample="{sample}",
acq="{acq}",
suffix="log.txt",
),
shell:
"cp {input} {output} &> {log}"


 rule prestitched_to_metadata:
     input:
@@ -228,17 +242,17 @@ rule tif_to_zarr:
     script:
         "../scripts/tif_to_zarr.py"
 
+
 rule tif_to_zarr_gcs:
     """ use dask to load tifs in parallel and write to zarr
     output shape is (tiles,channels,z,y,x), with the 2d
     images as the chunks"""
     input:
         metadata_json=rules.copy_blaze_metadata.output.metadata_json,
-        creds = os.path.expanduser(config["remote_creds"])
+        creds=os.path.expanduser(config["remote_creds"]),
     params:
         dataset_path=get_dataset_path_gs,
-        in_tif_pattern=lambda wildcards:
-            config["import_blaze"]["raw_tif_pattern"],
+        in_tif_pattern=lambda wildcards: config["import_blaze"]["raw_tif_pattern"],
         intensity_rescaling=config["import_blaze"]["intensity_rescaling"],
         storage_provider_settings=workflow.storage_provider_settings,
     output:
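The tif_to_zarr_gcs docstring summarizes the import pattern: load the 2D tifs lazily in parallel with dask and write a (tiles, channels, z, y, x) zarr array whose chunks are the individual 2D images. A minimal, self-contained sketch of that idea (the filename layout, shapes, and dtype are assumptions for illustration; the repo's actual implementation is in ../scripts/tif_to_zarr.py and differs in detail):

    import dask
    import dask.array as da
    import numpy as np
    from tifffile import imread

    # per-plane shape and dtype, normally taken from the metadata json
    NY, NX = 2048, 2048
    DTYPE = np.uint16

    def lazy_plane(path):
        """Wrap one 2D tif as a single delayed chunk (nothing is read yet)."""
        return da.from_delayed(dask.delayed(imread)(path), shape=(NY, NX), dtype=DTYPE)

    # hypothetical layout: one tif per (tile, channel, z) plane
    tiles, channels, nz = 4, 2, 100
    pattern = "tile{t:02d}_ch{c}_z{z:04d}.tif"

    darr = da.stack([
        da.stack([
            da.stack([lazy_plane(pattern.format(t=t, c=c, z=z)) for z in range(nz)])
            for c in range(channels)
        ])
        for t in range(tiles)
    ])
    # darr.shape == (4, 2, 100, 2048, 2048), chunked as (1, 1, 1, NY, NX):
    # each 2D image is one chunk, matching the docstring's description
    darr.to_zarr("SPIM.zarr")

Writing this way lets dask parallelize both the tif reads and the zarr writes, one image per task.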
