cleanup and fixes
akhanf committed Oct 8, 2024
1 parent 3cafe41 commit 3799107
Showing 4 changed files with 5 additions and 8 deletions.
4 changes: 1 addition & 3 deletions config/config.yml
@@ -5,8 +5,6 @@ work: 'work'

remote_creds: '~/.config/gcloud/application_default_credentials.json' #this is needed so we can pass creds to container

-write_ome_zarr_direct: True #use this to skip writing the final zarr output to work first and copying afterwards -- useful when work is not a fast local disk
-
use_zipstore: False #if True, produce SPIM.ome.zarr.zip instead of SPIM.ome.zarr

#total resources available, used to set rule resources
@@ -47,7 +45,7 @@ bigstitcher:
min_r: 0.7
max_shift_total: 50
global_optimization:
-enabled: 1
+enabled: 1
method: TWO_ROUND_ITERATIVE
methods: #unused, only for reference
ONE_ROUND_SIMPLE: "One-Round"
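With `write_ome_zarr_direct` gone, `use_zipstore` is the remaining switch between the two output forms named in its comment (SPIM.ome.zarr.zip when True, SPIM.ome.zarr otherwise). A minimal sketch of how such a flag could select the output name; `ome_zarr_output_name` is a hypothetical helper, not code from this repository:

```python
# Hypothetical sketch, not part of this repository: maps the use_zipstore flag
# from config/config.yml to the two output forms described in its comment.
def ome_zarr_output_name(config: dict) -> str:
    ext = ".ome.zarr.zip" if config.get("use_zipstore", False) else ".ome.zarr"
    return f"SPIM{ext}"


assert ome_zarr_output_name({"use_zipstore": True}) == "SPIM.ome.zarr.zip"
assert ome_zarr_output_name({"use_zipstore": False}) == "SPIM.ome.zarr"
```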
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -30,11 +30,11 @@ tifffile = "^2024.5.10"

[tool.poe.tasks]
test_localin_gcsout = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_local.tsv root='gcs://khanlab-lightsheet/data/test_bids'"
-test_localin_localout = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_gcs.tsv root=bids"
+test_localin_localout = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_local.tsv root=bids"
test_gcsin_gcsout = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_gcs.tsv root='gcs://khanlab-lightsheet/data/test_bids'"
test_gcsin_localout = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_gcs.tsv root=bids"
-test_localin_localout_zipstore = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_gcs.tsv root=bids use_zipstore=True"
-test_localin_gcsout_zipstore = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_gcs.tsv root='gcs://khanlab-lightsheet/data/test_bids' use_zipstore=True"
+test_localin_localout_zipstore = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_local.tsv root=bids use_zipstore=True"
+test_localin_gcsout_zipstore = "snakemake --dry-run --config datasets=testing/dryrun_tests/datasets_local.tsv root='gcs://khanlab-lightsheet/data/test_bids' use_zipstore=True"

[build-system]
requires = ["poetry-core"]
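Each of these tasks is a Snakemake dry run whose `--config key=value` pairs override the matching keys in config/config.yml. A toy sketch of that precedence (a plain dict merge, not Snakemake's actual config handling; the `use_zipstore` default comes from the config.yml hunk above):

```python
# Toy illustration only -- not Snakemake internals. Command-line --config
# overrides win over the defaults loaded from config/config.yml.
defaults = {"use_zipstore": False}          # default in config/config.yml
cli_overrides = {                           # e.g. test_localin_localout_zipstore
    "datasets": "testing/dryrun_tests/datasets_local.tsv",
    "root": "bids",
    "use_zipstore": True,
}
config = {**defaults, **cli_overrides}
print(config["use_zipstore"])               # True: the CLI value wins
```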
1 change: 0 additions & 1 deletion workflow/rules/common.smk
@@ -183,7 +183,6 @@ def get_input_dataset(wildcards):
def get_metadata_json(wildcards):
    """returns path to metadata, extracted from local or gcs"""
    dataset_path = Path(get_dataset_path(wildcards))
-    suffix = dataset_path.suffix

    if is_remote_gcs(dataset_path):
        return rules.blaze_to_metadata_gcs.output.metadata_json.format(**wildcards)
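The deleted `suffix = dataset_path.suffix` assignment looks like an unused local, consistent with the commit's cleanup theme. The last shown line uses a common Snakemake idiom: a rule's declared output is a wildcard template, and formatting it with the current wildcards yields a concrete path. A small illustration with made-up names and paths:

```python
# Illustration only: the template and wildcard values below are made up,
# but the .format(**wildcards) idiom matches the line shown above.
template = "work/sub-{subject}/micr/sub-{subject}_desc-gcs_metadata.json"
wildcards = {"subject": "mouse01"}
print(template.format(**wildcards))
# -> work/sub-mouse01/micr/sub-mouse01_desc-gcs_metadata.json
```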
2 changes: 1 addition & 1 deletion workflow/rules/import.smk
@@ -42,7 +42,7 @@ rule blaze_to_metadata_gcs:
        storage_provider_settings=workflow.storage_provider_settings,
    output:
        metadata_json=bids(
-            root=root,
+            root=work,
            desc="gcs",
            subject="{subject}",
            datatype="micr",
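Switching `root=root` to `root=work` presumably places this intermediate metadata JSON under the work directory (the `work: 'work'` setting visible in the config.yml hunk context) rather than in the final output tree. A simplified stand-in for the `bids()` path builder, only to show how the root prefix and entities compose; this is not the real implementation, and the entity order and suffix are assumptions since the hunk is truncated:

```python
# Simplified, hypothetical stand-in for the bids() path builder -- not the
# real implementation; entity order and the suffix are assumptions.
def bids_like(root, subject, datatype, desc, suffix):
    return f"{root}/sub-{subject}/{datatype}/sub-{subject}_desc-{desc}_{suffix}"


# Before this commit the prefix was the final output root; now it is work/.
print(bids_like("work", "{subject}", "micr", "gcs", "metadata.json"))
# -> work/sub-{subject}/micr/sub-{subject}_desc-gcs_metadata.json
```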
