Skip to content

Commit

Permalink
make logging visible, chunks
Browse files Browse the repository at this point in the history
  • Loading branch information
akhanf committed Sep 6, 2024
1 parent 7fdc3a8 commit e17fea4
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 15 deletions.
18 changes: 9 additions & 9 deletions config/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -55,20 +55,20 @@ bigstitcher:

fuse_dataset:
downsampling: 1
block_size_x: 4096 # for storage
block_size_y: 4096
block_size_z: 1
block_size_factor_x: 2 #e.g. 2 will use 2*block_size for computation
block_size_factor_y: 2
block_size_x: 256 # for storage
block_size_y: 256
block_size_z: 256
block_size_factor_x: 1 #e.g. 2 will use 2*block_size for computation
block_size_factor_y: 1
block_size_factor_z: 1

ome_zarr:
desc: stitchedflatcorr
max_downsampling_layers: 5 # number of downsampling layers on top of the original, e.g. 4 gives the pyramid { 0: orig, 1: ds2, 2: ds4, 3: ds8, 4: ds16 }
rechunk_size: #z, y, x
- 1
- 4096
- 4096
rechunk_size: #z, y, x
- 256
- 256
- 256
scaling_method: 'local_mean' #can be nearest, gaussian, local_mean, zoom (zoom uses spline interp)

omero_metadata:
Expand Down
5 changes: 2 additions & 3 deletions workflow/rules/bigstitcher.smk
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ rule zarr_to_bdv:
)
/ "dataset.xml"
),
chunks=(128,256,256) #the previous default was 1 x Nx x Ny (Nx Ny were full tile size!)
output:
bdv_n5=temp(
directory(
Expand Down Expand Up @@ -159,7 +158,7 @@ rule bigstitcher:
"cp {input.dataset_xml} {output.dataset_xml} && "
" {params.fiji_launcher_cmd} && "
" echo ' -macro {input.ijm} \"{params.macro_args}\"' >> {output.launcher} "
" && {output.launcher} &> {log} && {params.rm_old_xml}"
" && {output.launcher} |& tee {log} && {params.rm_old_xml}"


rule fuse_dataset:
Expand Down Expand Up @@ -246,7 +245,7 @@ rule fuse_dataset:
shell:
" {params.fiji_launcher_cmd} && "
" echo ' -macro {input.ijm} \"{params.macro_args}\"' >> {output.launcher} "
" && {output.launcher} &> {log}"
" && {output.launcher} |& tee {log}"


rule fuse_dataset_spark:
Expand Down
2 changes: 1 addition & 1 deletion workflow/rules/flatfield_corr.smk
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ rule apply_basic_flatfield_corr:
allow_missing=True,
),
params:
out_chunks=(1,1,128,256,256) #make this a config option -- setting it here instead of rechunking in zarr2bdv
out_chunks=config["ome_zarr"]["rechunk_size"],
output:
zarr=temp(
directory(
Expand Down
2 changes: 1 addition & 1 deletion workflow/scripts/apply_basic_flatfield_corr_zarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def apply_basic_parallel(x):
chan_arr_list.append(arr_corr)

#stack along chans
arr_stacked = da.stack(chan_arr_list,axis=1).rechunk(snakemake.params.out_chunks)
arr_stacked = da.stack(chan_arr_list,axis=1).rechunk([1,1] + snakemake.params.out_chunks)

with ProgressBar():
da.to_zarr(arr_stacked,snakemake.output.zarr,overwrite=True,dimension_separator='/')
2 changes: 1 addition & 1 deletion workflow/scripts/zarr_to_n5_bdv.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,6 @@ def update_xml_h5_to_n5(in_xml,out_xml,in_n5):
#add attributes for downsampling as a list, and datatype to the setup# level
g = zarr.open_group(store=n5_store,path=f'setup{setup_i}',mode='r+')
g.attrs['downsamplingFactors']=ds_list
g.attrs['dataType']='int16'
g.attrs['dataType']='uint16'


0 comments on commit e17fea4

Please sign in to comment.