updates to get bigstitcher-spark fusion working
spimprep-deps WIP
akhanf committed Sep 24, 2024
1 parent 068d422 commit 2357f4c
Showing 5 changed files with 26 additions and 6 deletions.
22 changes: 21 additions & 1 deletion workflow/rules/bigstitcher.smk
@@ -269,6 +269,20 @@ rule fuse_dataset_spark:
            suffix="bigstitcher.xml",
        ),
        ijm=Path(workflow.basedir) / "macros" / "FuseImageMacroZarr.ijm",
+    params:
+        channel=lambda wildcards: "--channelId={channel}".format(
+            channel=get_stains(wildcards).index(wildcards.stain)
+        ),
+        block_size="--blockSize={bsx},{bsy},{bsz}".format(
+            bsx=config["bigstitcher"]["fuse_dataset"]["block_size_x"],
+            bsy=config["bigstitcher"]["fuse_dataset"]["block_size_y"],
+            bsz=config["bigstitcher"]["fuse_dataset"]["block_size_z"],
+        ),
+        block_size_factor="--blockScale={bsfx},{bsfy},{bsfz}".format(
+            bsfx=config["bigstitcher"]["fuse_dataset"]["block_size_factor_x"],
+            bsfy=config["bigstitcher"]["fuse_dataset"]["block_size_factor_y"],
+            bsfz=config["bigstitcher"]["fuse_dataset"]["block_size_factor_z"],
+        ),
    output:
        zarr=temp(
            directory(
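
For reference, the three new params entries just render plain CLI flags that Snakemake later substitutes into the shell command. A minimal sketch of that rendering, assuming a config with the same keys; the values and the stain list (standing in for get_stains(wildcards)) are illustrative:

fuse_cfg = {  # stand-in for config["bigstitcher"]["fuse_dataset"]; values illustrative
    "block_size_x": 256, "block_size_y": 256, "block_size_z": 32,
    "block_size_factor_x": 1, "block_size_factor_y": 1, "block_size_factor_z": 1,
}
stains = ["AutoF", "Abeta"]  # stand-in for get_stains(wildcards)

channel = "--channelId={channel}".format(channel=stains.index("Abeta"))
block_size = "--blockSize={bsx},{bsy},{bsz}".format(
    bsx=fuse_cfg["block_size_x"], bsy=fuse_cfg["block_size_y"], bsz=fuse_cfg["block_size_z"]
)
block_size_factor = "--blockScale={bsfx},{bsfy},{bsfz}".format(
    bsfx=fuse_cfg["block_size_factor_x"],
    bsfy=fuse_cfg["block_size_factor_y"],
    bsfz=fuse_cfg["block_size_factor_z"],
)
print(channel, block_size, block_size_factor)
# --channelId=1 --blockSize=256,256,32 --blockScale=1,1,1
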
@@ -315,4 +329,10 @@ rule fuse_dataset_spark:
    group:
        "preproc"
    shell:
"affine-fusion ..."
"affine-fusion --preserveAnisotropy -x {input.dataset_xml} "
" -o {output.zarr} -d /fused/s0 -s ZARR "
" --UINT16 --minIntensity 0 --maxIntensity 65535 "


# " --UINT8 --minIntensity 0 --maxIntensity 255 "
"{params}" # all the params
2 changes: 1 addition & 1 deletion workflow/scripts/apply_basic_flatfield_corr_zarr.py
@@ -41,7 +41,7 @@
#now we want to apply correction to all images
#define a function to map
def apply_basic_parallel(x):
-    return np.reshape(basic.transform(x.squeeze()),(1,1,img_shape[0],img_shape[1])).astype('int16')
+    return np.reshape(basic.transform(x.squeeze()),(1,1,img_shape[0],img_shape[1])).astype('uint16')
arr_corr = da.map_blocks(apply_basic_parallel,arr_chan)

chan_arr_list.append(arr_corr)
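
The int16 to uint16 switch here (and in the two tif_to_zarr scripts below) matters because corrected intensities are non-negative and can exceed int16's maximum of 32767, so the signed cast silently wraps bright pixels to negative values. A quick demonstration, not from the repo:

import numpy as np

vals = np.array([100, 40000, 65000], dtype=np.int32)  # plausible corrected intensities
print(vals.astype('int16'))   # [  100 -25536   -536]  -> bright pixels wrap negative
print(vals.astype('uint16'))  # [  100  40000  65000]  -> full 16-bit range preserved
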
2 changes: 1 addition & 1 deletion workflow/scripts/tif_to_zarr.py
@@ -59,7 +59,7 @@ def single_imread(*args):

#rescale intensities, and recast
darr = darr * snakemake.params.intensity_rescaling
-darr = darr.astype('int16')
+darr = darr.astype('uint16')

#now we can do the computation itself, storing to zarr
print('writing images to zarr with dask')
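
The same one-line fix is applied to the GCS variant below. For context, the multiply above promotes the array to float and the cast truncates it back to unsigned 16-bit; a small self-contained sketch of the rescale-then-recast pattern, with a made-up rescaling factor:

import numpy as np
import dask.array as da

darr = da.from_array(np.array([10, 200, 3000, 60000], dtype=np.uint16), chunks=2)
darr = darr * 1.05            # stand-in for snakemake.params.intensity_rescaling
darr = darr.astype('uint16')  # truncates; a factor pushing values past 65535 would wrap
print(darr.compute())         # [   10   210  3150 63000]
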
2 changes: 1 addition & 1 deletion workflow/scripts/tif_to_zarr_gcs.py
@@ -77,7 +77,7 @@ def build_zstack(gcs_uris,fs):

#rescale intensities, and recast
darr = darr * snakemake.params.intensity_rescaling
-darr = darr.astype('int16')
+darr = darr.astype('uint16')

#now we can do the computation itself, storing to zarr
print('writing images to zarr with dask')
4 changes: 2 additions & 2 deletions workflow/scripts/zarr_to_n5_bdv.py
@@ -99,7 +99,7 @@ def update_xml_h5_to_n5(in_xml,out_xml,in_n5):
ds_list=[] #for setup-level attrs
for ds in range(max_downsampling_layers):
step=2**ds #1,2,4,8..
-zstack = da.squeeze(darr[tile_i,chan_i,:,::step,::step]).astype(np.int16)
+zstack = da.squeeze(darr[tile_i,chan_i,:,::step,::step])
print(f'writing to setup{setup_i}/timepoint0/s{ds}')
with ProgressBar():
zstack.to_zarr(n5_store,component=f'setup{setup_i}/timepoint0/s{ds}',overwrite=True,compute=True)
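
Dropping the astype(np.int16) lets each pyramid level keep the array's native dtype (uint16 after the changes above) instead of wrapping it. The loop builds levels by strided slicing in y and x while keeping z at full resolution; a self-contained sketch of the pattern, with an illustrative shape:

import numpy as np

zstack_full = np.zeros((64, 1024, 1024), dtype=np.uint16)  # (z, y, x); shape illustrative
max_downsampling_layers = 4                                # illustrative
for ds in range(max_downsampling_layers):
    step = 2**ds                            # 1,2,4,8..
    level = zstack_full[:, ::step, ::step]  # z kept at full resolution, no cast
    print(ds, level.shape, level.dtype)     # dtype stays uint16 at every level
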
@@ -111,6 +111,6 @@ def update_xml_h5_to_n5(in_xml,out_xml,in_n5):
#add attributes for downsampling as a list, and datatype to the setup# level
g = zarr.open_group(store=n5_store,path=f'setup{setup_i}',mode='r+')
g.attrs['downsamplingFactors']=ds_list
-g.attrs['dataType']='int16'
+g.attrs['dataType']='uint16'
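
With this change the dataType attribute advertised to BigDataViewer matches the uint16 arrays actually written. A minimal sketch of writing these setup-level attributes, assuming zarr-python 2.x with its N5 store; the path and factor list are illustrative, with factors following the loop above (z undownsampled):

import zarr

n5_store = zarr.N5Store('example.n5')  # throwaway local store
g = zarr.open_group(store=n5_store, path='setup0', mode='a')
g.attrs['downsamplingFactors'] = [[1, 1, 1], [2, 2, 1], [4, 4, 1], [8, 8, 1]]
g.attrs['dataType'] = 'uint16'  # must agree with the dtype of the written arrays
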

