Commit

Update analyses
constantinpape committed Nov 25, 2024
1 parent 9728951 commit 3b4c6c4
Showing 5 changed files with 165 additions and 8 deletions.
8 changes: 8 additions & 0 deletions scripts/cooper/analysis/.gitignore
@@ -0,0 +1,8 @@
screenshots/
20241108_3D_Imig_DATA_2014/
*az*/
mrc_files/
imig_data/
results/
*.xlsx
*.tsv
136 changes: 136 additions & 0 deletions scripts/cooper/analysis/export_az_to_imod.py
@@ -0,0 +1,136 @@
import os
import tempfile
from glob import glob
from subprocess import run
from shutil import copyfile

import h5py
import pandas as pd

from synaptic_reconstruction.imod.to_imod import write_segmentation_to_imod
from scipy.ndimage import binary_dilation, binary_closing


def check_imod(tomo_path, mod_path):
    run(["imod", tomo_path, mod_path])


def export_all_to_imod(check_input=True, check_export=True):
    files = sorted(glob("./az_segmentation/**/*.h5", recursive=True))
    mrc_root = "./mrc_files"
    output_folder = "./az_export/initial_model"

    for ff in files:
        ds, fname = os.path.split(ff)
        ds = os.path.basename(ds)
        out_folder = os.path.join(output_folder, ds)
        out_path = os.path.join(out_folder, fname.replace(".h5", ".mod"))
        if os.path.exists(out_path):
            continue

        os.makedirs(out_folder, exist_ok=True)
        mrc_path = os.path.join(mrc_root, ds, fname.replace(".h5", ".rec"))
        assert os.path.exists(mrc_path), mrc_path

        with h5py.File(ff, "r") as f:
            seg = f["thin_az"][:]

        seg = binary_dilation(seg, iterations=2)
        seg = binary_closing(seg, iterations=2)

        write_segmentation_to_imod(mrc_path, seg, out_path)

        if check_input:
            import napari
            from elf.io import open_file
            with open_file(mrc_path, "r") as f:
                raw = f["data"][:]
            v = napari.Viewer()
            v.add_image(raw)
            v.add_labels(seg)
            napari.run()

        if check_export:
            check_imod(mrc_path, out_path)


# https://bio3d.colorado.edu/imod/doc/man/reducecont.html
def reduce_all_contours():
    pass


# https://bio3d.colorado.edu/imod/doc/man/smoothsurf.html#TOP
def smooth_all_surfaces(check_output=True):
    input_files = sorted(glob("./az_export/initial_model/**/*.mod", recursive=True))

    mrc_root = "./mrc_files"
    output_folder = "./az_export/smoothed_model"
    for ff in input_files:
        ds, fname = os.path.split(ff)
        ds = os.path.basename(ds)
        out_folder = os.path.join(output_folder, ds)
        out_file = os.path.join(out_folder, fname)
        if os.path.exists(out_file):
            continue

        os.makedirs(out_folder, exist_ok=True)
        run(["smoothsurf", ff, out_file])
        if check_output:
            mrc_path = os.path.join(mrc_root, ds, fname.replace(".mod", ".rec"))
            assert os.path.exists(mrc_path), mrc_path
            check_imod(mrc_path, out_file)


def measure_surfaces():
    input_files = sorted(glob("./az_export/smoothed_model/**/*.mod", recursive=True))

    result = {
        "Dataset": [],
        "Tomogram": [],
        "AZ Surface": [],
    }
    for ff in input_files:
        ds, fname = os.path.split(ff)
        ds = os.path.basename(ds)
        fname = os.path.splitext(fname)[0]

        with tempfile.NamedTemporaryFile() as f_mesh, tempfile.NamedTemporaryFile() as f_mod:
            tmp_path_mesh = f_mesh.name
            tmp_path_mod = f_mod.name
            copyfile(ff, tmp_path_mesh)
            run(["imodmesh", tmp_path_mesh])
            run(["imodinfo", "-f", tmp_path_mod, tmp_path_mesh])
            area = None
            with open(tmp_path_mod, "r") as f:
                for line in f.readlines():
                    line = line.strip()
                    if line.startswith("Total mesh surface area"):
                        area = float(line.split(" ")[-1])
            assert area is not None
            area /= 2

        result["Dataset"].append(ds)
        result["Tomogram"].append(fname)
        result["AZ Surface"].append(area)

    result = pd.DataFrame(result)
    result.to_excel("./az_measurements_all.xlsx", index=False)


def filter_surfaces():
    all_results = pd.read_excel("./az_measurements_all.xlsx")
    man_tomos = pd.read_csv("./man_tomos.tsv", sep="\t")

    man_results = all_results.merge(man_tomos[["Dataset", "Tomogram"]], on=["Dataset", "Tomogram"], how="inner")
    man_results.to_excel("./az_measurements_manual.xlsx", index=False)


def main():
    export_all_to_imod(False, False)
    smooth_all_surfaces(False)
    # measure_surfaces()
    filter_surfaces()


if __name__ == "__main__":
    main()
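
reduce_all_contours above is left as a stub that only links to the reducecont man page. A hypothetical sketch of how it could be filled in, mirroring the smooth_all_surfaces pattern; the ./az_export/reduced_model folder and the option-free reducecont call (input model, output model) are assumptions, not part of the commit:

import os
from glob import glob
from subprocess import run


def reduce_all_contours():
    # Untested sketch: assumes reducecont accepts input and output model paths,
    # analogous to the smoothsurf call in smooth_all_surfaces.
    input_files = sorted(glob("./az_export/initial_model/**/*.mod", recursive=True))
    output_folder = "./az_export/reduced_model"
    for ff in input_files:
        ds, fname = os.path.split(ff)
        ds = os.path.basename(ds)
        out_folder = os.path.join(output_folder, ds)
        out_file = os.path.join(out_folder, fname)
        if os.path.exists(out_file):
            continue
        os.makedirs(out_folder, exist_ok=True)
        # Reduce the number of points in the model contours before smoothing.
        run(["reducecont", ff, out_file])
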
13 changes: 10 additions & 3 deletions scripts/inner_ear/analysis/analyze_distances.py
@@ -7,6 +7,8 @@

from common import get_all_measurements, get_measurements_with_annotation

POOL_DICT = {"Docked-V": "MP-V", "MP-V": "MP-V", "RA-V": "RA-V"}


def _plot_all(distances):
pools = pd.unique(distances["pool"])
@@ -45,7 +47,7 @@ def _plot_selected(distances, save_path=None):

def _plot(pool_name, distance_col, structure_name, ax):

this_distances = distances[distances["pool"] == pool_name][["tomogram", "approach", distance_col]]
this_distances = distances[distances["combined_pool"] == pool_name][["tomogram", "approach", distance_col]]

ax.set_title(f"{pool_name} to {structure_name}")
sns.histplot(
@@ -88,8 +90,8 @@ def _plot(pool_name, distance_col, structure_name, ax):
# NOTE: we over-ride a plot here, should not do this in the actual version
_plot("MP-V", "pd_distance [nm]", "PD", axes[0, 0])
_plot("MP-V", "boundary_distance [nm]", "AZ Membrane", axes[0, 1])
_plot("Docked-V", "pd_distance [nm]", "PD", axes[1, 0])
_plot("Docked-V", "boundary_distance [nm]", "AZ Membrane", axes[1, 0])
# _plot("Docked-V", "pd_distance [nm]", "PD", axes[1, 0])
# _plot("Docked-V", "boundary_distance [nm]", "AZ Membrane", axes[1, 0])
_plot("RA-V", "ribbon_distance [nm]", "Ribbon", axes[1, 1])

fig.tight_layout()
@@ -103,16 +105,19 @@ def for_tomos_with_annotation(plot_all=True):
["tomogram", "pool", "ribbon_distance [nm]", "pd_distance [nm]", "boundary_distance [nm]"]
]
manual_distances["approach"] = ["manual"] * len(manual_distances)
manual_distances.insert(1, "combined_pool", manual_distances["pool"].replace(POOL_DICT))

semi_automatic_distances = semi_automatic_assignments[
["tomogram", "pool", "ribbon_distance [nm]", "pd_distance [nm]", "boundary_distance [nm]"]
]
semi_automatic_distances["approach"] = ["semi_automatic"] * len(semi_automatic_distances)
semi_automatic_distances.insert(1, "combined_pool", semi_automatic_distances["pool"].replace(POOL_DICT))

proofread_distances = proofread_assignments[
["tomogram", "pool", "ribbon_distance [nm]", "pd_distance [nm]", "boundary_distance [nm]"]
]
proofread_distances["approach"] = ["proofread"] * len(proofread_distances)
proofread_distances.insert(1, "combined_pool", proofread_distances["pool"].replace(POOL_DICT))

distances = pd.concat([manual_distances, semi_automatic_distances, proofread_distances])
if plot_all:
Expand All @@ -129,11 +134,13 @@ def for_all_tomos(plot_all=True):
["tomogram", "pool", "ribbon_distance [nm]", "pd_distance [nm]", "boundary_distance [nm]"]
]
semi_automatic_distances["approach"] = ["semi_automatic"] * len(semi_automatic_distances)
semi_automatic_distances.insert(1, "combined_pool", semi_automatic_distances["pool"].replace(POOL_DICT))

proofread_distances = proofread_assignments[
["tomogram", "pool", "ribbon_distance [nm]", "pd_distance [nm]", "boundary_distance [nm]"]
]
proofread_distances["approach"] = ["proofread"] * len(proofread_distances)
proofread_distances.insert(1, "combined_pool", proofread_distances["pool"].replace(POOL_DICT))

distances = pd.concat([semi_automatic_distances, proofread_distances])
if plot_all:
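
The combined_pool column added in this diff collapses the docked vesicles into the membrane-proximal pool via POOL_DICT, so that _plot_selected can filter on distances["combined_pool"]. A small self-contained illustration of the replace (toy values, not taken from the data):

import pandas as pd

POOL_DICT = {"Docked-V": "MP-V", "MP-V": "MP-V", "RA-V": "RA-V"}

# Toy distance table with one vesicle per row.
distances = pd.DataFrame({
    "tomogram": ["tomo1", "tomo1", "tomo2"],
    "pool": ["Docked-V", "MP-V", "RA-V"],
    "pd_distance [nm]": [12.0, 35.0, 110.0],
})
distances.insert(1, "combined_pool", distances["pool"].replace(POOL_DICT))

# The Docked-V row now carries combined_pool == "MP-V", so it is grouped with
# the membrane-proximal vesicles when filtering on the combined_pool column.
print(distances)
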
9 changes: 7 additions & 2 deletions scripts/inner_ear/analysis/analyze_vesicle_diameters.py
@@ -12,6 +12,8 @@

from common import get_finished_tomos

POOL_DICT = {"Docked-V": "MP-V", "MP-V": "MP-V", "RA-V": "RA-V"}

sys.path.append("../processing")


@@ -42,6 +44,7 @@ def aggregate_diameters(data_root, table, save_path, get_tab, include_names, she
radius_table.append(this_tab)

radius_table = pd.concat(radius_table)
radius_table.insert(1, "combined_pool", radius_table["pool"].replace(POOL_DICT))

print("Saving table for", len(radius_table), "vesicles to", save_path, sheet_name)
if os.path.exists(save_path):
@@ -91,11 +94,13 @@ def aggregate_diameters_imod(data_root, table, save_path, include_names, sheet_n
except AssertionError:
continue

# Determined from matching the size of vesicles in IMOD.
radius_factor = 0.85
this_tab = pd.DataFrame({
"tomogram": [tomo_name] * len(radii),
"pool": [label_names[label_id] for label_id in labels],
"radius [nm]": radii,
"diameter [nm]": 2 * radii,
"radius [nm]": radii * radius_factor,
"diameter [nm]": 2 * radii * radius_factor,
})
radius_table.append(this_tab)

7 changes: 4 additions & 3 deletions synaptic_reconstruction/imod/to_imod.py
@@ -182,7 +182,7 @@ def _pad(inp, n=3):

def write_segmentation_to_imod_as_points(
mrc_path: str,
segmentation_path: str,
segmentation: Union[str, np.ndarray],
output_path: str,
min_radius: Union[int, float],
radius_factor: float = 1.0,
@@ -194,7 +194,7 @@ def write_segmentation_to_imod_as_points(
Args:
mrc_path: Filepath to the mrc volume that was segmented.
segmentation_path: Filepath to the segmentation stored as .tif.
segmentation: The segmentation (either as numpy array or filepath to a .tif file).
output_path: Where to save the .mod file.
min_radius: Minimum radius for export.
radius_factor: Factor for increasing the radius to account for too small exported spheres.
@@ -211,7 +211,8 @@
resolution = [res / 10 for res in resolution]

# Extract the center coordinates and radii from the segmentation.
segmentation = imageio.imread(segmentation_path)
if isinstance(segmentation, str):
segmentation = imageio.imread(segmentation)
coordinates, radii = convert_segmentation_to_spheres(
segmentation, resolution=resolution, radius_factor=radius_factor, estimate_radius_2d=estimate_radius_2d
)
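
With this change write_segmentation_to_imod_as_points accepts the segmentation either as a filepath to a .tif file or as an in-memory numpy array. A minimal usage sketch; the file names, the hdf5 key, and the min_radius value are illustrative only:

import h5py

from synaptic_reconstruction.imod.to_imod import write_segmentation_to_imod_as_points

# Previous behavior: pass the segmentation as a .tif filepath.
write_segmentation_to_imod_as_points(
    "tomogram.rec", "vesicle_segmentation.tif", "vesicles.mod", min_radius=10,
)

# New behavior: pass a numpy array directly, e.g. loaded from an hdf5 file.
with h5py.File("vesicle_segmentation.h5", "r") as f:
    seg = f["segmentation"][:]
write_segmentation_to_imod_as_points(
    "tomogram.rec", seg, "vesicles.mod", min_radius=10,
)
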
